00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00021
00022
00023
00024
00025
00026
00027
00028
00029
00030
00031
00032
00033 #ifndef DWC_DEVICE_ONLY
00034
00039 #include "dwc_otg_hcd.h"
00040 #include "dwc_otg_regs.h"
00041
00042
00043 static inline uint8_t frame_list_idx(uint16_t frame)
00044 {
00045 return (frame & (MAX_FRLIST_EN_NUM - 1));
00046 }
00047
00048 static inline uint16_t desclist_idx_inc(uint16_t idx, uint16_t inc, uint8_t speed)
00049 {
00050 return (idx + inc) &
00051 (((speed == DWC_OTG_EP_SPEED_HIGH) ? MAX_DMA_DESC_NUM_HS_ISOC : MAX_DMA_DESC_NUM_GENERIC) - 1);
00052 }
00053
00054 static inline uint16_t desclist_idx_dec(uint16_t idx, uint16_t inc, uint8_t speed)
00055 {
00056 return (idx - inc) &
00057 (((speed == DWC_OTG_EP_SPEED_HIGH) ? MAX_DMA_DESC_NUM_HS_ISOC : MAX_DMA_DESC_NUM_GENERIC) - 1);
00058 }
00059
00060 static inline uint16_t max_desc_num(dwc_otg_qh_t * qh)
00061 {
00062 return (((qh->ep_type == UE_ISOCHRONOUS) && (qh->dev_speed == DWC_OTG_EP_SPEED_HIGH))
00063 ?
00064 MAX_DMA_DESC_NUM_HS_ISOC
00065 :
00066 MAX_DMA_DESC_NUM_GENERIC);
00067 }
00068 static inline uint16_t frame_incr_val(dwc_otg_qh_t * qh)
00069 {
00070 return ((qh->dev_speed == DWC_OTG_EP_SPEED_HIGH)
00071 ? ((qh->interval + 8 - 1) / 8)
00072 :
00073 qh->interval);
00074 }
00075
00076 static int desc_list_alloc(dwc_otg_qh_t * qh)
00077 {
00078 int retval = 0;
00079
00080 qh->desc_list = (dwc_otg_host_dma_desc_t *)
00081 dwc_dma_alloc(sizeof(dwc_otg_host_dma_desc_t) * max_desc_num(qh),
00082 &qh->desc_list_dma
00083 );
00084
00085 if (!qh->desc_list) {
00086 retval = -DWC_E_NO_MEMORY;
00087 DWC_ERROR("%s: DMA descriptor list allocation failed\n", __func__);
00088
00089 }
00090
00091 dwc_memset(qh->desc_list, 0x00, sizeof(dwc_otg_host_dma_desc_t) * max_desc_num(qh));
00092
00093
00094 qh->n_bytes = (uint32_t *) dwc_alloc(sizeof(uint32_t) * max_desc_num(qh));
00095
00096 if (!qh->n_bytes) {
00097 retval = -DWC_E_NO_MEMORY;
00098 DWC_ERROR("%s: Failed to allocate array for descriptors' size actual values\n",
00099 __func__);
00100
00101 }
00102 return retval;
00103
00104 }
00105
00106 static void desc_list_free(dwc_otg_qh_t * qh)
00107 {
00108 if(qh->desc_list) {
00109 dwc_dma_free(max_desc_num(qh), qh->desc_list, qh->desc_list_dma);
00110 qh->desc_list = NULL;
00111 }
00112
00113 if (qh->n_bytes) {
00114 dwc_free(qh->n_bytes);
00115 qh->n_bytes = NULL;
00116 }
00117 }
00118
00119 static int frame_list_alloc(dwc_otg_hcd_t * hcd)
00120 {
00121 int retval = 0;
00122 if (hcd->frame_list)
00123 return 0;
00124
00125 hcd->frame_list = dwc_dma_alloc(4 * MAX_FRLIST_EN_NUM,
00126 &hcd->frame_list_dma
00127 );
00128 if (!hcd->frame_list) {
00129 retval = -DWC_E_NO_MEMORY;
00130 DWC_ERROR("%s: Frame List allocation failed\n", __func__);
00131 }
00132
00133 dwc_memset(hcd->frame_list, 0x00, 4 * MAX_FRLIST_EN_NUM);
00134
00135 return retval;
00136 }
00137
00138 static void frame_list_free(dwc_otg_hcd_t * hcd)
00139 {
00140 if (!hcd->frame_list)
00141 return;
00142
00143 dwc_dma_free(4 * MAX_FRLIST_EN_NUM, hcd->frame_list, hcd->frame_list_dma);
00144 hcd->frame_list = NULL;
00145 }
00146
00147 static void per_sched_enable(dwc_otg_hcd_t * hcd, uint16_t fr_list_en)
00148 {
00149
00150 hcfg_data_t hcfg;
00151
00152 hcfg.d32 = dwc_read_reg32(&hcd->core_if->host_if->host_global_regs->hcfg);
00153
00154 if (hcfg.b.perschedstat) {
00155
00156 return;
00157 }
00158
00159 dwc_write_reg32(&hcd->core_if->host_if->host_global_regs->hflbaddr, hcd->frame_list_dma);
00160
00161 switch(fr_list_en) {
00162 case 64:
00163 hcfg.b.frlisten = 3;
00164 break;
00165 case 32:
00166 hcfg.b.frlisten = 2;
00167 break;
00168 case 16:
00169 hcfg.b.frlisten = 1;
00170 case 8:
00171 hcfg.b.frlisten = 0;
00172 default:
00173 break;
00174 }
00175
00176 hcfg.b.perschedena = 1;
00177
00178 DWC_DEBUGPL(DBG_HCD, "Enabling Periodic schedule\n");
00179 dwc_write_reg32(&hcd->core_if->host_if->host_global_regs->hcfg, hcfg.d32);
00180
00181 }
00182
00183 static void per_sched_disable(dwc_otg_hcd_t * hcd)
00184 {
00185 hcfg_data_t hcfg;
00186
00187 hcfg.d32 = dwc_read_reg32(&hcd->core_if->host_if->host_global_regs->hcfg);
00188
00189 if (!hcfg.b.perschedstat) {
00190
00191 return;
00192 }
00193 hcfg.b.perschedena = 0;
00194
00195 DWC_DEBUGPL(DBG_HCD, "Disabling Periodic schedule\n");
00196 dwc_write_reg32(&hcd->core_if->host_if->host_global_regs->hcfg, hcfg.d32);
00197 }
00198
00199
00200
00201
00202
/*
 * Sets (enable != 0) or clears (enable == 0) the channel's bit in every
 * frame-list entry this QH is scheduled in, and on enable computes the
 * channel's per-microframe schedule mask (hc->schinfo).
 *
 * @param hcd    HCD state owning the frame list.
 * @param qh     QH being scheduled or descheduled.
 * @param enable Non-zero to add the channel to the schedule, zero to remove.
 */
void update_frame_list(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh, uint8_t enable)
{
	uint16_t i, j, inc;
	dwc_hc_t *hc = qh->channel;

	inc = frame_incr_val(qh);

	/* ISOC starts at its scheduled frame slot; INTERRUPT starts at 0. */
	if (qh->ep_type == UE_ISOCHRONOUS)
		i = frame_list_idx(qh->sched_frame);
	else
		i = 0;

	/* Walk the frame list at the QH's interval, wrapping, until the
	 * walk comes back to the starting slot. */
	j = i;
	do {
		if (enable)
			hcd->frame_list[j] |= (1 << hc->hc_num);
		else
			hcd->frame_list[j] &= ~(1 << hc->hc_num);
		j = (j + inc) & (MAX_FRLIST_EN_NUM - 1);
	}
	while (j != i);

	if (!enable)
		return;

	/* Build the microframe schedule mask: for HS, one bit per serviced
	 * microframe within a frame; FS/LS channels get all microframes.
	 * NOTE(review): for HS intervals > 8 microframes, 8 / interval is 0
	 * and schinfo stays 0 -- confirm that matches hardware intent. */
	hc->schinfo = 0;
	if (qh->channel->speed == DWC_OTG_EP_SPEED_HIGH) {
		j = 1;
		for (i = 0; i < 8 / qh->interval; i++) {
			hc->schinfo |= j;
			j = j << qh->interval;
		}
	}
	else {
		hc->schinfo = 0xff;
	}
}
00240 #if 1
00241 void dump_frame_list(dwc_otg_hcd_t * hcd)
00242 {
00243 int i = 0;
00244 DWC_PRINTF("--FRAME LIST (hex) --\n");
00245 for (i = 0; i < MAX_FRLIST_EN_NUM; i++) {
00246 DWC_PRINTF("%x\t",hcd->frame_list[i]);
00247 if (!(i % 8) && i)
00248 DWC_PRINTF("\n");
00249 }
00250 DWC_PRINTF("\n----\n");
00251
00252 }
00253 #endif
00254
/*
 * Releases the host channel bound to the QH: removes it from the periodic
 * frame list (or decrements the non-periodic channel count), cleans the
 * channel up and returns it to the free list, then clears the QH's
 * descriptor list so stale descriptors are not re-processed.
 */
static void release_channel_ddma(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh)
{
	dwc_hc_t *hc = qh->channel;

	if (dwc_qh_is_non_per(qh)) {
		hcd->non_periodic_channels--;
	}
	else {
		/* Periodic: clear this channel's bits in the frame list. */
		update_frame_list(hcd, qh, 0);
	}

	/* Return the channel to the free pool if it is still bound here. */
	if (hc->qh) {
		dwc_otg_hc_cleanup(hcd->core_if, hc);
		DWC_CIRCLEQ_INSERT_TAIL(&hcd->free_hc_list, hc, hc_list_entry);
		hc->qh = NULL;
	}

	qh->channel = NULL;
	qh->ntd = 0;

	/* Wipe the descriptor list for the next assignment. */
	if (qh->desc_list) {
		dwc_memset(qh->desc_list, 0x00,
			   sizeof(dwc_otg_host_dma_desc_t) * max_desc_num(qh));
	}
}
00282
00294 int dwc_otg_hcd_qh_init_ddma(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh)
00295 {
00296 int retval = 0;
00297
00298 if (qh->do_split) {
00299 DWC_ERROR("SPLIT Transfers are not supported in Descriptor DMA.\n");
00300 return -1;
00301 }
00302
00303 retval = desc_list_alloc(qh);
00304
00305 if ((retval == 0) && (qh->ep_type == UE_ISOCHRONOUS || qh->ep_type == UE_INTERRUPT)) {
00306 if(!hcd->frame_list) {
00307 retval = frame_list_alloc(hcd);
00308
00309 if (retval == 0)
00310 per_sched_enable(hcd, MAX_FRLIST_EN_NUM);
00311 }
00312 }
00313
00314 qh->ntd = 0;
00315
00316 return retval;
00317 }
00318
00327 void dwc_otg_hcd_qh_free_ddma(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh)
00328 {
00329 desc_list_free(qh);
00330
00331
00332
00333
00334
00335
00336
00337
00338 if (qh->channel)
00339 release_channel_ddma(hcd, qh);
00340
00341 if ((qh->ep_type == UE_ISOCHRONOUS || qh->ep_type == UE_INTERRUPT)
00342 && !hcd->periodic_channels && hcd->frame_list) {
00343
00344 per_sched_disable(hcd);
00345 frame_list_free(hcd);
00346 }
00347 }
00348
00349 static uint8_t frame_to_desc_idx(dwc_otg_qh_t * qh, uint16_t frame_idx)
00350 {
00351 if (qh->dev_speed == DWC_OTG_EP_SPEED_HIGH) {
00352
00353
00354
00355
00356 return (frame_idx & ((MAX_DMA_DESC_NUM_HS_ISOC / 8) - 1)) * 8;
00357 }
00358 else {
00359 return (frame_idx & (MAX_DMA_DESC_NUM_GENERIC - 1));
00360 }
00361 }
00362
00363
00364
00365
00366
00367 static uint8_t calc_starting_frame(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh, uint8_t* skip_frames)
00368 {
00369 uint16_t frame = 0;
00370 hcd->frame_number = dwc_otg_hcd_get_frame_number(hcd);
00371
00372
00373
00374
00375
00376
00377
00378
00379
00380
00381
00382
00383
00384
00385
00386
00387 if (qh->dev_speed == DWC_OTG_EP_SPEED_HIGH) {
00388
00389
00390
00391
00392
00393
00394
00395
00396
00397 if (dwc_micro_frame_num(hcd->frame_number) >= 5) {
00398 *skip_frames = 2 * 8;
00399 frame = dwc_frame_num_inc(hcd->frame_number, *skip_frames);
00400 }
00401 else {
00402 *skip_frames = 1 * 8;
00403 frame = dwc_frame_num_inc(hcd->frame_number, *skip_frames);
00404 }
00405
00406 frame = dwc_full_frame_num(frame);
00407 } else {
00408
00409
00410
00411
00412
00413 *skip_frames = 1;
00414 frame = dwc_frame_num_inc(hcd->frame_number, 2);
00415 }
00416
00417 return frame;
00418 }
00419
00420
00421
00422
/*
 * Computes the initial descriptor index for an isochronous QH and stores
 * it in qh->td_first/td_last.
 *
 * @return The number of (micro)frames skipped while choosing the starting
 *         frame (see calc_starting_frame()).
 */
static uint8_t recalc_initial_desc_idx(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh)
{
	uint16_t frame = 0, fr_idx, fr_idx_tmp;
	uint8_t skip_frames = 0;

	if (qh->channel) {
		/* A transfer is already in progress on this QH: restart from
		 * a frame-list slot that preserves the original interval
		 * alignment relative to qh->sched_frame. */
		frame = calc_starting_frame(hcd, qh, &skip_frames);
		fr_idx_tmp = frame_list_idx(frame);
		fr_idx = (MAX_FRLIST_EN_NUM + frame_list_idx(qh->sched_frame) - fr_idx_tmp)
		    % frame_incr_val(qh);
		fr_idx = (fr_idx + fr_idx_tmp) % MAX_FRLIST_EN_NUM;
	}
	else {
		/* First start: schedule from scratch. */
		qh->sched_frame = calc_starting_frame(hcd, qh, &skip_frames);
		fr_idx = frame_list_idx(qh->sched_frame);
	}

	qh->td_first = qh->td_last = frame_to_desc_idx(qh, fr_idx);

	return skip_frames;
}
00462
00463 #define ISOC_URB_GIVEBACK_ASAP
00464
00465 #define MAX_ISOC_XFER_SIZE_FS 1023
00466 #define MAX_ISOC_XFER_SIZE_HS 3072
00467 #define DESCNUM_THRESHOLD 4
00468
00469 static void init_isoc_dma_desc(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh, uint8_t skip_frames)
00470 {
00471 struct dwc_otg_hcd_iso_packet_desc *frame_desc;
00472 dwc_otg_qtd_t *qtd;
00473 dwc_otg_host_dma_desc_t *dma_desc;
00474 uint16_t idx, inc, n_desc, ntd_max, max_xfer_size;
00475
00476 idx = qh->td_last;
00477 inc = qh->interval;
00478 n_desc = 0;
00479
00480 ntd_max = (max_desc_num(qh) + qh->interval - 1) / qh->interval;
00481 if (skip_frames && !qh->channel)
00482 ntd_max = ntd_max - skip_frames / qh->interval;
00483
00484 max_xfer_size = (qh->dev_speed == DWC_OTG_EP_SPEED_HIGH) ? MAX_ISOC_XFER_SIZE_HS
00485 : MAX_ISOC_XFER_SIZE_FS;
00486
00487 DWC_CIRCLEQ_FOREACH(qtd, &qh->qtd_list, qtd_list_entry) {
00488 while ((qh->ntd < ntd_max) && (qtd->isoc_frame_index_last < qtd->urb->packet_count)) {
00489
00490 dma_desc = &qh->desc_list[idx];
00491 dwc_memset(dma_desc, 0x00, sizeof(dwc_otg_host_dma_desc_t));
00492
00493 frame_desc = &qtd->urb->iso_descs[qtd->isoc_frame_index_last];
00494
00495 if (frame_desc->length > max_xfer_size)
00496 qh->n_bytes[idx] = max_xfer_size;
00497 else
00498 qh->n_bytes[idx] = frame_desc->length;
00499 dma_desc->status.b_isoc.n_bytes = qh->n_bytes[idx];
00500 dma_desc->status.b_isoc.a = 1;
00501
00502 dma_desc->buf = qtd->urb->dma + frame_desc->offset;
00503
00504 qh->ntd++;
00505
00506 qtd->isoc_frame_index_last++;
00507
00508 #ifdef ISOC_URB_GIVEBACK_ASAP
00509
00510
00511
00512
00513 if (qtd->isoc_frame_index_last == qtd->urb->packet_count)
00514 dma_desc->status.b_isoc.ioc = 1;
00515
00516 #endif
00517 idx = desclist_idx_inc(idx, inc, qh->dev_speed);
00518 n_desc++;
00519
00520 }
00521 qtd->in_process = 1;
00522 }
00523
00524 qh->td_last = idx;
00525
00526 #ifdef ISOC_URB_GIVEBACK_ASAP
00527
00528 if (qh->ntd == ntd_max) {
00529 idx = desclist_idx_dec(qh->td_last, inc, qh->dev_speed);
00530 qh->desc_list[idx].status.b_isoc.ioc = 1;
00531 }
00532 #else
00533
00534
00535
00536
00537
00538
00539
00540 if (n_desc > DESCNUM_THRESHOLD) {
00541
00542
00543
00544
00545
00546
00547
00548
00549 idx = dwc_desclist_idx_dec(idx, inc * ((qh->ntd + 1) / 2), qh->dev_speed);
00550 }
00551 else {
00552
00553
00554
00555
00556
00557 idx = dwc_desclist_idx_dec(qh->td_last, inc, qh->dev_speed);
00558 }
00559
00560 qh->desc_list[idx].status.b_isoc.ioc = 1;
00561 #endif
00562 }
00563
00564
/*
 * Fills the QH's descriptor list for control/bulk/interrupt transfers:
 * one or more descriptors per QTD, each carrying at most MAX_DMA_DESC_SIZE
 * bytes (rounded to whole packets for IN endpoints). IOC and EOL are set
 * on the last descriptor written.
 */
static void init_non_isoc_dma_desc(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh)
{
	dwc_hc_t *hc;
	dwc_otg_host_dma_desc_t *dma_desc;
	dwc_otg_qtd_t *qtd;
	int num_packets, len, n_desc = 0;

	hc = qh->channel;

	DWC_CIRCLEQ_FOREACH(qtd, &qh->qtd_list, qtd_list_entry) {

		/* For QTDs after the first, re-derive the channel transfer
		 * buffer/length from this QTD's URB progress. */
		if (n_desc) {
			hc->xfer_buff = (uint8_t *) qtd->urb->dma + qtd->urb->actual_length;
			hc->xfer_len = qtd->urb->length - qtd->urb->actual_length;
		}

		qtd->n_desc = 0;

		do {
			dma_desc = &qh->desc_list[n_desc];
			len = hc->xfer_len;

			/* Cap each descriptor at the largest whole-packet
			 * chunk that fits in MAX_DMA_DESC_SIZE. */
			if (len > MAX_DMA_DESC_SIZE)
				len = MAX_DMA_DESC_SIZE - hc->max_packet + 1;

			if (hc->ep_is_in) {
				if (len > 0) {
					num_packets = (len + hc->max_packet - 1) / hc->max_packet;
				}
				else {
					/* A zero-length IN still needs one packet. */
					num_packets = 1;
				}
				/* IN requests must be whole packets. */
				len = num_packets * hc->max_packet;
			}

			dma_desc->status.b.n_bytes = len;

			qh->n_bytes[n_desc] = len;

			/* Mark the SETUP-packet descriptor for control EPs. */
			if ((qh->ep_type == UE_CONTROL) && (qtd->control_phase == DWC_OTG_CONTROL_SETUP))
				dma_desc->status.b.sup = 1;

			dma_desc->status.b.a = 1;	/* mark active */

			dma_desc->buf = (uint32_t) hc->xfer_buff;

			/* Advance the channel buffer/length; the rounded-up
			 * final IN chunk may exceed the remaining length. */
			if (len > hc->xfer_len) {
				hc->xfer_len = 0;
			}
			else {
				hc->xfer_buff += len;
				hc->xfer_len -= len;
			}

			qtd->n_desc++;
			n_desc++;
		}
		while ((hc->xfer_len > 0) && (n_desc != MAX_DMA_DESC_NUM_GENERIC));

		qtd->in_process = 1;

		/* List is full: remaining QTDs wait for the next pass. */
		if (n_desc == MAX_DMA_DESC_NUM_GENERIC)
			break;
	}

	if (n_desc) {
		/* Interrupt on, and terminate the list at, the last
		 * descriptor written. */
		qh->desc_list[n_desc - 1].status.b.ioc = 1;
		qh->desc_list[n_desc - 1].status.b.eol = 1;
		hc->ntd = n_desc;
	}
}
00657
/*
 * Initializes the descriptor list for the QH's pending transfers and
 * starts (or, for an already-running ISOC stream, refills) the transfer
 * on the assigned host channel.
 */
void dwc_otg_hcd_start_xfer_ddma(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh)
{
	dwc_hc_t *hc = qh->channel;
	uint8_t skip_frames = 0;

	switch (hc->ep_type) {
	case DWC_OTG_EP_TYPE_CONTROL:
	case DWC_OTG_EP_TYPE_BULK:
		init_non_isoc_dma_desc(hcd, qh);
		dwc_otg_hc_start_transfer_ddma(hcd->core_if, hc);
		break;
	case DWC_OTG_EP_TYPE_INTR:
		init_non_isoc_dma_desc(hcd, qh);
		/* Add the channel to the periodic frame list before start. */
		update_frame_list(hcd, qh, 1);
		dwc_otg_hc_start_transfer_ddma(hcd->core_if, hc);
		break;
	case DWC_OTG_EP_TYPE_ISOC:
		/* No descriptors active yet: pick the starting index. */
		if (!qh->ntd)
			skip_frames = recalc_initial_desc_idx(hcd, qh);

		init_isoc_dma_desc(hcd, qh, skip_frames);

		if (!hc->xfer_started) {
			update_frame_list(hcd, qh, 1);

			/* Program the channel for the whole descriptor list;
			 * completed descriptors are refilled as the stream
			 * runs, so the channel is only started once. */
			hc->ntd = max_desc_num(qh);

			dwc_otg_hc_start_transfer_ddma(hcd->core_if, hc);
		}
		break;
	default:
		break;
	}
}
00723
/*
 * Processes completed isochronous descriptors starting at qh->td_first:
 * records each packet's status/actual_length, gives back URBs whose
 * packets are all done, and stops at the descriptor that raised IOC (or
 * when the scan wraps back to td_first). Dequeue and fatal bus errors
 * are handled up front by failing/detaching every queued QTD.
 */
static void complete_isoc_xfer_ddma(dwc_otg_hcd_t *hcd,
				    dwc_hc_t *hc,
				    dwc_otg_hc_regs_t *hc_regs,
				    dwc_otg_halt_status_e halt_status)
{
	struct dwc_otg_hcd_iso_packet_desc *frame_desc;
	dwc_otg_qtd_t *qtd, *qtd_tmp;
	dwc_otg_qh_t *qh;
	dwc_otg_host_dma_desc_t *dma_desc;
	uint16_t idx, remain;
	uint8_t urb_compl;

	qh = hc->qh;
	idx = qh->td_first;

	if (hc->halt_status == DWC_OTG_HC_XFER_URB_DEQUEUE) {
		/* URB(s) being dequeued: just detach QTDs from processing. */
		DWC_CIRCLEQ_FOREACH_SAFE(qtd, qtd_tmp, &hc->qh->qtd_list, qtd_list_entry)
		    qtd->in_process = 0;
		return;
	}
	else if ((halt_status == DWC_OTG_HC_XFER_AHB_ERR) ||
		 (halt_status == DWC_OTG_HC_XFER_BABBLE_ERR)) {
		/* Fatal bus error: fail every packet of every queued URB and
		 * give them all back with the corresponding error. */
		int err = (halt_status == DWC_OTG_HC_XFER_AHB_ERR)
		    ? (-DWC_E_IO)
		    : (-DWC_E_OVERFLOW);

		DWC_CIRCLEQ_FOREACH_SAFE(qtd, qtd_tmp, &hc->qh->qtd_list, qtd_list_entry) {
			for (idx = 0; idx < qtd->urb->packet_count; idx++) {
				frame_desc = &qtd->urb->iso_descs[idx];
				frame_desc->status = err;
			}
			hcd->fops->complete(hcd, qtd->urb->priv, qtd->urb, err);
			dwc_otg_hcd_qtd_remove_and_free(hcd, qtd, qh);
		}
		return;
	}

	DWC_CIRCLEQ_FOREACH_SAFE(qtd, qtd_tmp, &hc->qh->qtd_list, qtd_list_entry) {

		/* QTDs not yet in process have no descriptors to scan. */
		if (!qtd->in_process)
			break;

		urb_compl = 0;

		do {
			dma_desc = &qh->desc_list[idx];

			frame_desc = &qtd->urb->iso_descs[qtd->isoc_frame_index];
			/* For IN, n_bytes now holds the residue (bytes not
			 * filled); for OUT the whole length was sent. */
			remain = hc->ep_is_in ? dma_desc->status.b_isoc.n_bytes : 0;

			if (dma_desc->status.b_isoc.sts == DMA_DESC_STS_PKTERR) {
				/* Packet-level error reported by the core. */
				qtd->urb->error_count++;
				frame_desc->actual_length = qh->n_bytes[idx] - remain;
				frame_desc->status = -DWC_E_PROTOCOL;
			}
			else {
				frame_desc->actual_length = qh->n_bytes[idx] - remain;
				frame_desc->status = 0;
			}

			if (++qtd->isoc_frame_index == qtd->urb->packet_count) {
				/* All packets of this URB done: give it back. */
				hcd->fops->complete(hcd, qtd->urb->priv, qtd->urb, 0);
				dwc_otg_hcd_qtd_remove_and_free(hcd, qtd, qh);

				/* The completion callback may have dequeued
				 * the remaining URBs; stop touching the list
				 * in that case. */
				if (hc->halt_status == DWC_OTG_HC_XFER_URB_DEQUEUE) {
					return;
				}

				urb_compl = 1;

			}

			qh->ntd--;

			/* IOC marks the last descriptor serviced this pass. */
			if (dma_desc->status.b_isoc.ioc) {
				idx = desclist_idx_inc(idx, qh->interval, hc->speed);
				goto stop_scan;
			}

			idx = desclist_idx_inc(idx, qh->interval, hc->speed);

			/* URB finished: move on to the next QTD. */
			if (urb_compl)
				break;
		}
		while (idx != qh->td_first);
	}
stop_scan:
	/* Resume the next scan where this one stopped. */
	qh->td_first = idx;
}
00844
/*
 * Updates URB state from one completed non-isochronous descriptor.
 *
 * @param xfer_done Out: set to 1 when the transfer (or, for control EPs,
 *                  the current phase) has finished.
 *
 * @return 1 on a fatal/halting error (caller must stop scanning this QH),
 *         0 otherwise.
 */
uint8_t update_non_isoc_urb_state_ddma(dwc_otg_hcd_t * hcd,
				       dwc_hc_t * hc,
				       dwc_otg_qtd_t * qtd,
				       dwc_otg_host_dma_desc_t * dma_desc,
				       dwc_otg_halt_status_e halt_status,
				       uint32_t n_bytes,
				       uint8_t *xfer_done)
{
	/* For IN, n_bytes holds the residue left in the descriptor. */
	uint16_t remain = hc->ep_is_in ? dma_desc->status.b.n_bytes : 0;
	dwc_otg_hcd_urb_t *urb = qtd->urb;

	if (halt_status == DWC_OTG_HC_XFER_AHB_ERR) {
		urb->status = -DWC_E_IO;
		return 1;
	}
	if (dma_desc->status.b.sts == DMA_DESC_STS_PKTERR) {
		/* Map the channel halt reason onto a URB error code. */
		switch (halt_status) {
		case DWC_OTG_HC_XFER_STALL:
			urb->status = -DWC_E_PIPE;
			break;
		case DWC_OTG_HC_XFER_BABBLE_ERR:
			urb->status = -DWC_E_OVERFLOW;
			break;
		case DWC_OTG_HC_XFER_XACT_ERR:
			urb->status = -DWC_E_PROTOCOL;
			break;
		default:
			DWC_ERROR("%s: Unhandled descriptor error status (%d)\n", __func__,
				  halt_status);
			break;
		}
		return 1;
	}

	if (dma_desc->status.b.a == 1) {
		/* Core has not processed this descriptor yet; stop here. */
		DWC_DEBUGPL(DBG_HCDV, "Active descriptor encountered on channel %d\n", hc->hc_num);
		return 0;
	}

	if (hc->ep_type == DWC_OTG_EP_TYPE_CONTROL) {
		if (qtd->control_phase == DWC_OTG_CONTROL_DATA) {
			urb->actual_length += n_bytes - remain;
			/* Short packet (residue) or full length ends the
			 * data phase. */
			if (remain || urb->actual_length == urb->length) {
				*xfer_done = 1;
			}
		}
		else if (qtd->control_phase == DWC_OTG_CONTROL_STATUS) {
			urb->status = 0;
			*xfer_done = 1;
		}
		/* SETUP phase: nothing to account for here. */
	}
	else {
		/* Bulk/interrupt: accumulate and check for completion. */
		urb->actual_length += n_bytes - remain;
		if (remain || urb->actual_length == urb->length) {
			urb->status = 0;
			*xfer_done = 1;
		}
	}

	return 0;
}
00916
/*
 * Scans the completed non-isochronous descriptors for the QH: updates URB
 * state, advances the control-transfer phase machine, gives back finished
 * URBs, and saves the data toggle / ping state on the way out.
 */
static void complete_non_isoc_xfer_ddma(dwc_otg_hcd_t * hcd,
					dwc_hc_t * hc,
					dwc_otg_hc_regs_t * hc_regs,
					dwc_otg_halt_status_e halt_status)
{
	dwc_otg_hcd_urb_t *urb = NULL;
	dwc_otg_qtd_t *qtd, *qtd_tmp;
	dwc_otg_qh_t *qh;
	dwc_otg_host_dma_desc_t *dma_desc;
	uint32_t n_bytes, n_desc, i;
	uint8_t failed = 0, xfer_done;

	n_desc = 0;

	qh = hc->qh;

	/* URB dequeue in progress: just detach QTDs from processing. */
	if (hc->halt_status == DWC_OTG_HC_XFER_URB_DEQUEUE) {
		DWC_CIRCLEQ_FOREACH_SAFE(qtd, qtd_tmp, &hc->qh->qtd_list, qtd_list_entry) {
			qtd->in_process = 0;
		}
		return;
	}

	DWC_CIRCLEQ_FOREACH_SAFE(qtd, qtd_tmp, &qh->qtd_list, qtd_list_entry) {

		urb = qtd->urb;

		n_bytes = 0;
		xfer_done = 0;

		/* Walk the descriptors written for this QTD. */
		for (i = 0; i < qtd->n_desc; i++) {
			dma_desc = &qh->desc_list[n_desc];

			n_bytes = qh->n_bytes[n_desc];

			failed = update_non_isoc_urb_state_ddma(hcd, hc, qtd, dma_desc,
								halt_status, n_bytes, &xfer_done);

			if (failed || (xfer_done && (urb->status != -DWC_E_IN_PROGRESS))) {
				/* URB finished (or errored): give it back. */
				hcd->fops->complete(hcd, urb->priv, urb, urb->status);
				dwc_otg_hcd_qtd_remove_and_free(hcd, qtd, qh);

				if (failed)
					goto stop_scan;
			}
			else if (qh->ep_type == UE_CONTROL) {
				/* Advance the control-transfer phase machine. */
				if (qtd->control_phase == DWC_OTG_CONTROL_SETUP) {
					if (urb->length > 0) {
						qtd->control_phase = DWC_OTG_CONTROL_DATA;
					} else {
						qtd->control_phase = DWC_OTG_CONTROL_STATUS;
					}
					DWC_DEBUGPL(DBG_HCDV, " Control setup transaction done\n");
				}
				else if (qtd->control_phase == DWC_OTG_CONTROL_DATA) {
					if (xfer_done) {
						qtd->control_phase = DWC_OTG_CONTROL_STATUS;
						DWC_DEBUGPL(DBG_HCDV, " Control data transfer done\n");
					} else if (i + 1 == qtd->n_desc) {
						/* Data stage continues in the
						 * next pass: remember the data
						 * toggle now. */
						dwc_otg_hcd_save_data_toggle(hc, hc_regs, qtd);
					}
				}
			}

			n_desc++;
		}

	}

stop_scan:

	if (qh->ep_type != UE_CONTROL) {
		/* On STALL the endpoint resets to DATA0; otherwise read the
		 * current toggle back from the channel.
		 * NOTE(review): qtd here is whatever the scan loop last left
		 * it as and may already have been freed above -- confirm
		 * dwc_otg_hcd_save_data_toggle does not dereference it. */
		if (halt_status == DWC_OTG_HC_XFER_STALL) {
			qh->data_toggle = DWC_OTG_HC_PID_DATA0;
		}
		else {
			dwc_otg_hcd_save_data_toggle(hc, hc_regs, qtd);
		}
	}

	if (halt_status == DWC_OTG_HC_XFER_COMPLETE) {
		hcint_data_t hcint;
		hcint.d32 = dwc_read_reg32(&hc_regs->hcint);
		if (hcint.b.nyet) {
			/* NYET on the last transaction: the device is not
			 * ready yet, so start the next OUT with a PING
			 * protocol until it is. */
			qh->ping_state = 1;
			clear_hc_int(hc_regs, nyet);
		}

	}

}
01024
/*
 * Top-level descriptor-DMA completion handler for a halted channel:
 * processes completed descriptors, releases or re-arms the channel, and
 * kicks the scheduler to queue further transactions.
 */
void dwc_otg_hcd_complete_xfer_ddma(dwc_otg_hcd_t *hcd,
				    dwc_hc_t *hc,
				    dwc_otg_hc_regs_t *hc_regs,
				    dwc_otg_halt_status_e halt_status)
{
	uint8_t continue_isoc_xfer = 0;
	dwc_otg_transaction_type_e tr_type;
	dwc_otg_qh_t *qh = hc->qh;

	if (hc->ep_type == DWC_OTG_EP_TYPE_ISOC) {

		complete_isoc_xfer_ddma(hcd, hc, hc_regs, halt_status);

		/* Release the channel on error or when no more QTDs remain;
		 * otherwise keep the ISOC stream running on this channel. */
		if (halt_status != DWC_OTG_HC_XFER_COMPLETE ||
		    DWC_CIRCLEQ_EMPTY(&qh->qtd_list)) {

			/* Channel completed normally: still needs a halt. */
			if (halt_status == DWC_OTG_HC_XFER_COMPLETE) {
				dwc_otg_hc_halt(hcd->core_if, hc, halt_status);
			}

			release_channel_ddma(hcd, qh);
			dwc_otg_hcd_qh_remove(hcd, qh);
		}
		else {
			/* Keep the QH assigned so the stream continues. */
			DWC_LIST_MOVE_HEAD(&hcd->periodic_sched_assigned,
					   &qh->qh_list_entry);
			continue_isoc_xfer = 1;

		}
	}
	else {
		/* Control/bulk/interrupt: always release the channel, then
		 * requeue the QH if it still has pending QTDs. */
		complete_non_isoc_xfer_ddma(hcd, hc, hc_regs, halt_status);

		release_channel_ddma(hcd, qh);

		dwc_otg_hcd_qh_remove(hcd, qh);

		if (!DWC_CIRCLEQ_EMPTY(&qh->qtd_list)) {

			dwc_otg_hcd_qh_add(hcd, qh);
		}

	}

	/* A channel may have been freed: try to queue more transactions. */
	tr_type = dwc_otg_hcd_select_transactions(hcd);
	if (tr_type != DWC_OTG_TRANSACTION_NONE || continue_isoc_xfer) {
		if (continue_isoc_xfer) {
			if (tr_type == DWC_OTG_TRANSACTION_NONE) {
				tr_type = DWC_OTG_TRANSACTION_PERIODIC;
			} else if (tr_type == DWC_OTG_TRANSACTION_NON_PERIODIC) {
				tr_type = DWC_OTG_TRANSACTION_ALL;
			}
		}
		dwc_otg_hcd_queue_transactions(hcd, tr_type);
	}
}
01105
01106 #endif