Lines matching refs: qh
80 struct dwc2_qh *qh) in dwc2_check_periodic_bandwidth() argument
87 if (qh->dev_speed == USB_SPEED_HIGH || qh->do_split) { in dwc2_check_periodic_bandwidth()
92 max_claimed_usecs = 100 - qh->host_us; in dwc2_check_periodic_bandwidth()
98 max_claimed_usecs = 900 - qh->host_us; in dwc2_check_periodic_bandwidth()
104 __func__, hsotg->periodic_usecs, qh->host_us); in dwc2_check_periodic_bandwidth()
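The fragments from dwc2_check_periodic_bandwidth() show the two bandwidth budgets in play: high-speed and split traffic is checked against 100 us of each 125 us microframe (the USB 80% periodic limit), everything else against 900 us of each 1 ms frame (the 90% limit). Below is a minimal standalone sketch of that check; the function name, parameters, and the -1 error value are illustrative stand-ins, not the driver's identifiers.

#include <stdbool.h>
#include <stdio.h>

static int check_periodic_bandwidth(int claimed_us, int new_qh_us,
				    bool high_speed_or_split)
{
	int max_claimed_usecs;

	if (high_speed_or_split)
		max_claimed_usecs = 100 - new_qh_us;	/* per 125 us microframe */
	else
		max_claimed_usecs = 900 - new_qh_us;	/* per 1 ms frame */

	if (claimed_us > max_claimed_usecs)
		return -1;	/* not enough periodic bandwidth left */
	return 0;
}

int main(void)
{
	/* 70 us already claimed, new QH wants 40 us: 70 > 100 - 40, so reject */
	printf("%d\n", check_periodic_bandwidth(70, 40, true));
	return 0;
}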
345 struct dwc2_qh *qh) in dwc2_get_ls_map() argument
350 if (WARN_ON(!qh->dwc_tt)) in dwc2_get_ls_map()
354 map = qh->dwc_tt->periodic_bitmaps; in dwc2_get_ls_map()
355 if (qh->dwc_tt->usb_tt->multi) in dwc2_get_ls_map()
356 map += DWC2_ELEMENTS_PER_LS_BITMAP * (qh->ttport - 1); in dwc2_get_ls_map()
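The dwc2_get_ls_map() fragments show how the low-speed bitmap is picked out of the transaction translator's storage: single-TT hubs share one bitmap, while multi-TT hubs keep one bitmap per downstream port, indexed by the 1-based ttport. A simplified stand-in follows; the element count is an assumption, not the driver's DWC2_ELEMENTS_PER_LS_BITMAP value.

#include <stdbool.h>

#define ELEMENTS_PER_LS_BITMAP	4	/* assumed size of one per-port bitmap */

static unsigned long *get_ls_map(unsigned long *periodic_bitmaps,
				 bool multi_tt, int ttport)
{
	unsigned long *map = periodic_bitmaps;

	/* multi-TT hubs have an independent schedule per port */
	if (multi_tt)
		map += ELEMENTS_PER_LS_BITMAP * (ttport - 1);

	return map;
}

int main(void)
{
	unsigned long bitmaps[ELEMENTS_PER_LS_BITMAP * 4] = { 0 };

	/* port 3 of a multi-TT hub lands two bitmaps past the start */
	return get_ls_map(bitmaps, true, 3) ==
		bitmaps + 2 * ELEMENTS_PER_LS_BITMAP ? 0 : 1;
}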
463 struct dwc2_qh *qh; member
476 dwc2_sch_dbg(print_data->hsotg, "QH=%p ...%s\n", print_data->qh, str); in dwc2_qh_print()
486 struct dwc2_qh *qh) in dwc2_qh_schedule_print() argument
488 struct dwc2_qh_print_data print_data = { hsotg, qh }; in dwc2_qh_schedule_print()
497 if (qh->schedule_low_speed) { in dwc2_qh_schedule_print()
498 unsigned long *map = dwc2_get_ls_map(hsotg, qh); in dwc2_qh_schedule_print()
501 qh, qh->device_us, in dwc2_qh_schedule_print()
502 DWC2_ROUND_US_TO_SLICE(qh->device_us), in dwc2_qh_schedule_print()
503 DWC2_US_PER_SLICE * qh->ls_start_schedule_slice); in dwc2_qh_schedule_print()
508 qh, map); in dwc2_qh_schedule_print()
515 for (i = 0; i < qh->num_hs_transfers; i++) { in dwc2_qh_schedule_print()
516 struct dwc2_hs_transfer_time *trans_time = qh->hs_transfers + i; in dwc2_qh_schedule_print()
524 qh, i, trans_time->duration_us, uframe, rel_us); in dwc2_qh_schedule_print()
526 if (qh->num_hs_transfers) { in dwc2_qh_schedule_print()
527 dwc2_sch_dbg(hsotg, "QH=%p Whole high speed map now:\n", qh); in dwc2_qh_schedule_print()
536 struct dwc2_qh *qh) {}; in dwc2_qh_schedule_print() argument
554 static int dwc2_ls_pmap_schedule(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh, in dwc2_ls_pmap_schedule() argument
557 int slices = DIV_ROUND_UP(qh->device_us, DWC2_US_PER_SLICE); in dwc2_ls_pmap_schedule()
558 unsigned long *map = dwc2_get_ls_map(hsotg, qh); in dwc2_ls_pmap_schedule()
578 qh->device_interval, search_slice, false); in dwc2_ls_pmap_schedule()
583 qh->ls_start_schedule_slice = slice; in dwc2_ls_pmap_schedule()
594 struct dwc2_qh *qh) in dwc2_ls_pmap_unschedule() argument
596 int slices = DIV_ROUND_UP(qh->device_us, DWC2_US_PER_SLICE); in dwc2_ls_pmap_unschedule()
597 unsigned long *map = dwc2_get_ls_map(hsotg, qh); in dwc2_ls_pmap_unschedule()
604 DWC2_LS_SCHEDULE_FRAMES, slices, qh->device_interval, in dwc2_ls_pmap_unschedule()
605 qh->ls_start_schedule_slice); in dwc2_ls_pmap_unschedule()
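Both dwc2_ls_pmap_schedule() and dwc2_ls_pmap_unschedule() start from the same conversion: the device's time on the low-speed bus is rounded up to whole scheduler slices before the bitmap is touched. A sketch of that slice math; the 25 us slice width here is an assumption standing in for DWC2_US_PER_SLICE.

#include <stdio.h>

#define US_PER_SLICE		25	/* assumed slice width in microseconds */
#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))

int main(void)
{
	int device_us = 117;	/* example low-speed transfer time */
	int slices = DIV_ROUND_UP(device_us, US_PER_SLICE);

	printf("%d us rounds up to %d slices of %d us\n",
	       device_us, slices, US_PER_SLICE);
	return 0;
}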
628 static int dwc2_hs_pmap_schedule(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh, in dwc2_hs_pmap_schedule() argument
631 struct dwc2_hs_transfer_time *trans_time = qh->hs_transfers + index; in dwc2_hs_pmap_schedule()
637 qh->host_interval, trans_time->start_schedule_us, in dwc2_hs_pmap_schedule()
655 struct dwc2_qh *qh, int index) in dwc2_hs_pmap_unschedule() argument
657 struct dwc2_hs_transfer_time *trans_time = qh->hs_transfers + index; in dwc2_hs_pmap_unschedule()
662 qh->host_interval, trans_time->start_schedule_us); in dwc2_hs_pmap_unschedule()
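The "qh->hs_transfers + index" expression in dwc2_hs_pmap_schedule()/_unschedule() implies an array of per-transfer records on each QH: every piece of a split carries its own start offset and duration in the high-speed schedule. A simplified stand-in for that bookkeeping; the struct below is not the driver's dwc2_hs_transfer_time.

#include <stdio.h>

struct hs_transfer_time {
	unsigned int start_schedule_us;	/* offset within the HS schedule */
	unsigned int duration_us;	/* time reserved for this transfer */
};

int main(void)
{
	struct hs_transfer_time hs_transfers[] = {
		{   0, 25 },
		{ 125, 25 },
		{ 250, 25 },
	};
	int index = 1;
	struct hs_transfer_time *trans_time = hs_transfers + index;

	printf("transfer %d: starts at %u us, lasts %u us\n",
	       index, trans_time->start_schedule_us, trans_time->duration_us);
	return 0;
}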
679 struct dwc2_qh *qh) in dwc2_uframe_schedule_split() argument
681 int bytecount = qh->maxp_mult * qh->maxp; in dwc2_uframe_schedule_split()
690 host_interval_in_sched = gcd(qh->host_interval, in dwc2_uframe_schedule_split()
718 if (qh->schedule_low_speed) { in dwc2_uframe_schedule_split()
719 err = dwc2_ls_pmap_schedule(hsotg, qh, ls_search_slice); in dwc2_uframe_schedule_split()
738 start_s_uframe = qh->ls_start_schedule_slice / in dwc2_uframe_schedule_split()
753 if (qh->schedule_low_speed) in dwc2_uframe_schedule_split()
754 dwc2_ls_pmap_unschedule(hsotg, qh); in dwc2_uframe_schedule_split()
756 (qh->ls_start_schedule_slice / in dwc2_uframe_schedule_split()
796 if (qh->ep_type == USB_ENDPOINT_XFER_ISOC && !qh->ep_is_in) in dwc2_uframe_schedule_split()
803 DIV_ROUND_UP(188 * (qh->ls_start_schedule_slice % in dwc2_uframe_schedule_split()
825 if (!qh->ep_is_in && in dwc2_uframe_schedule_split()
829 qh, first_data_bytes, bytecount); in dwc2_uframe_schedule_split()
830 if (qh->schedule_low_speed) in dwc2_uframe_schedule_split()
831 dwc2_ls_pmap_unschedule(hsotg, qh); in dwc2_uframe_schedule_split()
838 qh->num_hs_transfers = 1 + DIV_ROUND_UP(other_data_bytes, 188); in dwc2_uframe_schedule_split()
845 if (qh->ep_type == USB_ENDPOINT_XFER_INT) { in dwc2_uframe_schedule_split()
847 qh->num_hs_transfers += 2; in dwc2_uframe_schedule_split()
849 qh->num_hs_transfers += 3; in dwc2_uframe_schedule_split()
851 if (qh->ep_is_in) { in dwc2_uframe_schedule_split()
870 if (qh->ep_is_in) { in dwc2_uframe_schedule_split()
874 qh->num_hs_transfers++; in dwc2_uframe_schedule_split()
877 last = rel_uframe + qh->num_hs_transfers + 1; in dwc2_uframe_schedule_split()
881 qh->num_hs_transfers += 2; in dwc2_uframe_schedule_split()
883 qh->num_hs_transfers += 1; in dwc2_uframe_schedule_split()
887 qh->num_hs_transfers--; in dwc2_uframe_schedule_split()
903 qh->hs_transfers[0].duration_us = HS_USECS_ISO(first_count); in dwc2_uframe_schedule_split()
904 for (i = 1; i < qh->num_hs_transfers - 1; i++) in dwc2_uframe_schedule_split()
905 qh->hs_transfers[i].duration_us = in dwc2_uframe_schedule_split()
907 if (qh->num_hs_transfers > 1) in dwc2_uframe_schedule_split()
908 qh->hs_transfers[qh->num_hs_transfers - 1].duration_us = in dwc2_uframe_schedule_split()
916 qh->hs_transfers[0].start_schedule_us = in dwc2_uframe_schedule_split()
918 for (i = 1; i < qh->num_hs_transfers; i++) in dwc2_uframe_schedule_split()
919 qh->hs_transfers[i].start_schedule_us = in dwc2_uframe_schedule_split()
925 for (i = 0; i < qh->num_hs_transfers; i++) { in dwc2_uframe_schedule_split()
926 err = dwc2_hs_pmap_schedule(hsotg, qh, true, i); in dwc2_uframe_schedule_split()
932 if (i == qh->num_hs_transfers) in dwc2_uframe_schedule_split()
936 dwc2_hs_pmap_unschedule(hsotg, qh, i); in dwc2_uframe_schedule_split()
938 if (qh->schedule_low_speed) in dwc2_uframe_schedule_split()
939 dwc2_ls_pmap_unschedule(hsotg, qh); in dwc2_uframe_schedule_split()
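The sizing arithmetic visible in dwc2_uframe_schedule_split() follows the USB split-transaction rule that at most 188 bytes cross the high-speed bus per microframe: the total payload (maxp * maxp_mult) becomes one first chunk plus one extra high-speed transfer per remaining 188-byte piece, with further slots added afterwards for interrupt endpoints and IN completion handshakes. A standalone sketch of the core calculation; here first_data_bytes is simply capped at 188, whereas the driver also accounts for where the low-speed schedule starts.

#include <stdio.h>

#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))

int main(void)
{
	int maxp = 320, maxp_mult = 1;		/* example full-speed isoc payload */
	int bytecount = maxp_mult * maxp;
	int first_data_bytes = bytecount < 188 ? bytecount : 188;
	int other_data_bytes = bytecount - first_data_bytes;
	int num_hs_transfers = 1 + DIV_ROUND_UP(other_data_bytes, 188);

	printf("%d bytes -> %d high-speed transfer(s) of <= 188 bytes\n",
	       bytecount, num_hs_transfers);
	return 0;
}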
960 static int dwc2_uframe_schedule_hs(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh) in dwc2_uframe_schedule_hs() argument
963 WARN_ON(qh->host_us != qh->device_us); in dwc2_uframe_schedule_hs()
964 WARN_ON(qh->host_interval != qh->device_interval); in dwc2_uframe_schedule_hs()
965 WARN_ON(qh->num_hs_transfers != 1); in dwc2_uframe_schedule_hs()
968 qh->hs_transfers[0].start_schedule_us = 0; in dwc2_uframe_schedule_hs()
969 qh->hs_transfers[0].duration_us = qh->host_us; in dwc2_uframe_schedule_hs()
971 return dwc2_hs_pmap_schedule(hsotg, qh, false, 0); in dwc2_uframe_schedule_hs()
983 static int dwc2_uframe_schedule_ls(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh) in dwc2_uframe_schedule_ls() argument
986 WARN_ON(qh->host_us != qh->device_us); in dwc2_uframe_schedule_ls()
987 WARN_ON(qh->host_interval != qh->device_interval); in dwc2_uframe_schedule_ls()
988 WARN_ON(!qh->schedule_low_speed); in dwc2_uframe_schedule_ls()
991 return dwc2_ls_pmap_schedule(hsotg, qh, 0); in dwc2_uframe_schedule_ls()
1003 static int dwc2_uframe_schedule(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh) in dwc2_uframe_schedule() argument
1007 if (qh->dev_speed == USB_SPEED_HIGH) in dwc2_uframe_schedule()
1008 ret = dwc2_uframe_schedule_hs(hsotg, qh); in dwc2_uframe_schedule()
1009 else if (!qh->do_split) in dwc2_uframe_schedule()
1010 ret = dwc2_uframe_schedule_ls(hsotg, qh); in dwc2_uframe_schedule()
1012 ret = dwc2_uframe_schedule_split(hsotg, qh); in dwc2_uframe_schedule()
1015 dwc2_sch_dbg(hsotg, "QH=%p Failed to schedule %d\n", qh, ret); in dwc2_uframe_schedule()
1017 dwc2_qh_schedule_print(hsotg, qh); in dwc2_uframe_schedule()
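The dwc2_uframe_schedule() fragments show a three-way dispatch: high-speed endpoints take the HS-only path, full/low-speed endpoints that need no split (the root port itself runs at full speed) take the LS-only path, and anything behind a high-speed hub's transaction translator goes through the split scheduler. A sketch of just that branch, with stand-in names for the three paths.

#include <stdbool.h>

enum dev_speed { SPEED_LOW, SPEED_FULL, SPEED_HIGH };

/* stand-ins for the three scheduling paths */
static int schedule_hs(void)    { return 0; }
static int schedule_ls(void)    { return 0; }
static int schedule_split(void) { return 0; }

static int uframe_schedule(enum dev_speed speed, bool do_split)
{
	if (speed == SPEED_HIGH)
		return schedule_hs();
	else if (!do_split)
		return schedule_ls();
	else
		return schedule_split();
}

int main(void)
{
	/* a full-speed device behind a high-speed hub needs splits */
	return uframe_schedule(SPEED_FULL, true);
}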
1028 static void dwc2_uframe_unschedule(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh) in dwc2_uframe_unschedule() argument
1032 for (i = 0; i < qh->num_hs_transfers; i++) in dwc2_uframe_unschedule()
1033 dwc2_hs_pmap_unschedule(hsotg, qh, i); in dwc2_uframe_unschedule()
1035 if (qh->schedule_low_speed) in dwc2_uframe_unschedule()
1036 dwc2_ls_pmap_unschedule(hsotg, qh); in dwc2_uframe_unschedule()
1038 dwc2_sch_dbg(hsotg, "QH=%p Unscheduled\n", qh); in dwc2_uframe_unschedule()
1056 static void dwc2_pick_first_frame(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh) in dwc2_pick_first_frame() argument
1083 if (qh->do_split) in dwc2_pick_first_frame()
1089 if (qh->dev_speed == USB_SPEED_HIGH || qh->do_split) { in dwc2_pick_first_frame()
1096 WARN_ON(qh->num_hs_transfers < 1); in dwc2_pick_first_frame()
1098 relative_frame = qh->hs_transfers[0].start_schedule_us / in dwc2_pick_first_frame()
1102 interval = gcd(qh->host_interval, DWC2_HS_SCHEDULE_UFRAMES); in dwc2_pick_first_frame()
1112 relative_frame = qh->ls_start_schedule_slice / in dwc2_pick_first_frame()
1114 interval = gcd(qh->host_interval, DWC2_LS_SCHEDULE_FRAMES); in dwc2_pick_first_frame()
1153 qh->next_active_frame = next_active_frame; in dwc2_pick_first_frame()
1154 qh->start_active_frame = next_active_frame; in dwc2_pick_first_frame()
1157 qh, frame_number, qh->next_active_frame); in dwc2_pick_first_frame()
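dwc2_pick_first_frame() clamps the endpoint interval with gcd() against the length of the bitmap window (a microframe window for high-speed/split traffic, a frame window otherwise), so that the slot chosen inside the window repeats cleanly every time the window wraps. A sketch of that clamping; the window length used here is an assumption, not the driver's DWC2_HS_SCHEDULE_UFRAMES.

#include <stdio.h>

#define HS_SCHEDULE_UFRAMES	8	/* assumed window length in microframes */

static unsigned int gcd(unsigned int a, unsigned int b)
{
	while (b) {
		unsigned int t = a % b;

		a = b;
		b = t;
	}
	return a;
}

int main(void)
{
	unsigned int host_interval = 12;	/* example interval in microframes */
	unsigned int interval = gcd(host_interval, HS_SCHEDULE_UFRAMES);

	printf("interval %u clamped to %u so it divides the %u-uframe window\n",
	       host_interval, interval, HS_SCHEDULE_UFRAMES);
	return 0;
}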
1171 static int dwc2_do_reserve(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh) in dwc2_do_reserve() argument
1176 status = dwc2_uframe_schedule(hsotg, qh); in dwc2_do_reserve()
1186 status = dwc2_check_periodic_bandwidth(hsotg, qh); in dwc2_do_reserve()
1201 hsotg->periodic_usecs += qh->host_us; in dwc2_do_reserve()
1203 dwc2_pick_first_frame(hsotg, qh); in dwc2_do_reserve()
1217 static void dwc2_do_unreserve(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh) in dwc2_do_unreserve() argument
1221 WARN_ON(!qh->unreserve_pending); in dwc2_do_unreserve()
1224 qh->unreserve_pending = false; in dwc2_do_unreserve()
1226 if (WARN_ON(!list_empty(&qh->qh_list_entry))) in dwc2_do_unreserve()
1227 list_del_init(&qh->qh_list_entry); in dwc2_do_unreserve()
1230 hsotg->periodic_usecs -= qh->host_us; in dwc2_do_unreserve()
1233 dwc2_uframe_unschedule(hsotg, qh); in dwc2_do_unreserve()
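dwc2_do_reserve() and dwc2_do_unreserve() bracket the bandwidth accounting: a successful reservation runs the microframe scheduler and the bandwidth check, then adds the QH's host time to the controller-wide periodic_usecs counter; the unreserve path subtracts it again and releases the bitmap slots. A minimal stand-in for that pairing, with simplified names.

#include <stdio.h>

struct sched_state {
	int periodic_usecs;	/* total periodic time already claimed */
};

static void do_reserve(struct sched_state *s, int host_us)
{
	/* microframe scheduling and the bandwidth check would happen here */
	s->periodic_usecs += host_us;
}

static void do_unreserve(struct sched_state *s, int host_us)
{
	s->periodic_usecs -= host_us;
	/* bitmap slots held by this QH would be released here */
}

int main(void)
{
	struct sched_state s = { 0 };

	do_reserve(&s, 40);
	do_unreserve(&s, 40);
	printf("claimed after reserve+unreserve: %d us\n", s.periodic_usecs);
	return 0;
}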
1254 struct dwc2_qh *qh = from_timer(qh, t, unreserve_timer); in dwc2_unreserve_timer_fn() local
1255 struct dwc2_hsotg *hsotg = qh->hsotg; in dwc2_unreserve_timer_fn()
1268 if (timer_pending(&qh->unreserve_timer)) in dwc2_unreserve_timer_fn()
1282 if (qh->unreserve_pending) in dwc2_unreserve_timer_fn()
1283 dwc2_do_unreserve(hsotg, qh); in dwc2_unreserve_timer_fn()
1299 struct dwc2_qh *qh) in dwc2_check_max_xfer_size() argument
1305 max_xfer_size = qh->maxp * qh->maxp_mult; in dwc2_check_max_xfer_size()
1328 static int dwc2_schedule_periodic(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh) in dwc2_schedule_periodic() argument
1332 status = dwc2_check_max_xfer_size(hsotg, qh); in dwc2_schedule_periodic()
1341 if (del_timer(&qh->unreserve_timer)) in dwc2_schedule_periodic()
1342 WARN_ON(!qh->unreserve_pending); in dwc2_schedule_periodic()
1351 if (!qh->unreserve_pending) { in dwc2_schedule_periodic()
1352 status = dwc2_do_reserve(hsotg, qh); in dwc2_schedule_periodic()
1362 if (dwc2_frame_num_le(qh->next_active_frame, in dwc2_schedule_periodic()
1364 dwc2_pick_first_frame(hsotg, qh); in dwc2_schedule_periodic()
1367 qh->unreserve_pending = 0; in dwc2_schedule_periodic()
1371 list_add_tail(&qh->qh_list_entry, &hsotg->periodic_sched_ready); in dwc2_schedule_periodic()
1374 list_add_tail(&qh->qh_list_entry, in dwc2_schedule_periodic()
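dwc2_schedule_periodic() shows how reservations are reused: if the QH's unreserve timer was still pending when new work arrived, cancelling it keeps the existing reservation, so dwc2_do_reserve() only runs when no release was pending; the first frame is re-picked if next_active_frame has already slipped into the past. A simplified sketch of the reuse decision, with stand-in names.

#include <stdbool.h>
#include <stdio.h>

struct qh_state {
	bool unreserve_timer_armed;	/* a deferred release is scheduled */
	bool unreserve_pending;		/* the release has not happened yet */
};

/* stand-in for del_timer(): returns true if a pending timer was cancelled */
static bool cancel_unreserve_timer(struct qh_state *qh)
{
	bool was_armed = qh->unreserve_timer_armed;

	qh->unreserve_timer_armed = false;
	return was_armed;
}

static void do_reserve(struct qh_state *qh)
{
	/* microframe scheduling + bandwidth accounting would happen here */
	(void)qh;
}

static int schedule_periodic(struct qh_state *qh)
{
	/* cancelling a live timer should always find a pending release */
	if (cancel_unreserve_timer(qh) && !qh->unreserve_pending)
		fprintf(stderr, "timer armed without a pending release?\n");

	if (!qh->unreserve_pending)
		do_reserve(qh);		/* nothing left over: reserve afresh */

	qh->unreserve_pending = false;	/* reservation is back in active use */
	return 0;
}

int main(void)
{
	struct qh_state qh = { .unreserve_timer_armed = true,
			       .unreserve_pending = true };

	return schedule_periodic(&qh);	/* reuses the old reservation */
}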
1388 struct dwc2_qh *qh) in dwc2_deschedule_periodic() argument
1409 did_modify = mod_timer(&qh->unreserve_timer, in dwc2_deschedule_periodic()
1412 qh->unreserve_pending = 1; in dwc2_deschedule_periodic()
1414 list_del_init(&qh->qh_list_entry); in dwc2_deschedule_periodic()
1442 struct dwc2_qh *qh = container_of(t, struct dwc2_qh, wait_timer); in dwc2_wait_timer_fn() local
1443 struct dwc2_hsotg *hsotg = qh->hsotg; in dwc2_wait_timer_fn()
1452 if (!qh->wait_timer_cancel) { in dwc2_wait_timer_fn()
1455 qh->want_wait = false; in dwc2_wait_timer_fn()
1457 list_move(&qh->qh_list_entry, in dwc2_wait_timer_fn()
1478 static void dwc2_qh_init(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh, in dwc2_qh_init() argument
1496 qh->hsotg = hsotg; in dwc2_qh_init()
1497 timer_setup(&qh->unreserve_timer, dwc2_unreserve_timer_fn, 0); in dwc2_qh_init()
1498 hrtimer_init(&qh->wait_timer, CLOCK_MONOTONIC, HRTIMER_MODE_REL); in dwc2_qh_init()
1499 qh->wait_timer.function = &dwc2_wait_timer_fn; in dwc2_qh_init()
1500 qh->ep_type = ep_type; in dwc2_qh_init()
1501 qh->ep_is_in = ep_is_in; in dwc2_qh_init()
1503 qh->data_toggle = DWC2_HC_PID_DATA0; in dwc2_qh_init()
1504 qh->maxp = maxp; in dwc2_qh_init()
1505 qh->maxp_mult = maxp_mult; in dwc2_qh_init()
1506 INIT_LIST_HEAD(&qh->qtd_list); in dwc2_qh_init()
1507 INIT_LIST_HEAD(&qh->qh_list_entry); in dwc2_qh_init()
1509 qh->do_split = do_split; in dwc2_qh_init()
1510 qh->dev_speed = dev_speed; in dwc2_qh_init()
1517 &qh->ttport); in dwc2_qh_init()
1520 qh->dwc_tt = dwc_tt; in dwc2_qh_init()
1522 qh->host_us = NS_TO_US(usb_calc_bus_time(host_speed, ep_is_in, in dwc2_qh_init()
1529 qh->device_us = NS_TO_US(device_ns); in dwc2_qh_init()
1531 qh->device_interval = urb->interval; in dwc2_qh_init()
1532 qh->host_interval = urb->interval * (do_split ? 8 : 1); in dwc2_qh_init()
1539 qh->schedule_low_speed = prtspd != HPRT0_SPD_HIGH_SPEED || in dwc2_qh_init()
1544 qh->num_hs_transfers = -1; in dwc2_qh_init()
1546 qh->num_hs_transfers = 1; in dwc2_qh_init()
1548 qh->num_hs_transfers = 0; in dwc2_qh_init()
1569 switch (qh->ep_type) { in dwc2_qh_init()
1587 dwc2_sch_dbg(hsotg, "QH=%p Init %s, %s speed, %d bytes:\n", qh, type, in dwc2_qh_init()
1589 dwc2_sch_dbg(hsotg, "QH=%p ...addr=%d, ep=%d, %s\n", qh, in dwc2_qh_init()
1596 qh, qh->host_us, qh->device_us); in dwc2_qh_init()
1598 qh, qh->host_interval, qh->device_interval); in dwc2_qh_init()
1599 if (qh->schedule_low_speed) in dwc2_qh_init()
1601 qh, dwc2_get_ls_map(hsotg, qh)); in dwc2_qh_init()
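dwc2_qh_init() keeps two views of the same endpoint: timing and interval as seen by the device on its own bus, and as seen by the host schedule. For splits the host schedule runs in high-speed microframes, so the URB interval (in frames) is multiplied by 8 while device_interval stays in the device's frames; schedule_low_speed is set whenever a low-speed bitmap is needed (a non-high-speed root port, or a split). A small sketch of the interval conversion.

#include <stdbool.h>
#include <stdio.h>

int main(void)
{
	bool do_split = true;
	int urb_interval = 4;			/* example URB interval */
	int device_interval = urb_interval;	/* in the device's own frames */
	int host_interval = urb_interval * (do_split ? 8 : 1);	/* in microframes */

	printf("device_interval=%d frame(s), host_interval=%d uframe(s)\n",
	       device_interval, host_interval);
	return 0;
}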
1619 struct dwc2_qh *qh; in dwc2_hcd_qh_create() local
1625 qh = kzalloc(sizeof(*qh), mem_flags); in dwc2_hcd_qh_create()
1626 if (!qh) in dwc2_hcd_qh_create()
1629 dwc2_qh_init(hsotg, qh, urb, mem_flags); in dwc2_hcd_qh_create()
1632 dwc2_hcd_qh_init_ddma(hsotg, qh, mem_flags) < 0) { in dwc2_hcd_qh_create()
1633 dwc2_hcd_qh_free(hsotg, qh); in dwc2_hcd_qh_create()
1637 return qh; in dwc2_hcd_qh_create()
1651 void dwc2_hcd_qh_free(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh) in dwc2_hcd_qh_free() argument
1654 if (del_timer_sync(&qh->unreserve_timer)) { in dwc2_hcd_qh_free()
1658 dwc2_do_unreserve(hsotg, qh); in dwc2_hcd_qh_free()
1669 hrtimer_cancel(&qh->wait_timer); in dwc2_hcd_qh_free()
1671 dwc2_host_put_tt_info(hsotg, qh->dwc_tt); in dwc2_hcd_qh_free()
1673 if (qh->desc_list) in dwc2_hcd_qh_free()
1674 dwc2_hcd_qh_free_ddma(hsotg, qh); in dwc2_hcd_qh_free()
1675 else if (hsotg->unaligned_cache && qh->dw_align_buf) in dwc2_hcd_qh_free()
1676 kmem_cache_free(hsotg->unaligned_cache, qh->dw_align_buf); in dwc2_hcd_qh_free()
1678 kfree(qh); in dwc2_hcd_qh_free()
1691 int dwc2_hcd_qh_add(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh) in dwc2_hcd_qh_add() argument
1697 if (dbg_qh(qh)) in dwc2_hcd_qh_add()
1700 if (!list_empty(&qh->qh_list_entry)) in dwc2_hcd_qh_add()
1705 if (dwc2_qh_is_non_per(qh)) { in dwc2_hcd_qh_add()
1707 qh->start_active_frame = hsotg->frame_number; in dwc2_hcd_qh_add()
1708 qh->next_active_frame = qh->start_active_frame; in dwc2_hcd_qh_add()
1710 if (qh->want_wait) { in dwc2_hcd_qh_add()
1711 list_add_tail(&qh->qh_list_entry, in dwc2_hcd_qh_add()
1713 qh->wait_timer_cancel = false; in dwc2_hcd_qh_add()
1715 hrtimer_start(&qh->wait_timer, delay, HRTIMER_MODE_REL); in dwc2_hcd_qh_add()
1717 list_add_tail(&qh->qh_list_entry, in dwc2_hcd_qh_add()
1723 status = dwc2_schedule_periodic(hsotg, qh); in dwc2_hcd_qh_add()
1743 void dwc2_hcd_qh_unlink(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh) in dwc2_hcd_qh_unlink() argument
1750 qh->wait_timer_cancel = true; in dwc2_hcd_qh_unlink()
1752 if (list_empty(&qh->qh_list_entry)) in dwc2_hcd_qh_unlink()
1756 if (dwc2_qh_is_non_per(qh)) { in dwc2_hcd_qh_unlink()
1757 if (hsotg->non_periodic_qh_ptr == &qh->qh_list_entry) in dwc2_hcd_qh_unlink()
1760 list_del_init(&qh->qh_list_entry); in dwc2_hcd_qh_unlink()
1764 dwc2_deschedule_periodic(hsotg, qh); in dwc2_hcd_qh_unlink()
1794 struct dwc2_qh *qh, u16 frame_number) in dwc2_next_for_periodic_split() argument
1796 u16 old_frame = qh->next_active_frame; in dwc2_next_for_periodic_split()
1807 if (old_frame == qh->start_active_frame && in dwc2_next_for_periodic_split()
1808 !(qh->ep_type == USB_ENDPOINT_XFER_ISOC && !qh->ep_is_in)) in dwc2_next_for_periodic_split()
1813 qh->next_active_frame = dwc2_frame_num_inc(old_frame, incr); in dwc2_next_for_periodic_split()
1823 if (dwc2_frame_num_gt(prev_frame_number, qh->next_active_frame)) { in dwc2_next_for_periodic_split()
1829 qh->next_active_frame); in dwc2_next_for_periodic_split()
1830 qh->next_active_frame = frame_number; in dwc2_next_for_periodic_split()
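dwc2_next_for_periodic_split(), like the rest of this file, leans on the dwc2_frame_num_inc()/_dec()/_gt() helpers, which do wrapping arithmetic on the hardware frame counter so that "later than" still works across the wrap. A standalone sketch of that arithmetic; the 14-bit mask is an assumption for illustration.

#include <stdio.h>

#define FRAME_MASK	0x3fff	/* assumed width of the frame counter */

static unsigned int frame_num_inc(unsigned int frame, unsigned int inc)
{
	return (frame + inc) & FRAME_MASK;
}

static unsigned int frame_num_dec(unsigned int frame, unsigned int dec)
{
	return (frame - dec) & FRAME_MASK;
}

int main(void)
{
	unsigned int old_frame = 0x3ffe;
	unsigned int new_frame = frame_num_inc(old_frame, 4);

	printf("0x%x + 4 wraps to 0x%x\n", old_frame, new_frame);
	printf("distance back to the old frame: %u\n",
	       frame_num_dec(new_frame, old_frame));
	return 0;
}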
1857 struct dwc2_qh *qh, u16 frame_number) in dwc2_next_periodic_start() argument
1860 u16 interval = qh->host_interval; in dwc2_next_periodic_start()
1863 qh->start_active_frame = dwc2_frame_num_inc(qh->start_active_frame, in dwc2_next_periodic_start()
1903 if (qh->start_active_frame == qh->next_active_frame || in dwc2_next_periodic_start()
1904 dwc2_frame_num_gt(prev_frame_number, qh->start_active_frame)) { in dwc2_next_periodic_start()
1905 u16 ideal_start = qh->start_active_frame; in dwc2_next_periodic_start()
1912 if (qh->do_split || qh->dev_speed == USB_SPEED_HIGH) in dwc2_next_periodic_start()
1919 qh->start_active_frame = dwc2_frame_num_inc( in dwc2_next_periodic_start()
1920 qh->start_active_frame, interval); in dwc2_next_periodic_start()
1922 qh->start_active_frame)); in dwc2_next_periodic_start()
1924 missed = dwc2_frame_num_dec(qh->start_active_frame, in dwc2_next_periodic_start()
1929 qh->next_active_frame = qh->start_active_frame; in dwc2_next_periodic_start()
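dwc2_next_periodic_start() advances start_active_frame by one interval and, if that still lands behind the frame counter, keeps stepping by whole intervals until the start is in the future again, reporting the distance skipped as missed frames. A simplified sketch of that catch-up; plain integers are used here, where the driver uses its wrapping frame helpers.

#include <stdio.h>

int main(void)
{
	unsigned int interval = 8;		/* example host interval */
	unsigned int start_active_frame = 100;	/* previous start */
	unsigned int prev_frame_number = 130;	/* we are already past this */
	unsigned int missed = 0;

	start_active_frame += interval;		/* the normal advance */

	if (start_active_frame <= prev_frame_number) {
		unsigned int ideal_start = start_active_frame;

		do {
			start_active_frame += interval;
		} while (start_active_frame <= prev_frame_number);

		missed = start_active_frame - ideal_start;
	}

	printf("new start %u, missed %u frame(s)\n", start_active_frame, missed);
	return 0;
}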
1947 void dwc2_hcd_qh_deactivate(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh, in dwc2_hcd_qh_deactivate() argument
1950 u16 old_frame = qh->next_active_frame; in dwc2_hcd_qh_deactivate()
1954 if (dbg_qh(qh)) in dwc2_hcd_qh_deactivate()
1957 if (dwc2_qh_is_non_per(qh)) { in dwc2_hcd_qh_deactivate()
1958 dwc2_hcd_qh_unlink(hsotg, qh); in dwc2_hcd_qh_deactivate()
1959 if (!list_empty(&qh->qtd_list)) in dwc2_hcd_qh_deactivate()
1961 dwc2_hcd_qh_add(hsotg, qh); in dwc2_hcd_qh_deactivate()
1974 missed = dwc2_next_for_periodic_split(hsotg, qh, frame_number); in dwc2_hcd_qh_deactivate()
1976 missed = dwc2_next_periodic_start(hsotg, qh, frame_number); in dwc2_hcd_qh_deactivate()
1980 qh, sched_next_periodic_split, frame_number, old_frame, in dwc2_hcd_qh_deactivate()
1981 qh->next_active_frame, in dwc2_hcd_qh_deactivate()
1982 dwc2_frame_num_dec(qh->next_active_frame, old_frame), in dwc2_hcd_qh_deactivate()
1985 if (list_empty(&qh->qtd_list)) { in dwc2_hcd_qh_deactivate()
1986 dwc2_hcd_qh_unlink(hsotg, qh); in dwc2_hcd_qh_deactivate()
1997 if (dwc2_frame_num_le(qh->next_active_frame, hsotg->frame_number)) in dwc2_hcd_qh_deactivate()
1998 list_move_tail(&qh->qh_list_entry, in dwc2_hcd_qh_deactivate()
2001 list_move_tail(&qh->qh_list_entry, in dwc2_hcd_qh_deactivate()
2049 struct dwc2_qh *qh) in dwc2_hcd_qtd_add() argument
2053 if (unlikely(!qh)) { in dwc2_hcd_qtd_add()
2059 retval = dwc2_hcd_qh_add(hsotg, qh); in dwc2_hcd_qtd_add()
2063 qtd->qh = qh; in dwc2_hcd_qtd_add()
2064 list_add_tail(&qtd->qtd_list_entry, &qh->qtd_list); in dwc2_hcd_qtd_add()