/drivers/usb/gadget/function/

uvc_trace.h
    22  TP_PROTO(struct usb_request *req, u32 queued),
    23  TP_ARGS(req, queued),
    27  __field(u32, queued)
    32  __entry->queued = queued;
    37  __entry->queued)
    41  TP_PROTO(struct usb_request *req, u32 queued),
    42  TP_ARGS(req, queued)
    46  TP_PROTO(struct usb_request *req, u32 queued),
    47  TP_ARGS(req, queued)
|
uvc_video.c
   273  atomic_inc(&video->queued);  in uvcg_video_ep_queue()
   275  trace_uvcg_video_queue(req, atomic_read(&video->queued));  in uvcg_video_ep_queue()
   340  atomic_dec(&video->queued);  in uvc_video_complete()
   414  trace_uvcg_video_complete(req, atomic_read(&video->queued));  in uvc_video_complete()
   448  (atomic_read(&video->queued) > UVCG_REQ_MAX_ZERO_COUNT)) {  in uvcg_video_hw_submit()
   795  atomic_set(&video->queued, 0);  in uvcg_video_enable()
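
The pattern these matches show is an atomic in-flight counter: incremented when a request is handed to the endpoint, decremented on completion, reset when streaming is enabled, and snapshotted for the tracepoints in uvc_trace.h. A minimal userspace sketch of that accounting, using C11 atomics rather than the kernel's atomic_t:

    #include <stdatomic.h>
    #include <stdio.h>

    static atomic_int queued;            /* requests currently owned by the HW */

    static void ep_queue(int req_id)
    {
        atomic_fetch_add(&queued, 1);    /* mirrors atomic_inc() on queue */
        printf("queue    req %d, in flight: %d\n", req_id, atomic_load(&queued));
    }

    static void complete(int req_id)
    {
        atomic_fetch_sub(&queued, 1);    /* mirrors atomic_dec() on completion */
        printf("complete req %d, in flight: %d\n", req_id, atomic_load(&queued));
    }

    int main(void)
    {
        atomic_store(&queued, 0);        /* mirrors atomic_set() at enable time */
        ep_queue(1);
        ep_queue(2);
        complete(1);
        complete(2);
        return 0;
    }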
|
/drivers/net/wireless/mediatek/mt76/

debugfs.c
    67  i, q->queued, q->head, q->tail);  in mt76_queues_read()
    77  int i, queued;  in mt76_rx_queues_read()  (local)
    83  queued = mt76_is_usb(dev) ? q->ndesc - q->queued : q->queued;  in mt76_rx_queues_read()
    85  i, queued, q->head, q->tail);  in mt76_rx_queues_read()
|
sdio.c
   317  q->queued = 0;  in mt76s_alloc_rx_queue()
   372  if (q->queued > 0) {  in mt76s_get_next_rx_entry()
   375  q->queued--;  in mt76s_get_next_rx_entry()
   439  while (q->queued > 0) {  in mt76s_process_tx_queue()
   455  if (!q->queued)  in mt76s_process_tx_queue()
   529  if (q->queued == q->ndesc)  in mt76s_tx_queue_skb()
   544  q->queued++;  in mt76s_tx_queue_skb()
   562  if (q->queued == q->ndesc) {  in mt76s_tx_queue_skb_raw()
   575  q->queued++;  in mt76s_tx_queue_skb_raw()
|
dma.c
   279  q->queued++;  in mt76_dma_add_rx_buf()
   343  q->queued++;  in mt76_dma_add_buf()
   396  while (q->queued > 0 && q->tail != last) {  in mt76_dma_tx_cleanup()
   417  if (!q->queued)  in mt76_dma_tx_cleanup()
   482  if (!q->queued)  in mt76_dma_dequeue()
   496  q->queued--;  in mt76_dma_dequeue()
   511  if (q->queued + 1 >= q->ndesc - 1)  in mt76_dma_tx_queue_skb_raw()
   590  if (q->queued + (tx_info.nbuf + 1) / 2 >= q->ndesc - 1) {  in mt76_dma_tx_queue_skb()
   644  while (q->queued < q->ndesc - 1) {  in mt76_dma_rx_fill_buf()
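
Across mt76's sdio, dma and usb back-ends, `queued` is an occupancy count kept alongside the head and tail indices of a descriptor ring, so fullness and emptiness tests never have to compare head against tail. A sketch of that accounting with hypothetical userspace types (the headroom check follows the one in dma.c above):

    #include <stdbool.h>
    #include <stdio.h>

    struct ring {
        int head, tail;   /* producer / consumer indices */
        int queued;       /* descriptors currently in use */
        int ndesc;        /* ring size */
    };

    static bool ring_add(struct ring *q)
    {
        if (q->queued + 1 >= q->ndesc - 1)   /* keep headroom, as dma.c does */
            return false;
        q->head = (q->head + 1) % q->ndesc;
        q->queued++;
        return true;
    }

    static bool ring_dequeue(struct ring *q)
    {
        if (!q->queued)
            return false;
        q->tail = (q->tail + 1) % q->ndesc;
        q->queued--;
        return true;
    }

    int main(void)
    {
        struct ring q = { .ndesc = 8 };

        while (ring_add(&q))
            ;
        printf("filled to %d of %d descriptors\n", q.queued, q.ndesc);
        while (ring_dequeue(&q))
            ;
        printf("drained to %d\n", q.queued);
        return 0;
    }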
|
usb.c
   444  if (q->queued > 0) {  in mt76u_get_next_rx_entry()
   447  q->queued--;  in mt76u_get_next_rx_entry()
   582  q->queued++;  in mt76u_complete_rx()
   654  q->queued = 0;  in mt76u_submit_rx_buffers()
   775  while (q->queued > 0) {  in mt76u_status_worker()
   785  if (!q->queued)  in mt76u_status_worker()
   864  if (q->queued == q->ndesc)  in mt76u_tx_queue_skb()
   882  q->queued++;  in mt76u_tx_queue_skb()
  1028  while (q->queued > 0) {  in mt76u_stop_tx()
|
wed.c
   139  q->queued = q->head;  in mt76_wed_dma_setup()
   146  q->queued = q->head;  in mt76_wed_dma_setup()
|
/drivers/gpu/drm/

drm_flip_work.c
    52  list_add_tail(&task->node, &work->queued);  in drm_flip_work_queue_task()
    95  list_splice_tail(&work->queued, &work->commited);  in drm_flip_work_commit()
    96  INIT_LIST_HEAD(&work->queued);  in drm_flip_work_commit()
   139  INIT_LIST_HEAD(&work->queued);  in drm_flip_work_init()
   156  WARN_ON(!list_empty(&work->queued) || !list_empty(&work->commited));  in drm_flip_work_cleanup()
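
These matches show a two-stage hand-off: tasks collect on `queued`, and commit splices the whole batch onto `commited` (the kernel's own spelling) before kicking the worker. A minimal userspace sketch of that splice, with a hypothetical singly linked list standing in for list.h:

    #include <stdio.h>
    #include <stdlib.h>

    struct task { int id; struct task *next; };

    struct flip_work {
        struct task *queued;    /* tasks waiting for a commit point */
        struct task *commited;  /* tasks handed to the worker */
    };

    static void queue_task(struct flip_work *w, int id)
    {
        struct task *t = malloc(sizeof(*t));

        t->id = id;
        t->next = w->queued;    /* the driver appends; a push keeps this short */
        w->queued = t;
    }

    static void commit(struct flip_work *w)
    {
        struct task **tail = &w->commited;

        while (*tail)
            tail = &(*tail)->next;  /* find the end of the committed list */
        *tail = w->queued;          /* list_splice_tail(&work->queued, ...) */
        w->queued = NULL;           /* INIT_LIST_HEAD(&work->queued) */
    }

    int main(void)
    {
        struct flip_work w = { NULL, NULL };

        queue_task(&w, 1);
        queue_task(&w, 2);
        commit(&w);
        for (struct task *t = w.commited; t; t = t->next)
            printf("committed task %d\n", t->id);
        return 0;
    }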
|
/drivers/md/

dm-cache-background-tracker.c
    21  struct list_head queued;  (member)
    42  INIT_LIST_HEAD(&b->queued);  in btracker_create()
    55  list_for_each_entry_safe (w, tmp, &b->queued, list) {  in btracker_destroy()
   195  list_add(&w->list, &b->queued);  in btracker_queue()
   209  if (list_empty(&b->queued))  in btracker_issue()
   212  w = list_first_entry(&b->queued, struct bt_work, list);  in btracker_issue()
|
/drivers/media/platform/renesas/vsp1/

vsp1_dl.c
   229  struct vsp1_dl_list *queued;  (member)
   860  if (!dlm->queued)  in vsp1_dl_list_hw_update_pending()
   918  __vsp1_dl_list_put(dlm->queued);  in vsp1_dl_list_commit_continuous()
   919  dlm->queued = dl;  in vsp1_dl_list_commit_continuous()
  1039  if (dlm->queued) {  in vsp1_dlm_irq_frame_end()
  1042  dlm->queued->flags &= ~VSP1_DL_FRAME_END_INTERNAL;  in vsp1_dlm_irq_frame_end()
  1045  dlm->active = dlm->queued;  in vsp1_dlm_irq_frame_end()
  1046  dlm->queued = NULL;  in vsp1_dlm_irq_frame_end()
  1057  dlm->queued = dlm->pending;  in vsp1_dlm_irq_frame_end()
  1094  __vsp1_dl_list_put(dlm->queued);  in vsp1_dlm_reset()
  [all …]
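
The matches outline a three-slot display-list hand-off: a commit lands in `queued` (displacing any list the hardware has not fetched yet), and the frame-end interrupt promotes queued to active and pending to queued. A sketch of that rotation with hypothetical types; the real driver also reference-counts each list, which is elided here:

    #include <stdio.h>

    struct dl_manager {
        const char *active;   /* list the hardware is processing */
        const char *queued;   /* list loaded for the next frame */
        const char *pending;  /* list waiting behind the queued one */
    };

    static void commit_continuous(struct dl_manager *m, const char *dl)
    {
        /* the real code drops the reference on the displaced list here */
        m->queued = dl;
    }

    static void frame_end_irq(struct dl_manager *m)
    {
        if (m->queued) {
            m->active = m->queued;    /* hardware fetched the queued list */
            m->queued = NULL;
        }
        if (m->pending) {
            m->queued = m->pending;   /* next list moves up one slot */
            m->pending = NULL;
        }
    }

    int main(void)
    {
        struct dl_manager m = { NULL, NULL, NULL };

        commit_continuous(&m, "frame-1");
        frame_end_irq(&m);
        printf("active: %s\n", m.active ? m.active : "(none)");
        return 0;
    }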
|
/drivers/dma/

mpc512x_dma.c
   197  struct list_head queued;  (member)
   262  while (!list_empty(&mchan->queued)) {  in mpc_dma_execute()
   263  mdesc = list_first_entry(&mchan->queued,  in mpc_dma_execute()
   341  if (!list_empty(&mchan->queued))  in mpc_dma_irq_process()
   472  list_move_tail(&mdesc->node, &mchan->queued);  in mpc_dma_tx_submit()
   558  BUG_ON(!list_empty(&mchan->queued));  in mpc_dma_free_chan_resources()
   890  list_splice_tail_init(&mchan->queued, &mchan->free);  in mpc_dma_device_terminate_all()
  1005  INIT_LIST_HEAD(&mchan->queued);  in mpc_dma_probe()
|
nbpfaxi.c
   220  struct list_head queued;  (member)
   609  if (list_empty(&chan->queued))  in nbpf_issue_pending()
   612  list_splice_tail_init(&chan->queued, &chan->active);  in nbpf_issue_pending()
   653  list_for_each_entry(desc, &chan->queued, node)  in nbpf_tx_status()
   681  list_add_tail(&desc->node, &chan->queued);  in nbpf_tx_submit()
   862  list_splice_init(&chan->queued, &head);  in nbpf_chan_idle()
  1059  INIT_LIST_HEAD(&chan->queued);  in nbpf_alloc_chan_resources()
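
Both DMA engines above follow the dmaengine convention: tx_submit() parks the descriptor on `queued`, and issue_pending() splices the whole batch onto `active` before starting the hardware. A sketch of that flow with a hypothetical userspace list standing in for list_splice_tail_init():

    #include <stdio.h>

    struct desc { int cookie; struct desc *next; };

    struct chan {
        struct desc *queued;   /* submitted but not yet issued */
        struct desc *active;   /* owned by the hardware */
    };

    static void tx_submit(struct chan *c, struct desc *d)
    {
        struct desc **tail = &c->queued;

        while (*tail)
            tail = &(*tail)->next;
        d->next = NULL;
        *tail = d;              /* list_add_tail(&desc->node, &chan->queued) */
    }

    static void issue_pending(struct chan *c)
    {
        struct desc **tail = &c->active;

        if (!c->queued)
            return;
        while (*tail)
            tail = &(*tail)->next;
        *tail = c->queued;      /* splice the queued batch onto active */
        c->queued = NULL;
        /* hardware start would happen here */
    }

    int main(void)
    {
        struct desc d1 = { .cookie = 1 }, d2 = { .cookie = 2 };
        struct chan c = { NULL, NULL };

        tx_submit(&c, &d1);
        tx_submit(&c, &d2);
        issue_pending(&c);
        for (struct desc *d = c.active; d; d = d->next)
            printf("issued cookie %d\n", d->cookie);
        return 0;
    }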
|
/drivers/dma/qcom/

hidma.c
   171  bool queued = false;  in hidma_callback()  (local)
   177  queued = true;  in hidma_callback()
   187  if (queued) {  in hidma_callback()
   212  INIT_LIST_HEAD(&mchan->queued);  in hidma_chan_init()
   236  list_for_each_entry_safe(qdesc, next, &mchan->queued, node) {  in hidma_issue_pending()
   330  list_move_tail(&mdesc->node, &mchan->queued);  in hidma_tx_submit()
   486  list_splice_init(&mchan->queued, &list);  in hidma_terminate_channel()
|
hidma.h
    30  bool queued;  /* flag whether this is pending */  (member)
   102  struct list_head queued;  (member)
|
hidma_ll.c
   160  tre->queued = 0;  in hidma_ll_request()
   223  tre->queued = 0;  in hidma_post_completed()
   539  tre->queued = 1;  in hidma_ll_queue_request()
|
/drivers/crypto/inside-secure/

safexcel_hash.c
   319  u64 queued, len;  in safexcel_ahash_send_req()  (local)
   323  cache_len = queued;  in safexcel_ahash_send_req()
   343  queued -= extra;  in safexcel_ahash_send_req()
   345  if (!queued) {  in safexcel_ahash_send_req()
   386  queued = queued + extra;  in safexcel_ahash_send_req()
   393  len = queued;  in safexcel_ahash_send_req()
   413  queued -= cache_len;  in safexcel_ahash_send_req()
   414  if (!queued)  in safexcel_ahash_send_req()
   438  sglen = queued;  in safexcel_ahash_send_req()
   455  queued -= sglen;  in safexcel_ahash_send_req()
   [all …]
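
Here `queued` is byte bookkeeping: everything available to hash (cached remainder plus new data), from which a tail that does not fill a whole block (`extra`) is held back for the next update or the final operation. A sketch of just that arithmetic, assuming the common keep-one-block-back rule for non-final updates and SHA-256's 64-byte block; the hypothetical plan_update() is not a safexcel function:

    #include <stdio.h>

    #define BLOCK_SIZE 64UL

    static void plan_update(unsigned long cache_len, unsigned long new_len)
    {
        unsigned long queued = cache_len + new_len;
        unsigned long extra = queued & (BLOCK_SIZE - 1);

        if (!extra)
            extra = BLOCK_SIZE;   /* keep at least one block for finish */
        queued -= extra;

        printf("hash %lu bytes now, cache %lu for later\n", queued, extra);
    }

    int main(void)
    {
        plan_update(10, 200);     /* 210 total -> hash 192, cache 18 */
        plan_update(0, 128);      /* exact multiple -> hash 64, cache 64 */
        return 0;
    }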
|
/drivers/net/ethernet/mediatek/

mtk_wed_wo.c
   108  if (!q->queued)  in mtk_wed_wo_dequeue()
   117  q->queued--;  in mtk_wed_wo_dequeue()
   140  while (q->queued < q->n_desc) {  in mtk_wed_wo_queue_refill()
   171  q->queued++;  in mtk_wed_wo_queue_refill()
|
/drivers/gpio/

gpio-virtio.c
    36  bool queued;  (member)
   214  if (WARN_ON(irq_line->queued || irq_line->masked || irq_line->disabled))  in virtio_gpio_irq_prepare()
   229  irq_line->queued = true;  in virtio_gpio_irq_prepare()
   360  irq_line->queued = false;  in ignore_irq()
|
/drivers/tty/serial/

amba-pl011.c
   248  bool queued;  (member)
   554  if (uap->dmatx.queued)  in pl011_dma_tx_callback()
   573  uap->dmatx.queued = false;  in pl011_dma_tx_callback()
   613  uap->dmatx.queued = false;  in pl011_dma_tx_refill()
   632  uap->dmatx.queued = false;  in pl011_dma_tx_refill()
   662  uap->dmatx.queued = true;  in pl011_dma_tx_refill()
   694  if (uap->dmatx.queued) {  in pl011_dma_tx_irq()
   720  if (uap->dmatx.queued) {  in pl011_dma_tx_stop()
   745  if (!uap->dmatx.queued) {  in pl011_dma_tx_start()
   803  if (uap->dmatx.queued) {  in pl011_dma_flush_buffer()
   [all …]
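
The single `queued` flag above is a one-bit state machine: true exactly while one DMA TX transfer is outstanding, so refill, start, stop, flush and the completion callback all branch on it. A hypothetical userspace stub of that lifecycle:

    #include <stdbool.h>
    #include <stdio.h>

    struct dmatx { bool queued; };

    static bool dma_tx_refill(struct dmatx *d)
    {
        if (d->queued)
            return false;      /* one transfer in flight at a time */
        /* ...map the buffer and submit the descriptor here... */
        d->queued = true;
        return true;
    }

    static void dma_tx_callback(struct dmatx *d)
    {
        d->queued = false;     /* transfer finished; a refill may follow */
    }

    int main(void)
    {
        struct dmatx d = { false };

        printf("first refill:  %s\n", dma_tx_refill(&d) ? "submitted" : "busy");
        printf("second refill: %s\n", dma_tx_refill(&d) ? "submitted" : "busy");
        dma_tx_callback(&d);
        printf("after irq:     %s\n", dma_tx_refill(&d) ? "submitted" : "busy");
        return 0;
    }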
|
/drivers/soc/ti/

Kconfig
    18  Packets are queued/de-queued by writing/reading descriptor address
|
/drivers/md/dm-vdo/indexer/

volume.c
   377  bool queued;  in reserve_read_queue_entry()  (local)
   385  queued = (index_value & VOLUME_CACHE_QUEUED_FLAG) != 0;  in reserve_read_queue_entry()
   387  if (entry->invalid && queued)  in reserve_read_queue_entry()
   394  if (!queued)  in reserve_read_queue_entry()
   649  bool queued;  in get_page_and_index()  (local)
   663  queued = (index_value & VOLUME_CACHE_QUEUED_FLAG) != 0;  in get_page_and_index()
   666  if (!queued && (index < cache->cache_slots)) {  in get_page_and_index()
   678  *queue_index = queued ? index : -1;  in get_page_and_index()
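
These matches decode a packed value: one word carries both a slot index and a "queued" flag bit, so a single read yields both. A sketch of the decoding; the flag position and field width here are illustrative, not vdo's actual layout:

    #include <stdint.h>
    #include <stdio.h>

    #define QUEUED_FLAG  (UINT32_C(1) << 31)
    #define INDEX_MASK   (QUEUED_FLAG - 1)

    static void decode(uint32_t index_value)
    {
        int queued = (index_value & QUEUED_FLAG) != 0;
        uint32_t index = index_value & INDEX_MASK;

        /* mirrors: *queue_index = queued ? index : -1; */
        printf("index %u, queue_index %d\n", index, queued ? (int)index : -1);
    }

    int main(void)
    {
        decode(42);                  /* cached in slot 42 */
        decode(QUEUED_FLAG | 7);     /* waiting on read queue entry 7 */
        return 0;
    }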
|
/drivers/acpi/apei/

ghes.c
   559  bool queued = false;  in ghes_handle_arm_hw_error()  (local)
   583  queued = ghes_do_memory_failure(err_info->physical_fault_addr, flags);  in ghes_handle_arm_hw_error()
   597  return queued;  in ghes_handle_arm_hw_error()
   878  bool queued = false;  in ghes_do_proc()  (local)
   897  queued = ghes_handle_memory_failure(gdata, sev, sync);  in ghes_do_proc()
   903  queued = ghes_handle_arm_hw_error(gdata, sev, sync);  in ghes_do_proc()
   934  if (sync && !queued) {  in ghes_do_proc()
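
Here each error handler reports whether it managed to queue recovery work, and a synchronous error that nothing queued takes a fallback path (in the driver, signalling the affected task). A sketch of that control flow with stubbed handlers, not the GHES API:

    #include <stdbool.h>
    #include <stdio.h>

    static bool handle_memory_failure(void) { return true; }   /* stub */
    static bool handle_arm_hw_error(void)   { return false; }  /* stub */

    static void do_proc(bool sync, bool is_mem_error)
    {
        bool queued = false;

        if (is_mem_error)
            queued = handle_memory_failure();
        else
            queued = handle_arm_hw_error();

        if (sync && !queued)
            printf("synchronous error not queued: take fallback action\n");
    }

    int main(void)
    {
        do_proc(true, true);    /* recovery work queued, nothing more to do */
        do_proc(true, false);   /* falls through to the fallback path */
        return 0;
    }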
|
/drivers/mtd/

ftl.c
   572  int queued, ret;  in reclaim_block()  (local)
   579  queued = 0;  in reclaim_block()
   590  queued = 1;  in reclaim_block()
   610  if (queued) {  in reclaim_block()
|
/drivers/net/ethernet/airoha/

airoha_eth.c
   558  while (q->queued < q->ndesc - 1) {  in airoha_qdma_fill_rx_queue()
   571  q->queued++;  in airoha_qdma_fill_rx_queue()
   637  q->queued--;  in airoha_qdma_rx_process()
   810  while (q->queued) {  in airoha_qdma_cleanup_rx_queue()
   818  q->queued--;  in airoha_qdma_cleanup_rx_queue()
   891  if (!q->queued)  in airoha_qdma_tx_napi_poll()
   909  q->queued--;  in airoha_qdma_tx_napi_poll()
  1047  while (q->queued) {  in airoha_qdma_cleanup_tx_queue()
  1056  q->queued--;  in airoha_qdma_cleanup_tx_queue()
  1926  if (q->queued + nr_frags > q->ndesc) {  in airoha_dev_xmit()
  [all …]
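
The xmit check at line 1926 is an admission test for multi-fragment packets: one descriptor is needed per fragment, so the queue is stopped when the whole burst would not fit. A sketch of that check with hypothetical types and sizes:

    #include <stdbool.h>
    #include <stdio.h>

    struct tx_queue { int queued, ndesc; };

    static bool can_xmit(struct tx_queue *q, int nr_frags)
    {
        if (q->queued + nr_frags > q->ndesc)
            return false;        /* netif_tx_stop_queue() in the driver */
        q->queued += nr_frags;
        return true;
    }

    int main(void)
    {
        struct tx_queue q = { .queued = 14, .ndesc = 16 };

        printf("2-frag packet: %s\n", can_xmit(&q, 2) ? "queued" : "stopped");
        printf("3-frag packet: %s\n", can_xmit(&q, 3) ? "queued" : "stopped");
        return 0;
    }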
|
/drivers/net/wireless/mediatek/mt76/mt7996/

debugfs.c
   603  u32 ctrl, head, tail, queued;  in mt7996_hw_queue_read()  (local)
   615  queued = mt76_get_field(dev, MT_FL_Q3_CTRL,  in mt7996_hw_queue_read()
   620  queued, head, tail);  in mt7996_hw_queue_read()
   788  queue_map[i].queue, q->queued, q->head,  in mt7996_xmit_queues_show()
|