
Searched refs:vchan (Results 1 – 25 of 37) sorted by relevance
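Most of these hits come from DMA engine drivers built on the virt-dma helper layer (drivers/dma/virt-dma.h): the driver embeds a struct virt_dma_chan, usually as a member named vchan, in its private channel structure and recovers that structure from the generic struct dma_chan with container_of(), as the hsu.h and st_fdma.h results below show. A minimal sketch of that embedding, using hypothetical foo_* names that do not appear in any of the drivers listed:

#include <linux/dmaengine.h>
#include "virt-dma.h"			/* as included by the drivers/dma/*.c files below */

/* Hypothetical driver-private channel wrapping the virt-dma helper state. */
struct foo_dma_chan {
	struct virt_dma_chan vchan;	/* provides vchan.chan, vchan.lock and the descriptor lists */
	bool busy;			/* illustrative driver-specific state, not part of virt-dma */
};

static inline struct foo_dma_chan *to_foo_dma_chan(struct dma_chan *chan)
{
	/* struct dma_chan sits inside struct virt_dma_chan, which sits in foo_dma_chan */
	return container_of(chan, struct foo_dma_chan, vchan.chan);
}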

/linux/drivers/dma/
owl-dma.c
480 pchan->vchan = vchan; in owl_dma_get_pchan()
547 vchan->txd = txd; in owl_dma_start_next_txd()
645 vchan = pchan->vchan; in owl_dma_interrupt()
646 if (!vchan) { in owl_dma_interrupt()
654 txd = vchan->txd; in owl_dma_interrupt()
706 if (vchan->pchan) in owl_dma_terminate_all()
709 if (vchan->txd) { in owl_dma_terminate_all()
711 vchan->txd = NULL; in owl_dma_terminate_all()
757 if (!vchan->pchan && !vchan->txd) in owl_dma_resume()
780 txd = vchan->txd; in owl_dma_getbytes_chan()
[all …]
sun4i-dma.c
239 if (vchan->is_dedicated) { in find_and_use_pchan()
250 pchan->vchan = vchan; in find_and_use_pchan()
267 pchan->vchan = NULL; in release_pchan()
350 if (vchan->processing) { in __execute_vchan_pending()
385 vchan->pchan = pchan; in __execute_vchan_pending()
644 if (vchan->is_dedicated) in sun4i_dma_prep_dma_memcpy()
770 if (vchan->is_dedicated) in sun4i_dma_prep_dma_cyclic()
861 if (vchan->is_dedicated) in sun4i_dma_prep_slave_sg()
915 vchan->pchan = NULL; in sun4i_dma_terminate_all()
1049 vchan = pchan->vchan; in sun4i_dma_interrupt()
[all …]
sun6i-dma.c
460 vchan->irq_type = vchan->cyclic ? DMA_IRQ_PKG : DMA_IRQ_QUEUE; in sun6i_dma_start_desc()
488 pchan = vchan->phy; in sun6i_dma_tasklet()
521 pchan->vchan = vchan; in sun6i_dma_tasklet()
533 vchan = pchan->vchan; in sun6i_dma_tasklet()
534 if (vchan) { in sun6i_dma_tasklet()
562 vchan = pchan->vchan; in sun6i_dma_interrupt()
563 if (vchan && (status & vchan->irq_type)) { in sun6i_dma_interrupt()
939 vchan->phy = NULL; in sun6i_dma_terminate_all()
1001 if (!vchan->phy && list_empty(&vchan->node)) { in sun6i_dma_issue_pending()
1005 &vchan->vc); in sun6i_dma_issue_pending()
[all …]
st_fdma.c
80 vdesc = vchan_next_desc(&fchan->vchan); in st_fdma_xfer_desc()
102 int ch_id = fchan->vchan.chan.chan_id; in st_fdma_ch_sta_update()
140 spin_lock(&fchan->vchan.lock); in st_fdma_irq_handler()
158 spin_unlock(&fchan->vchan.lock); in st_fdma_irq_handler()
295 __func__, fchan->vchan.chan.chan_id); in st_fdma_free_chan_res()
352 int ch_id = fchan->vchan.chan.chan_id; in config_reqctrl()
625 int ch_id = fchan->vchan.chan.chan_id; in st_fdma_pause()
643 int ch_id = fchan->vchan.chan.chan_id; in st_fdma_resume()
663 int ch_id = fchan->vchan.chan.chan_id; in st_fdma_terminate_all()
735 list_del(&fchan->vchan.chan.device_node); in st_fdma_free()
[all …]
ls2x-apb-dma.c
124 struct virt_dma_chan vchan; member
202 vdesc = vchan_next_desc(&lchan->vchan); in ls2x_dma_start_transfer()
466 spin_lock_irqsave(&lchan->vchan.lock, flags); in ls2x_dma_issue_pending()
484 spin_lock_irqsave(&lchan->vchan.lock, flags); in ls2x_dma_terminate_all()
508 vchan_synchronize(&lchan->vchan); in ls2x_dma_synchronize()
516 spin_lock_irqsave(&lchan->vchan.lock, flags); in ls2x_dma_pause()
531 spin_lock_irqsave(&lchan->vchan.lock, flags); in ls2x_dma_resume()
553 spin_lock(&lchan->vchan.lock); in ls2x_dma_isr()
568 spin_unlock(&lchan->vchan.lock); in ls2x_dma_isr()
591 lchan->vchan.desc_free = ls2x_dma_desc_free; in ls2x_dma_chan_init()
[all …]
idma64.c
118 vdesc = vchan_next_desc(&idma64c->vchan); in idma64_start_transfer()
143 stat = this_cpu_ptr(idma64c->vchan.chan.local); in idma64_chan_irq()
145 spin_lock(&idma64c->vchan.lock); in idma64_chan_irq()
163 spin_unlock(&idma64c->vchan.lock); in idma64_chan_irq()
342 spin_lock_irqsave(&idma64c->vchan.lock, flags); in idma64_issue_pending()
386 spin_lock_irqsave(&idma64c->vchan.lock, flags); in idma64_tx_status()
453 spin_lock_irqsave(&idma64c->vchan.lock, flags); in idma64_pause()
502 vchan_synchronize(&idma64c->vchan); in idma64_synchronize()
570 idma64c->vchan.desc_free = idma64_vdesc_free; in idma64_probe()
571 vchan_init(&idma64c->vchan, &idma64->dma); in idma64_probe()
[all …]
fsl-edma-common.c
49 spin_lock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
53 spin_unlock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
69 spin_unlock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
105 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_enable_request()
139 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_disable_request()
185 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_chan_mux()
238 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
773 lockdep_assert_held(&fsl_chan->vchan.lock); in fsl_edma_xfer_desc()
775 vdesc = vchan_next_desc(&fsl_chan->vchan); in fsl_edma_xfer_desc()
864 list_del(&chan->vchan.chan.device_node); in fsl_edma_cleanup_vchan()
[all …]
dma-jz4780.c
130 struct virt_dma_chan vchan; member
494 vdesc = vchan_next_desc(&jzchan->vchan); in jz4780_dma_begin()
605 vchan_synchronize(&jzchan->vchan); in jz4780_dma_synchronize()
684 spin_lock(&jzchan->vchan.lock); in jz4780_dma_chan_irq()
690 dev_warn(&jzchan->vchan.chan.dev->device, in jz4780_dma_chan_irq()
695 dev_warn(&jzchan->vchan.chan.dev->device, in jz4780_dma_chan_irq()
724 dev_err(&jzchan->vchan.chan.dev->device, in jz4780_dma_chan_irq()
728 spin_unlock(&jzchan->vchan.lock); in jz4780_dma_chan_irq()
780 vchan_free_chan_resources(&jzchan->vchan); in jz4780_dma_free_chan_resources()
844 &jzdma->chan[data.channel].vchan.chan); in jz4780_of_dma_xlate()
[all …]
fsl-qdma.c
181 struct virt_dma_chan vchan; member
318 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_qdma_free_chan_resources()
729 spin_lock(&fsl_comp->qchan->vchan.lock); in fsl_qdma_queue_transfer_complete()
732 spin_unlock(&fsl_comp->qchan->vchan.lock); in fsl_qdma_queue_transfer_complete()
999 vdesc = vchan_next_desc(&fsl_chan->vchan); in fsl_qdma_enqueue_desc()
1039 spin_lock(&fsl_chan->vchan.lock); in fsl_qdma_issue_pending()
1040 if (vchan_issue_pending(&fsl_chan->vchan)) in fsl_qdma_issue_pending()
1042 spin_unlock(&fsl_chan->vchan.lock); in fsl_qdma_issue_pending()
1050 vchan_synchronize(&fsl_chan->vchan); in fsl_qdma_synchronize()
1263 list_del(&chan->vchan.chan.device_node); in fsl_qdma_cleanup_vchan()
[all …]
st_fdma.h
125 struct virt_dma_chan vchan; member
186 + (fchan)->vchan.chan.chan_id * 0x4 \
191 + (fchan)->vchan.chan.chan_id * 0x4 \
208 + (fchan)->vchan.chan.chan_id * FDMA_NODE_SZ \
213 + (fchan)->vchan.chan.chan_id * FDMA_NODE_SZ \
dma-axi-dmac.c
143 struct virt_dma_chan vchan; member
245 vdesc = vchan_next_desc(&chan->vchan); in axi_dmac_start_transfer()
470 spin_lock(&dmac->chan.vchan.lock); in axi_dmac_interrupt_handler()
481 spin_unlock(&dmac->chan.vchan.lock); in axi_dmac_interrupt_handler()
493 spin_lock_irqsave(&chan->vchan.lock, flags); in axi_dmac_terminate_all()
500 vchan_dma_desc_free_list(&chan->vchan, &head); in axi_dmac_terminate_all()
509 vchan_synchronize(&chan->vchan); in axi_dmac_synchronize()
524 spin_lock_irqsave(&chan->vchan.lock, flags); in axi_dmac_issue_pending()
525 if (vchan_issue_pending(&chan->vchan)) in axi_dmac_issue_pending()
1117 vchan_init(&dmac->chan.vchan, dma_dev); in axi_dmac_probe()
[all …]
pxa_dma.c
100 struct pxad_chan *vchan; member
390 if (!phy->vchan) { in lookup_phy()
391 phy->vchan = pchan; in lookup_phy()
425 chan->phy->vchan = NULL; in pxad_free_phy()
455 if (!phy->vchan) in phy_enable()
458 dev_dbg(&phy->vchan->vc.chan.dev->device, in phy_enable()
464 reg = pxad_drcmr(phy->vchan->drcmr); in phy_enable()
487 dev_dbg(&phy->vchan->vc.chan.dev->device, in phy_disable()
598 __func__, &phy->vchan); in clear_chan_irq()
606 struct pxad_chan *chan = phy->vchan; in pxad_chan_handler()
[all …]
mmp_pdma.c
118 struct mmp_pdma_chan *vchan; member
154 if (!phy->vchan) in enable_chan()
157 reg = DRCMR(phy->vchan->drcmr); in enable_chan()
161 if (phy->vchan->byte_align) in enable_chan()
194 if ((dcsr & DCSR_BUSERR) && (phy->vchan)) in clear_chan_irq()
195 dev_warn(phy->vchan->dev, "DCSR_BUSERR\n"); in clear_chan_irq()
207 tasklet_schedule(&phy->vchan->tasklet); in mmp_pdma_chan_handler()
259 if (!phy->vchan) { in lookup_phy()
260 phy->vchan = pchan; in lookup_phy()
286 pchan->phy->vchan = NULL; in mmp_pdma_free_phy()
/linux/drivers/dma/hsu/
hsu.c
121 vdesc = vchan_next_desc(&hsuc->vchan); in hsu_dma_start_transfer()
168 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_get_status()
222 stat = this_cpu_ptr(hsuc->vchan.chan.local); in hsu_dma_do_irq()
224 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_do_irq()
303 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_issue_pending()
339 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_tx_status()
369 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_pause()
424 vchan_synchronize(&hsuc->vchan); in hsu_dma_synchronize()
452 hsuc->vchan.desc_free = hsu_dma_desc_free; in hsu_dma_probe()
453 vchan_init(&hsuc->vchan, &hsu->dma); in hsu_dma_probe()
[all …]
hsu.h
87 struct virt_dma_chan vchan; member
100 return container_of(chan, struct hsu_dma_chan, vchan.chan); in to_hsu_dma_chan()
/linux/drivers/dma/sf-pdma/
sf-pdma.c
105 spin_lock_irqsave(&chan->vchan.lock, iflags); in sf_pdma_prep_dma_memcpy()
146 spin_lock_irqsave(&chan->vchan.lock, flags); in sf_pdma_free_chan_resources()
166 spin_lock_irqsave(&chan->vchan.lock, flags); in sf_pdma_desc_residue()
181 vd = vchan_find_desc(&chan->vchan, cookie); in sf_pdma_desc_residue()
243 struct virt_dma_chan *vchan = &chan->vchan; in sf_pdma_get_first_pending_desc() local
246 if (list_empty(&vchan->desc_issued)) in sf_pdma_get_first_pending_desc()
350 spin_lock(&chan->vchan.lock); in sf_pdma_done_isr()
367 spin_unlock(&chan->vchan.lock); in sf_pdma_done_isr()
487 vchan_init(&chan->vchan, &pdma->dma_dev); in sf_pdma_setup_chans()
605 list_del(&ch->vchan.chan.device_node); in sf_pdma_remove()
[all …]
/linux/drivers/dma/lgm/
lgm-dma.c
195 struct virt_dma_chan vchan; member
1021 spin_lock_irqsave(&c->vchan.lock, flags); in ldma_issue_pending()
1022 if (vchan_issue_pending(&c->vchan)) { in ldma_issue_pending()
1026 vdesc = vchan_next_desc(&c->vchan); in ldma_issue_pending()
1051 vchan_synchronize(&c->vchan); in ldma_synchronize()
1062 spin_lock_irqsave(&c->vchan.lock, flags); in ldma_terminate_all()
1065 vchan_dma_desc_free_list(&c->vchan, &head); in ldma_terminate_all()
1315 struct virt_dma_chan *vc = &c->vchan; in dma_work()
1321 spin_lock_irqsave(&c->vchan.lock, flags); in dma_work()
1415 vchan_init(&c->vchan, &d->dma_dev); in ldma_dma_init_v22()
[all …]
/linux/drivers/dma/stm32/
stm32-dma.c
210 struct virt_dma_chan vchan; member
253 return &chan->vchan.chan.dev->device; in chan2dev()
505 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_terminate_all()
526 vchan_synchronize(&chan->vchan); in stm32_dma_synchronize()
570 vdesc = vchan_next_desc(&chan->vchan); in stm32_dma_start_transfer()
766 spin_lock(&chan->vchan.lock); in stm32_dma_chan_irq()
811 spin_unlock(&chan->vchan.lock); in stm32_dma_chan_irq()
821 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_issue_pending()
839 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_pause()
866 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_resume()
[all …]
stm32-mdma.c
237 struct virt_dma_chan vchan; member
281 return &chan->vchan.chan.dev->device; in chan2dev()
1149 vdesc = vchan_next_desc(&chan->vchan); in stm32_mdma_start_transfer()
1198 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_mdma_issue_pending()
1200 if (!vchan_issue_pending(&chan->vchan)) in stm32_mdma_issue_pending()
1218 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_mdma_pause()
1294 vchan_synchronize(&chan->vchan); in stm32_mdma_synchronize()
1415 spin_lock(&chan->vchan.lock); in stm32_mdma_irq_handler()
1423 spin_unlock(&chan->vchan.lock); in stm32_mdma_irq_handler()
1476 spin_unlock(&chan->vchan.lock); in stm32_mdma_irq_handler()
[all …]
stm32-dma3.c
281 struct virt_dma_chan vchan; member
323 return &chan->vchan.chan.dev->device; in chan2dev()
743 vdesc = vchan_next_desc(&chan->vchan); in stm32_dma3_chan_start()
983 spin_lock(&chan->vchan.lock); in stm32_dma3_chan_irq()
987 spin_unlock(&chan->vchan.lock); in stm32_dma3_chan_irq()
1032 spin_unlock(&chan->vchan.lock); in stm32_dma3_chan_irq()
1097 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma3_free_chan_resources()
1415 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma3_terminate_all()
1438 vchan_synchronize(&chan->vchan); in stm32_dma3_synchronize()
1459 vd = vchan_find_desc(&chan->vchan, cookie); in stm32_dma3_tx_status()
[all …]
/linux/drivers/dma/xilinx/
xdma.c
66 struct virt_dma_chan vchan; member
480 xchan->vchan.desc_free = xdma_free_desc; in xdma_alloc_channels()
481 vchan_init(&xchan->vchan, &xdev->dma_dev); in xdma_alloc_channels()
502 if (vchan_issue_pending(&xdma_chan->vchan)) in xdma_issue_pending()
524 vd = vchan_next_desc(&xdma_chan->vchan); in xdma_terminate_all()
553 vchan_synchronize(&xdma_chan->vchan); in xdma_synchronize()
805 vchan_free_chan_resources(&xdma_chan->vchan); in xdma_free_chan_resources()
854 vd = vchan_find_desc(&xdma_chan->vchan, cookie); in xdma_tx_status()
888 spin_lock(&xchan->vchan.lock); in xdma_channel_isr()
894 vd = vchan_next_desc(&xchan->vchan); in xdma_channel_isr()
[all …]
xilinx_dpdma.c
227 struct virt_dma_chan vchan; member
925 vdesc = vchan_next_desc(&chan->vchan); in xilinx_dpdma_chan_queue_transfer()
1177 spin_lock(&chan->vchan.lock); in xilinx_dpdma_chan_vsync_irq()
1184 spin_unlock(&chan->vchan.lock); in xilinx_dpdma_chan_vsync_irq()
1258 &chan->vchan.desc_issued); in xilinx_dpdma_chan_handle_err()
1364 spin_lock(&chan->vchan.lock); in xilinx_dpdma_issue_pending()
1365 if (vchan_issue_pending(&chan->vchan)) in xilinx_dpdma_issue_pending()
1367 spin_unlock(&chan->vchan.lock); in xilinx_dpdma_issue_pending()
1493 vchan_synchronize(&chan->vchan); in xilinx_dpdma_synchronize()
1596 spin_lock(&chan->vchan.lock); in xilinx_dpdma_chan_err_task()
[all …]
/linux/drivers/dma/fsl-dpaa2-qdma/
dpaa2-qdma.c
73 spin_lock_irqsave(&dpaa2_chan->vchan.lock, flags); in dpaa2_qdma_free_chan_resources()
74 vchan_get_all_descriptors(&dpaa2_chan->vchan, &head); in dpaa2_qdma_free_chan_resources()
75 spin_unlock_irqrestore(&dpaa2_chan->vchan.lock, flags); in dpaa2_qdma_free_chan_resources()
77 vchan_dma_desc_free_list(&dpaa2_chan->vchan, &head); in dpaa2_qdma_free_chan_resources()
282 spin_lock(&dpaa2_chan->vchan.lock); in dpaa2_qdma_issue_pending()
283 if (vchan_issue_pending(&dpaa2_chan->vchan)) { in dpaa2_qdma_issue_pending()
284 vdesc = vchan_next_desc(&dpaa2_chan->vchan); in dpaa2_qdma_issue_pending()
301 spin_unlock(&dpaa2_chan->vchan.lock); in dpaa2_qdma_issue_pending()
441 spin_lock(&qchan->vchan.lock); in dpaa2_qdma_fqdan_cb()
444 spin_unlock(&qchan->vchan.lock); in dpaa2_qdma_fqdan_cb()
[all …]
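
The dpaa2-qdma.c hits above (and the earlier dma-axi-dmac.c and lgm-dma.c ones) show the usual teardown idiom: collect the queued descriptors while holding vchan.lock, then free them only after dropping it, since the descriptor-free path can take the lock itself. A sketch continuing the hypothetical foo_dma_chan from the top of the results; the vchan_* calls are the real virt-dma helpers:

static int foo_dma_terminate_all(struct dma_chan *chan)
{
	struct foo_dma_chan *fchan = to_foo_dma_chan(chan);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&fchan->vchan.lock, flags);
	/* a real driver would also stop the hardware channel here */
	vchan_get_all_descriptors(&fchan->vchan, &head);
	spin_unlock_irqrestore(&fchan->vchan.lock, flags);

	/* free outside the lock, as the drivers above do */
	vchan_dma_desc_free_list(&fchan->vchan, &head);

	return 0;
}
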
/linux/drivers/dma/ti/
edma.c
216 struct virt_dma_chan vchan; member
845 vchan_synchronize(&echan->vchan); in edma_synchronize()
1458 spin_lock(&echan->vchan.lock); in edma_completion_handler()
1463 spin_unlock(&echan->vchan.lock); in edma_completion_handler()
1487 spin_unlock(&echan->vchan.lock); in edma_completion_handler()
1546 spin_lock(&echan->vchan.lock); in edma_error_handler()
1576 spin_unlock(&echan->vchan.lock); in edma_error_handler()
2008 vchan_init(&echan->vchan, m_ddev); in edma_dma_init()
2010 vchan_init(&echan->vchan, s_ddev); in edma_dma_init()
2243 chan = &echan->vchan.chan; in of_edma_xlate()
[all …]
/linux/drivers/dma/amd/qdma/
qdma.c
466 q->vchan.desc_free = qdma_free_vdesc; in qdma_alloc_queues()
467 vchan_init(&q->vchan, &qdev->dma_dev); in qdma_alloc_queues()
554 vchan_free_chan_resources(&queue->vchan); in qdma_free_queue_resources()
615 if (!vchan_next_desc(&queue->vchan)) in qdma_xfer_start()
635 spin_lock_irqsave(&queue->vchan.lock, flags); in qdma_issue_pending()
636 if (vchan_issue_pending(&queue->vchan)) { in qdma_issue_pending()
711 struct virt_dma_chan *vc = &q->vchan; in qdma_fill_pending_vdesc()
866 spin_lock_irqsave(&q->vchan.lock, flags); in qdma_queue_isr()
869 vd = vchan_next_desc(&q->vchan); in qdma_queue_isr()
878 vd = vchan_next_desc(&q->vchan); in qdma_queue_isr()
[all …]
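
Taken together, the start-of-transfer hits above (idma64.c, hsu.c, st_fdma.c, the stm32 drivers, qdma.c) follow one pattern: under vchan.lock, vchan_issue_pending() moves submitted descriptors onto the issued list, and vchan_next_desc() hands the driver the next one to program into the hardware. A sketch of that flow, again using the hypothetical foo_dma_chan and its made-up busy flag:

static void foo_dma_issue_pending(struct dma_chan *chan)
{
	struct foo_dma_chan *fchan = to_foo_dma_chan(chan);
	struct virt_dma_desc *vd;
	unsigned long flags;

	spin_lock_irqsave(&fchan->vchan.lock, flags);
	if (vchan_issue_pending(&fchan->vchan) && !fchan->busy) {
		vd = vchan_next_desc(&fchan->vchan);
		if (vd) {
			list_del(&vd->node);	/* detach before programming, as e.g. idma64 does */
			fchan->busy = true;
			/* a real driver would write vd's descriptor(s) to the hardware here */
		}
	}
	spin_unlock_irqrestore(&fchan->vchan.lock, flags);
}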

Completed in 96 milliseconds
