| /linux/drivers/dma/ |
| owl-dma.c | 479 pchan->vchan = vchan; in owl_dma_get_pchan() · 546 vchan->txd = txd; in owl_dma_start_next_txd() · 644 vchan = pchan->vchan; in owl_dma_interrupt() · 645 if (!vchan) { in owl_dma_interrupt() · 653 txd = vchan->txd; in owl_dma_interrupt() · 705 if (vchan->pchan) in owl_dma_terminate_all() · 708 if (vchan->txd) { in owl_dma_terminate_all() · 710 vchan->txd = NULL; in owl_dma_terminate_all() · 756 if (!vchan->pchan && !vchan->txd) in owl_dma_resume() · 779 txd = vchan->txd; in owl_dma_getbytes_chan() · [all …]
|
| sun4i-dma.c | 228 if (vchan->is_dedicated) { in find_and_use_pchan() · 239 pchan->vchan = vchan; in find_and_use_pchan() · 256 pchan->vchan = NULL; in release_pchan() · 339 if (vchan->processing) { in __execute_vchan_pending() · 374 vchan->pchan = pchan; in __execute_vchan_pending() · 633 if (vchan->is_dedicated) in sun4i_dma_prep_dma_memcpy() · 750 if (vchan->is_dedicated) in sun4i_dma_prep_dma_cyclic() · 841 if (vchan->is_dedicated) in sun4i_dma_prep_slave_sg() · 895 vchan->pchan = NULL; in sun4i_dma_terminate_all() · 1029 vchan = pchan->vchan; in sun4i_dma_interrupt() · [all …]
|
| sun6i-dma.c | 453 vchan->irq_type = vchan->cyclic ? DMA_IRQ_PKG : DMA_IRQ_QUEUE; in sun6i_dma_start_desc() · 481 pchan = vchan->phy; in sun6i_dma_tasklet() · 514 pchan->vchan = vchan; in sun6i_dma_tasklet() · 526 vchan = pchan->vchan; in sun6i_dma_tasklet() · 527 if (vchan) { in sun6i_dma_tasklet() · 555 vchan = pchan->vchan; in sun6i_dma_interrupt() · 556 if (vchan && (status & vchan->irq_type)) { in sun6i_dma_interrupt() · 914 vchan->phy = NULL; in sun6i_dma_terminate_all() · 976 if (!vchan->phy && list_empty(&vchan->node)) { in sun6i_dma_issue_pending() · 980 &vchan->vc); in sun6i_dma_issue_pending() · [all …]
|
| st_fdma.c | 79 vdesc = vchan_next_desc(&fchan->vchan); in st_fdma_xfer_desc() · 101 int ch_id = fchan->vchan.chan.chan_id; in st_fdma_ch_sta_update() · 139 spin_lock(&fchan->vchan.lock); in st_fdma_irq_handler() · 157 spin_unlock(&fchan->vchan.lock); in st_fdma_irq_handler() · 294 __func__, fchan->vchan.chan.chan_id); in st_fdma_free_chan_res() · 351 int ch_id = fchan->vchan.chan.chan_id; in config_reqctrl() · 624 int ch_id = fchan->vchan.chan.chan_id; in st_fdma_pause() · 642 int ch_id = fchan->vchan.chan.chan_id; in st_fdma_resume() · 662 int ch_id = fchan->vchan.chan.chan_id; in st_fdma_terminate_all() · 734 list_del(&fchan->vchan.chan.device_node); in st_fdma_free() · [all …]
|
| fsl-edma-common.c | 48 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_enable_request() · 65 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_disable_request() · 109 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_chan_mux() · 166 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all() · 182 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_pause() · 275 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_desc_residue() · 350 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_set_tcd_regs() · 624 lockdep_assert_held(&fsl_chan->vchan.lock); in fsl_edma_xfer_desc() · 626 vdesc = vchan_next_desc(&fsl_chan->vchan); in fsl_edma_xfer_desc() · 697 list_del(&chan->vchan.chan.device_node); in fsl_edma_cleanup_vchan() · [all …]
|
| idma64.c | 118 vdesc = vchan_next_desc(&idma64c->vchan); in idma64_start_transfer() · 142 spin_lock(&idma64c->vchan.lock); in idma64_chan_irq() · 159 spin_unlock(&idma64c->vchan.lock); in idma64_chan_irq() · 334 spin_lock_irqsave(&idma64c->vchan.lock, flags); in idma64_issue_pending() · 378 spin_lock_irqsave(&idma64c->vchan.lock, flags); in idma64_tx_status() · 445 spin_lock_irqsave(&idma64c->vchan.lock, flags); in idma64_pause() · 460 spin_lock_irqsave(&idma64c->vchan.lock, flags); in idma64_resume() · 494 vchan_synchronize(&idma64c->vchan); in idma64_synchronize() · 562 idma64c->vchan.desc_free = idma64_vdesc_free; in idma64_probe() · 563 vchan_init(&idma64c->vchan, &idma64->dma); in idma64_probe() · [all …]
|
| dma-jz4780.c | 129 struct virt_dma_chan vchan; (member) · 486 vdesc = vchan_next_desc(&jzchan->vchan); in jz4780_dma_begin() · 597 vchan_synchronize(&jzchan->vchan); in jz4780_dma_synchronize() · 676 spin_lock(&jzchan->vchan.lock); in jz4780_dma_chan_irq() · 682 dev_warn(&jzchan->vchan.chan.dev->device, in jz4780_dma_chan_irq() · 687 dev_warn(&jzchan->vchan.chan.dev->device, in jz4780_dma_chan_irq() · 716 dev_err(&jzchan->vchan.chan.dev->device, in jz4780_dma_chan_irq() · 720 spin_unlock(&jzchan->vchan.lock); in jz4780_dma_chan_irq() · 772 vchan_free_chan_resources(&jzchan->vchan); in jz4780_dma_free_chan_resources() · 828 &jzdma->chan[data.channel].vchan.chan); in jz4780_of_dma_xlate() · [all …]
|
| dma-axi-dmac.c | 120 struct virt_dma_chan vchan; (member) · 219 vdesc = vchan_next_desc(&chan->vchan); in axi_dmac_start_transfer() · 419 spin_lock(&dmac->chan.vchan.lock); in axi_dmac_interrupt_handler() · 430 spin_unlock(&dmac->chan.vchan.lock); in axi_dmac_interrupt_handler() · 442 spin_lock_irqsave(&chan->vchan.lock, flags); in axi_dmac_terminate_all() · 449 vchan_dma_desc_free_list(&chan->vchan, &head); in axi_dmac_terminate_all() · 458 vchan_synchronize(&chan->vchan); in axi_dmac_synchronize() · 469 spin_lock_irqsave(&chan->vchan.lock, flags); in axi_dmac_issue_pending() · 470 if (vchan_issue_pending(&chan->vchan)) in axi_dmac_issue_pending() · 972 vchan_init(&dmac->chan.vchan, dma_dev); in axi_dmac_probe() · [all …]
|
| stm32-mdma.c | 259 struct virt_dma_chan vchan; (member) · 302 return &chan->vchan.chan.dev->device; in chan2dev() · 1123 vdesc = vchan_next_desc(&chan->vchan); in stm32_mdma_start_transfer() · 1172 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_mdma_issue_pending() · 1174 if (!vchan_issue_pending(&chan->vchan)) in stm32_mdma_issue_pending() · 1192 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_mdma_pause() · 1264 vchan_synchronize(&chan->vchan); in stm32_mdma_synchronize() · 1376 spin_lock(&chan->vchan.lock); in stm32_mdma_irq_handler() · 1384 spin_unlock(&chan->vchan.lock); in stm32_mdma_irq_handler() · 1434 spin_unlock(&chan->vchan.lock); in stm32_mdma_irq_handler() · [all …]
|
| stm32-dma.c | 199 struct virt_dma_chan vchan; (member) · 239 return &chan->vchan.chan.dev->device; in chan2dev() · 496 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_terminate_all() · 508 vchan_dma_desc_free_list(&chan->vchan, &head); in stm32_dma_terminate_all() · 517 vchan_synchronize(&chan->vchan); in stm32_dma_synchronize() · 554 vdesc = vchan_next_desc(&chan->vchan); in stm32_dma_start_transfer() · 652 spin_lock(&chan->vchan.lock); in stm32_dma_chan_irq() · 695 spin_unlock(&chan->vchan.lock); in stm32_dma_chan_irq() · 705 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_issue_pending() · 1194 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_tx_status() · [all …]
|
| fsl-qdma.c | 176 struct virt_dma_chan vchan; (member) · 313 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_qdma_free_chan_resources() · 729 spin_lock(&fsl_comp->qchan->vchan.lock); in fsl_qdma_queue_transfer_complete() · 732 spin_unlock(&fsl_comp->qchan->vchan.lock); in fsl_qdma_queue_transfer_complete() · 999 vdesc = vchan_next_desc(&fsl_chan->vchan); in fsl_qdma_enqueue_desc() · 1039 spin_lock(&fsl_chan->vchan.lock); in fsl_qdma_issue_pending() · 1040 if (vchan_issue_pending(&fsl_chan->vchan)) in fsl_qdma_issue_pending() · 1042 spin_unlock(&fsl_chan->vchan.lock); in fsl_qdma_issue_pending() · 1050 vchan_synchronize(&fsl_chan->vchan); in fsl_qdma_synchronize() · 1268 list_del(&chan->vchan.chan.device_node); in fsl_qdma_cleanup_vchan() · [all …]
|
| st_fdma.h | 125 struct virt_dma_chan vchan; (member) · 186 + (fchan)->vchan.chan.chan_id * 0x4 \ · 191 + (fchan)->vchan.chan.chan_id * 0x4 \ · 208 + (fchan)->vchan.chan.chan_id * FDMA_NODE_SZ \ · 213 + (fchan)->vchan.chan.chan_id * FDMA_NODE_SZ \
|
| fsl-edma.c | 28 vchan_synchronize(&fsl_chan->vchan); in fsl_edma_synchronize() · 48 spin_lock(&fsl_chan->vchan.lock); in fsl_edma_tx_handler() · 52 spin_unlock(&fsl_chan->vchan.lock); in fsl_edma_tx_handler() · 69 spin_unlock(&fsl_chan->vchan.lock); in fsl_edma_tx_handler() · 361 fsl_chan->vchan.desc_free = fsl_edma_free_desc; in fsl_edma_probe() · 362 vchan_init(&fsl_chan->vchan, &fsl_edma->dma_dev); in fsl_edma_probe() · 450 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_suspend_late() · 459 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_suspend_late()
|
| pxa_dma.c | 100 struct pxad_chan *vchan; (member) · 390 if (!phy->vchan) { in lookup_phy() · 391 phy->vchan = pchan; in lookup_phy() · 425 chan->phy->vchan = NULL; in pxad_free_phy() · 455 if (!phy->vchan) in phy_enable() · 458 dev_dbg(&phy->vchan->vc.chan.dev->device, in phy_enable() · 464 reg = pxad_drcmr(phy->vchan->drcmr); in phy_enable() · 487 dev_dbg(&phy->vchan->vc.chan.dev->device, in phy_disable() · 598 __func__, &phy->vchan); in clear_chan_irq() · 606 struct pxad_chan *chan = phy->vchan; in pxad_chan_handler() · [all …]
|
| mcf-edma.c | 37 spin_lock(&mcf_chan->vchan.lock); in mcf_edma_tx_handler() · 41 spin_unlock(&mcf_chan->vchan.lock); in mcf_edma_tx_handler() · 58 spin_unlock(&mcf_chan->vchan.lock); in mcf_edma_tx_handler() · 230 mcf_chan->vchan.desc_free = fsl_edma_free_desc; in mcf_edma_probe() · 231 vchan_init(&mcf_chan->vchan, &mcf_edma->dma_dev); in mcf_edma_probe()
|
| mmp_pdma.c | 119 struct mmp_pdma_chan *vchan; (member) · 155 if (!phy->vchan) in enable_chan() · 158 reg = DRCMR(phy->vchan->drcmr); in enable_chan() · 162 if (phy->vchan->byte_align) in enable_chan() · 195 if ((dcsr & DCSR_BUSERR) && (phy->vchan)) in clear_chan_irq() · 196 dev_warn(phy->vchan->dev, "DCSR_BUSERR\n"); in clear_chan_irq() · 208 tasklet_schedule(&phy->vchan->tasklet); in mmp_pdma_chan_handler() · 260 if (!phy->vchan) { in lookup_phy() · 261 phy->vchan = pchan; in lookup_phy() · 287 pchan->phy->vchan = NULL; in mmp_pdma_free_phy() · (the virt-dma pattern shared by the hits in this directory is sketched after this group)
|
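Most of the hits above (references to the vchan identifier under drivers/dma) follow the same virt-dma pattern: the driver's per-channel state embeds a struct virt_dma_chan (the vchan member flagged in dma-jz4780.c, stm32-dma.c and others), converts a generic struct dma_chan back with container_of(), and registers the channel at probe time with vchan_init() after setting the desc_free callback, as the idma64.c, fsl-edma.c and dma-axi-dmac.c hits show. The sketch below is a minimal illustration of that skeleton, not any one of the listed drivers; the foo_dma_* names and the pchan/desc/len fields are assumptions introduced only for the example.

```c
#include <linux/dmaengine.h>
#include <linux/slab.h>
#include "virt-dma.h"			/* drivers/dma/virt-dma.h */

struct foo_dma_desc {
	struct virt_dma_desc vdesc;	/* virt-dma bookkeeping: tx descriptor + list node */
	size_t len;			/* total transfer size, kept for residue reporting */
	/* hardware descriptor, sg list, ... */
};

struct foo_dma_chan {
	struct virt_dma_chan vchan;	/* embeds struct dma_chan, the lock and the desc lists */
	struct foo_dma_desc *desc;	/* descriptor currently programmed on the hardware */
	void __iomem *base;		/* per-channel registers (driver specific) */
};

static inline struct foo_dma_chan *to_foo_dma_chan(struct dma_chan *c)
{
	return container_of(c, struct foo_dma_chan, vchan.chan);
}

static void foo_dma_desc_free(struct virt_dma_desc *vdesc)
{
	kfree(container_of(vdesc, struct foo_dma_desc, vdesc));
}

/* probe time, once per channel: hook the free callback and put the embedded
 * dma_chan on the dma_device's channel list */
static void foo_dma_chan_setup(struct foo_dma_chan *fchan, struct dma_device *ddev)
{
	fchan->vchan.desc_free = foo_dma_desc_free;
	vchan_init(&fchan->vchan, ddev);
}
```

Drivers such as owl-dma.c, sun4i-dma.c, pxa_dma.c and mmp_pdma.c additionally keep a virtual-to-physical channel mapping (the pchan->vchan / vchan->pchan assignments above), since they multiplex more virtual channels than the hardware has physical ones.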
| /linux/drivers/dma/hsu/ |
| hsu.c | 113 vdesc = vchan_next_desc(&hsuc->vchan); in hsu_dma_start_transfer() · 160 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_get_status() · 214 stat = this_cpu_ptr(hsuc->vchan.chan.local); in hsu_dma_do_irq() · 216 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_do_irq() · 295 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_issue_pending() · 331 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_tx_status() · 361 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_pause() · 416 vchan_synchronize(&hsuc->vchan); in hsu_dma_synchronize() · 444 hsuc->vchan.desc_free = hsu_dma_desc_free; in hsu_dma_probe() · 445 vchan_init(&hsuc->vchan, &hsu->dma); in hsu_dma_probe() · [all …]
|
| hsu.h | 83 struct virt_dma_chan vchan; (member) · 96 return container_of(chan, struct hsu_dma_chan, vchan.chan); in to_hsu_dma_chan()
|
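The hsu.c hits (hsu_dma_issue_pending(), hsu_dma_start_transfer()) outline the usual hand-off from the dmaengine core to the hardware: device_issue_pending takes vchan.lock, moves submitted descriptors onto the issued list with vchan_issue_pending(), and, if the channel is idle, kicks the hardware with the next descriptor returned by vchan_next_desc(). A sketch along those lines, reusing the hypothetical foo_dma_chan from the previous block; foo_dma_hw_start() stands in for the driver's register programming:

```c
/* called with fchan->vchan.lock held */
static void foo_dma_start_transfer(struct foo_dma_chan *fchan)
{
	struct virt_dma_desc *vdesc;

	vdesc = vchan_next_desc(&fchan->vchan);	/* head of vchan.desc_issued, or NULL */
	if (!vdesc) {
		fchan->desc = NULL;
		return;
	}

	list_del(&vdesc->node);			/* the driver owns it until completion */
	fchan->desc = container_of(vdesc, struct foo_dma_desc, vdesc);
	foo_dma_hw_start(fchan, fchan->desc);	/* program and start the hardware */
}

static void foo_dma_issue_pending(struct dma_chan *chan)
{
	struct foo_dma_chan *fchan = to_foo_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&fchan->vchan.lock, flags);
	/* splice desc_submitted onto desc_issued; start only if nothing is running */
	if (vchan_issue_pending(&fchan->vchan) && !fchan->desc)
		foo_dma_start_transfer(fchan);
	spin_unlock_irqrestore(&fchan->vchan.lock, flags);
}
```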
| /linux/drivers/dma/sf-pdma/ |
| sf-pdma.c | 113 spin_lock_irqsave(&chan->vchan.lock, iflags); in sf_pdma_prep_dma_memcpy() · 155 spin_lock_irqsave(&chan->vchan.lock, flags); in sf_pdma_free_chan_resources() · 175 spin_lock_irqsave(&chan->vchan.lock, flags); in sf_pdma_desc_residue() · 184 vd = vchan_find_desc(&chan->vchan, cookie); in sf_pdma_desc_residue() · 219 spin_lock_irqsave(&chan->vchan.lock, flags); in sf_pdma_terminate_all() · 269 spin_lock_irqsave(&chan->vchan.lock, flags); in sf_pdma_issue_pending() · 332 spin_lock(&chan->vchan.lock); in sf_pdma_done_isr() · 349 spin_unlock(&chan->vchan.lock); in sf_pdma_done_isr() · 473 vchan_init(&chan->vchan, &pdma->dma_dev); in sf_pdma_setup_chans() · 564 list_del(&ch->vchan.chan.device_node); in sf_pdma_remove() · [all …]
|
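sf_pdma_desc_residue() above leans on vchan_find_desc(): if the queried cookie is still on the issued list nothing has been transferred yet, and if it belongs to the in-flight descriptor the hardware is asked how much is left. A hedged sketch of a device_tx_status callback in that shape; foo_dma_hw_bytes_left() is a hypothetical hardware query, and len is the field assumed in the first sketch:

```c
static enum dma_status foo_dma_tx_status(struct dma_chan *chan, dma_cookie_t cookie,
					 struct dma_tx_state *txstate)
{
	struct foo_dma_chan *fchan = to_foo_dma_chan(chan);
	struct virt_dma_desc *vdesc;
	enum dma_status status;
	unsigned long flags;
	size_t residue = 0;

	status = dma_cookie_status(chan, cookie, txstate);
	if (status == DMA_COMPLETE || !txstate)
		return status;

	spin_lock_irqsave(&fchan->vchan.lock, flags);

	vdesc = vchan_find_desc(&fchan->vchan, cookie);
	if (vdesc)
		/* still queued: the whole length remains */
		residue = container_of(vdesc, struct foo_dma_desc, vdesc)->len;
	else if (fchan->desc && fchan->desc->vdesc.tx.cookie == cookie)
		/* currently running: read the remaining count from the hardware */
		residue = foo_dma_hw_bytes_left(fchan);

	spin_unlock_irqrestore(&fchan->vchan.lock, flags);

	dma_set_residue(txstate, residue);
	return status;
}
```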
| /linux/drivers/dma/lgm/ |
| lgm-dma.c | 195 struct virt_dma_chan vchan; (member) · 1021 spin_lock_irqsave(&c->vchan.lock, flags); in ldma_issue_pending() · 1022 if (vchan_issue_pending(&c->vchan)) { in ldma_issue_pending() · 1026 vdesc = vchan_next_desc(&c->vchan); in ldma_issue_pending() · 1051 vchan_synchronize(&c->vchan); in ldma_synchronize() · 1062 spin_lock_irqsave(&c->vchan.lock, flags); in ldma_terminate_all() · 1065 vchan_dma_desc_free_list(&c->vchan, &head); in ldma_terminate_all() · 1315 struct virt_dma_chan *vc = &c->vchan; in dma_work() · 1321 spin_lock_irqsave(&c->vchan.lock, flags); in dma_work() · 1415 vchan_init(&c->vchan, &d->dma_dev); in ldma_dma_init_v22() · [all …]
|
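ldma_terminate_all() and ldma_synchronize() above show the standard teardown split: everything queued is collected under vchan.lock with vchan_get_all_descriptors(), freed outside the lock with vchan_dma_desc_free_list(), and device_synchronize defers to vchan_synchronize() to wait out the virt-dma completion tasklet. A sketch of the same split; foo_dma_hw_stop() is a hypothetical channel abort, and vchan_terminate_vdesc() is the virt-dma helper that parks the in-flight descriptor until synchronize runs:

```c
static int foo_dma_terminate_all(struct dma_chan *chan)
{
	struct foo_dma_chan *fchan = to_foo_dma_chan(chan);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&fchan->vchan.lock, flags);

	foo_dma_hw_stop(fchan);			/* abort whatever the hardware is doing */
	if (fchan->desc) {
		vchan_terminate_vdesc(&fchan->desc->vdesc);
		fchan->desc = NULL;
	}
	vchan_get_all_descriptors(&fchan->vchan, &head);

	spin_unlock_irqrestore(&fchan->vchan.lock, flags);

	/* run the desc_free callbacks outside the spinlock */
	vchan_dma_desc_free_list(&fchan->vchan, &head);
	return 0;
}

static void foo_dma_synchronize(struct dma_chan *chan)
{
	/* waits for the virt-dma tasklet and frees terminated descriptors */
	vchan_synchronize(&to_foo_dma_chan(chan)->vchan);
}
```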
| /linux/drivers/staging/ralink-gdma/ |
| ralink-gdma.c | 107 struct virt_dma_chan vchan; (member) · 227 spin_lock_irqsave(&chan->vchan.lock, flags); in gdma_dma_terminate_all() · 230 vchan_get_all_descriptors(&chan->vchan, &head); in gdma_dma_terminate_all() · 231 spin_unlock_irqrestore(&chan->vchan.lock, flags); in gdma_dma_terminate_all() · 233 vchan_dma_desc_free_list(&chan->vchan, &head); in gdma_dma_terminate_all() · 416 vdesc = vchan_next_desc(&chan->vchan); in gdma_next_desc() · 435 spin_lock_irqsave(&chan->vchan.lock, flags); in gdma_dma_chan_irq() · 499 spin_lock_irqsave(&chan->vchan.lock, flags); in gdma_dma_issue_pending() · 668 spin_lock_irqsave(&chan->vchan.lock, flags); in gdma_dma_tx_status() · 866 chan->vchan.desc_free = gdma_dma_desc_free; in gdma_dma_probe() · [all …]
|
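gdma_dma_chan_irq() and gdma_next_desc() above show the completion side of the same machinery: the interrupt handler takes vchan.lock, retires the finished descriptor and immediately programs the next issued one. The retirement step in virt-dma drivers is normally vchan_cookie_complete(), which hands the descriptor to the virt-dma tasklet for the client callback and the desc_free call. A sketch reusing the earlier hypothetical helpers; foo_dma_desc_is_cyclic() is an assumption:

```c
#include <linux/interrupt.h>	/* irqreturn_t, IRQ_HANDLED */

static irqreturn_t foo_dma_irq_handler(int irq, void *dev_id)
{
	struct foo_dma_chan *fchan = dev_id;

	spin_lock(&fchan->vchan.lock);

	if (fchan->desc) {
		if (foo_dma_desc_is_cyclic(fchan->desc)) {
			/* cyclic transfers report each period and keep running */
			vchan_cyclic_callback(&fchan->desc->vdesc);
		} else {
			/* defer the callback and desc_free to the virt-dma tasklet ... */
			vchan_cookie_complete(&fchan->desc->vdesc);
			fchan->desc = NULL;
			/* ... and start the next descriptor, if one was issued */
			foo_dma_start_transfer(fchan);
		}
	}

	spin_unlock(&fchan->vchan.lock);
	return IRQ_HANDLED;
}
```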
| /linux/drivers/staging/mt7621-dma/ |
| hsdma-mt7621.c | 144 struct virt_dma_chan vchan; (member) · 288 spin_lock_bh(&chan->vchan.lock); in mtk_hsdma_terminate_all() · 292 spin_unlock_bh(&chan->vchan.lock); in mtk_hsdma_terminate_all() · 294 vchan_dma_desc_free_list(&chan->vchan, &head); in mtk_hsdma_terminate_all() · 387 vdesc = vchan_next_desc(&chan->vchan); in gdma_next_desc() · 405 spin_lock_bh(&chan->vchan.lock); in mtk_hsdma_chan_done() · 419 spin_unlock_bh(&chan->vchan.lock); in mtk_hsdma_chan_done() · 446 spin_lock_bh(&chan->vchan.lock); in mtk_hsdma_issue_pending() · 455 spin_unlock_bh(&chan->vchan.lock); in mtk_hsdma_issue_pending() · 700 chan->vchan.desc_free = mtk_hsdma_desc_free; in mtk_hsdma_probe() · [all …]
|
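The hsdma-mt7621.c hits cover the issue and completion paths, but the truncated listing hides the prep side that feeds them. In virt-dma based drivers that side usually ends in vchan_tx_prep(), which ties the descriptor's dma_async_tx_descriptor to the channel so that a later dmaengine_submit() parks it on desc_submitted and issue_pending moves it to desc_issued. A hedged sketch of a memcpy prep in that style, reusing the earlier hypothetical types; foo_dma_fill_hw_desc() is an assumption:

```c
static struct dma_async_tx_descriptor *
foo_dma_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
			size_t len, unsigned long flags)
{
	struct foo_dma_chan *fchan = to_foo_dma_chan(chan);
	struct foo_dma_desc *desc;

	desc = kzalloc(sizeof(*desc), GFP_NOWAIT);	/* prep may run in atomic context */
	if (!desc)
		return NULL;

	desc->len = len;
	foo_dma_fill_hw_desc(desc, dst, src, len);	/* build the hardware descriptor */

	/* returns &desc->vdesc.tx, with ->tx_submit pointing at the virt-dma
	 * implementation that queues the descriptor on vchan.desc_submitted */
	return vchan_tx_prep(&fchan->vchan, &desc->vdesc, flags);
}
```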
| /linux/drivers/dma/fsl-dpaa2-qdma/ |
| dpaa2-qdma.c | 71 spin_lock_irqsave(&dpaa2_chan->vchan.lock, flags); in dpaa2_qdma_free_chan_resources() · 72 vchan_get_all_descriptors(&dpaa2_chan->vchan, &head); in dpaa2_qdma_free_chan_resources() · 73 spin_unlock_irqrestore(&dpaa2_chan->vchan.lock, flags); in dpaa2_qdma_free_chan_resources() · 75 vchan_dma_desc_free_list(&dpaa2_chan->vchan, &head); in dpaa2_qdma_free_chan_resources() · 280 spin_lock(&dpaa2_chan->vchan.lock); in dpaa2_qdma_issue_pending() · 281 if (vchan_issue_pending(&dpaa2_chan->vchan)) { in dpaa2_qdma_issue_pending() · 282 vdesc = vchan_next_desc(&dpaa2_chan->vchan); in dpaa2_qdma_issue_pending() · 299 spin_unlock(&dpaa2_chan->vchan.lock); in dpaa2_qdma_issue_pending() · 439 spin_lock(&qchan->vchan.lock); in dpaa2_qdma_fqdan_cb() · 442 spin_unlock(&qchan->vchan.lock); in dpaa2_qdma_fqdan_cb() · [all …]
|
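dpaa2_qdma_free_chan_resources() above open-codes the channel cleanup (collect everything under the lock, free it outside), which is exactly what the vchan_free_chan_resources() helper used by xilinx_dpdma.c in the next group wraps. A sketch showing the two equivalent forms:

```c
static void foo_dma_free_chan_resources(struct dma_chan *chan)
{
	struct foo_dma_chan *fchan = to_foo_dma_chan(chan);

	/* the client is expected to have terminated and synchronized the
	 * channel before releasing it, so nothing is in flight here */
	vchan_free_chan_resources(&fchan->vchan);

	/*
	 * Open-coded equivalent, as in dpaa2-qdma.c:
	 *
	 *	unsigned long flags;
	 *	LIST_HEAD(head);
	 *
	 *	spin_lock_irqsave(&fchan->vchan.lock, flags);
	 *	vchan_get_all_descriptors(&fchan->vchan, &head);
	 *	spin_unlock_irqrestore(&fchan->vchan.lock, flags);
	 *	vchan_dma_desc_free_list(&fchan->vchan, &head);
	 */
}
```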
| /linux/drivers/dma/xilinx/ |
| xilinx_dpdma.c | 225 struct virt_dma_chan vchan; (member) · 845 vdesc = vchan_next_desc(&chan->vchan); in xilinx_dpdma_chan_queue_transfer() · 1175 list_empty(&chan->vchan.desc_issued)) { in xilinx_dpdma_chan_handle_err() · 1178 &chan->vchan.desc_issued); in xilinx_dpdma_chan_handle_err() · 1255 vchan_free_chan_resources(&chan->vchan); in xilinx_dpdma_free_chan_resources() · 1266 spin_lock_irqsave(&chan->vchan.lock, flags); in xilinx_dpdma_issue_pending() · 1267 if (vchan_issue_pending(&chan->vchan)) in xilinx_dpdma_issue_pending() · 1350 spin_lock_irqsave(&chan->vchan.lock, flags); in xilinx_dpdma_terminate_all() · 1390 vchan_synchronize(&chan->vchan); in xilinx_dpdma_synchronize() · 1570 vchan_init(&chan->vchan, &xdev->common); in xilinx_dpdma_chan_init() · [all …]
|
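Beyond the usual issue/terminate/synchronize hits, xilinx_dpdma_chan_handle_err() shows a less common use of the vchan lists: after a channel error the interrupted descriptor is put back on vchan.desc_issued (only when that list is empty, per the hit at line 1175) so the normal restart path retries it instead of completing it. A hedged and simplified sketch of that kind of recovery, which always re-queues at the head of the issued list; real drivers typically bound retries or fail the descriptor on persistent errors:

```c
/* called with fchan->vchan.lock held, after the hardware has been stopped */
static void foo_dma_chan_handle_err(struct foo_dma_chan *fchan)
{
	if (!fchan->desc)
		return;

	dev_warn(&fchan->vchan.chan.dev->device,
		 "channel error, re-queuing active descriptor\n");

	/* put the interrupted descriptor back at the head of the issued list so
	 * the next foo_dma_start_transfer() picks it up again */
	list_add(&fchan->desc->vdesc.node, &fchan->vchan.desc_issued);
	fchan->desc = NULL;
}
```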
| /linux/drivers/dma/ti/ |
| edma.c | 225 struct virt_dma_chan vchan; (member) · 902 vchan_synchronize(&echan->vchan); in edma_synchronize() · 1515 spin_lock(&echan->vchan.lock); in edma_completion_handler() · 1520 spin_unlock(&echan->vchan.lock); in edma_completion_handler() · 1544 spin_unlock(&echan->vchan.lock); in edma_completion_handler() · 1603 spin_lock(&echan->vchan.lock); in edma_error_handler() · 1633 spin_unlock(&echan->vchan.lock); in edma_error_handler() · 2066 vchan_init(&echan->vchan, m_ddev); in edma_dma_init() · 2068 vchan_init(&echan->vchan, s_ddev); in edma_dma_init() · 2301 chan = &echan->vchan.chan; in of_edma_xlate() · [all …]
|
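Finally, the of_edma_xlate() hit shows how the embedded dma_chan doubles as the handle returned to device-tree clients: the xlate callback resolves the DT arguments to a driver channel and hands back &echan->vchan.chan through dma_get_slave_channel(). A sketch of that translation; struct foo_dma_device, its chans[]/nr_channels fields and the meaning of args[0] are assumptions for the example:

```c
#include <linux/of_dma.h>

struct foo_dma_device {
	struct dma_device ddev;
	unsigned int nr_channels;
	struct foo_dma_chan *chans;	/* array of nr_channels channels */
};

static struct dma_chan *foo_dma_of_xlate(struct of_phandle_args *dma_spec,
					 struct of_dma *ofdma)
{
	struct foo_dma_device *fdev = ofdma->of_dma_data;
	unsigned int request;

	if (dma_spec->args_count < 1)
		return NULL;

	request = dma_spec->args[0];	/* interpretation is binding specific */
	if (request >= fdev->nr_channels)
		return NULL;

	/* mark the channel in use and return its embedded dma_chan */
	return dma_get_slave_channel(&fdev->chans[request].vchan.chan);
}

/* registered at probe time with:
 *	of_dma_controller_register(dev->of_node, foo_dma_of_xlate, fdev);
 */
```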