
Searched refs:dchan (Results 1 – 25 of 26) sorted by relevance


/linux-6.3-rc2/drivers/rapidio/devices/
tsi721_dma.c
306 tsi_err(&bdma_chan->dchan.dev->device, in tsi721_start_dma()
313 tsi_err(&bdma_chan->dchan.dev->device, in tsi721_start_dma()
411 struct dma_chan *dchan = desc->txd.chan; in tsi721_submit_sg() local
586 tsi_err(&bdma_chan->dchan.dev->device, in tsi721_dma_tasklet()
650 tsi_err(&bdma_chan->dchan.dev->device, in tsi721_dma_tasklet()
699 tsi_err(&bdma_chan->dchan.dev->device, in tsi721_tx_submit()
756 dma_cookie_init(dchan); in tsi721_alloc_chan_resources()
860 tsi_err(&dchan->dev->device, in tsi721_prep_rio_sg()
885 tsi_debug(DMA, &dchan->dev->device, in tsi721_prep_rio_sg()
980 bdma_chan->dchan.cookie = 1; in tsi721_register_dma()
[all …]
tsi721.h
701 struct dma_chan dchan; member
/linux-6.3-rc2/drivers/dma/
nbpfaxi.c
604 struct nbpf_channel *chan = nbpf_to_chan(dchan); in nbpf_issue_pending()
629 struct nbpf_channel *chan = nbpf_to_chan(dchan); in nbpf_tx_status()
692 struct dma_chan *dchan = &chan->dma_chan; in nbpf_desc_page_alloc() local
700 struct device *dev = dchan->device->dev; in nbpf_desc_page_alloc()
864 static int nbpf_pause(struct dma_chan *dchan) in nbpf_pause() argument
883 dev_dbg(dchan->device->dev, "Terminating\n"); in nbpf_terminate_all()
891 static int nbpf_config(struct dma_chan *dchan, in nbpf_config() argument
1092 struct dma_chan *dchan; in nbpf_of_xlate() local
1099 if (!dchan) in nbpf_of_xlate()
1105 chan = nbpf_to_chan(dchan); in nbpf_of_xlate()
[all …]
mmp_pdma.c
135 #define to_mmp_pdma_chan(dchan) \ argument
136 container_of(dchan, struct mmp_pdma_chan, chan)
444 mmp_pdma_prep_memcpy(struct dma_chan *dchan, in mmp_pdma_prep_memcpy() argument
452 if (!dchan) in mmp_pdma_prep_memcpy()
458 chan = to_mmp_pdma_chan(dchan); in mmp_pdma_prep_memcpy()
605 mmp_pdma_prep_dma_cyclic(struct dma_chan *dchan, in mmp_pdma_prep_dma_cyclic() argument
614 if (!dchan || !len || !period_len) in mmp_pdma_prep_dma_cyclic()
624 chan = to_mmp_pdma_chan(dchan); in mmp_pdma_prep_dma_cyclic()
699 if (!dchan) in mmp_pdma_config_write()
748 if (!dchan) in mmp_pdma_terminate_all()
[all …]
fsldma.c
372 if (!dchan) in fsl_dma_external_start()
375 chan = to_fsl_chan(dchan); in fsl_dma_external_start()
691 struct fsldma_chan *chan = to_fsl_chan(dchan); in fsl_dma_alloc_chan_resources()
744 struct fsldma_chan *chan = to_fsl_chan(dchan); in fsl_dma_free_chan_resources()
759 fsl_dma_prep_memcpy(struct dma_chan *dchan, in fsl_dma_prep_memcpy() argument
767 if (!dchan) in fsl_dma_prep_memcpy()
773 chan = to_fsl_chan(dchan); in fsl_dma_prep_memcpy()
827 if (!dchan) in fsl_dma_device_terminate_all()
830 chan = to_fsl_chan(dchan); in fsl_dma_device_terminate_all()
853 if (!dchan) in fsl_dma_device_config()
[all …]
pxa_dma.c
138 #define to_pxad_chan(dchan) \ argument
139 container_of(dchan, struct pxad_chan, vc.chan)
686 struct pxad_chan *chan = to_pxad_chan(dchan); in pxad_alloc_chan_resources()
709 struct pxad_chan *chan = to_pxad_chan(dchan); in pxad_free_chan_resources()
826 struct pxad_chan *chan = to_pxad_chan(dchan); in pxad_issue_pending()
915 pxad_prep_memcpy(struct dma_chan *dchan, in pxad_prep_memcpy() argument
919 struct pxad_chan *chan = to_pxad_chan(dchan); in pxad_prep_memcpy()
926 if (!dchan || !len) in pxad_prep_memcpy()
1012 pxad_prep_dma_cyclic(struct dma_chan *dchan, in pxad_prep_dma_cyclic() argument
1023 if (!dchan || !len || !period_len) in pxad_prep_dma_cyclic()
[all …]
altera-msgdma.c
332 msgdma_prep_memcpy(struct dma_chan *dchan, dma_addr_t dma_dst, in msgdma_prep_memcpy() argument
335 struct msgdma_device *mdev = to_mdev(dchan); in msgdma_prep_memcpy()
388 msgdma_prep_slave_sg(struct dma_chan *dchan, struct scatterlist *sgl, in msgdma_prep_slave_sg() argument
393 struct msgdma_device *mdev = to_mdev(dchan); in msgdma_prep_slave_sg()
461 static int msgdma_dma_config(struct dma_chan *dchan, in msgdma_dma_config() argument
464 struct msgdma_device *mdev = to_mdev(dchan); in msgdma_dma_config()
636 static void msgdma_free_chan_resources(struct dma_chan *dchan) in msgdma_free_chan_resources() argument
638 struct msgdma_device *mdev = to_mdev(dchan); in msgdma_free_chan_resources()
653 static int msgdma_alloc_chan_resources(struct dma_chan *dchan) in msgdma_alloc_chan_resources() argument
655 struct msgdma_device *mdev = to_mdev(dchan); in msgdma_alloc_chan_resources()
xgene-dma.c
190 #define to_dma_chan(dchan) \ argument
191 container_of(dchan, struct xgene_dma_chan, dma_chan)
776 struct xgene_dma_chan *chan = to_dma_chan(dchan); in xgene_dma_alloc_chan_resources()
813 struct xgene_dma_chan *chan = to_dma_chan(dchan); in xgene_dma_free_chan_resources()
838 struct dma_chan *dchan, dma_addr_t dst, dma_addr_t *src, in xgene_dma_prep_xor() argument
846 if (unlikely(!dchan || !len)) in xgene_dma_prep_xor()
849 chan = to_dma_chan(dchan); in xgene_dma_prep_xor()
895 if (unlikely(!dchan || !len)) in xgene_dma_prep_pq()
898 chan = to_dma_chan(dchan); in xgene_dma_prep_pq()
964 struct xgene_dma_chan *chan = to_dma_chan(dchan); in xgene_dma_issue_pending()
[all …]
bcm-sba-raid.c
85 #define to_sba_device(dchan) \ argument
86 container_of(dchan, struct sba_device, dma_chan)
497 sba_cleanup_pending_requests(to_sba_device(dchan)); in sba_device_terminate_all()
505 struct sba_device *sba = to_sba_device(dchan); in sba_issue_pending()
542 struct sba_device *sba = to_sba_device(dchan); in sba_tx_status()
544 ret = dma_cookie_status(dchan, cookie, txstate); in sba_tx_status()
550 return dma_cookie_status(dchan, cookie, txstate); in sba_tx_status()
616 struct sba_device *sba = to_sba_device(dchan); in sba_prep_dma_interrupt()
731 struct sba_device *sba = to_sba_device(dchan); in sba_prep_dma_memcpy()
870 struct sba_device *sba = to_sba_device(dchan); in sba_prep_dma_xor()
[all …]
mmp_tdma.c
137 #define to_mmp_tdma_chan(dchan) container_of(dchan, struct mmp_tdma_chan, chan) argument
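
The one-line to_mmp_tdma_chan() macro above, like to_mmp_pdma_chan(), to_pxad_chan(), to_dma_chan() and to_sba_device() elsewhere in these results, is the usual container_of idiom: the generic struct dma_chan handed to a dmaengine callback is embedded in a driver-private channel, and the driver recovers its own structure from the member pointer. A minimal user-space sketch of that idiom, with mock struct names (my_pdma_chan is hypothetical, not a kernel type):

#include <stddef.h>
#include <stdio.h>

/* Mock stand-ins for struct dma_chan and a driver-private channel. */
struct dma_chan { int chan_id; };

struct my_pdma_chan {                /* hypothetical driver channel */
	int burst_len;
	struct dma_chan chan;        /* generic channel embedded inside */
};

/* Same shape as the kernel's container_of(): recover the outer
 * structure from a pointer to one of its members. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

#define to_my_pdma_chan(dchan) \
	container_of(dchan, struct my_pdma_chan, chan)

int main(void)
{
	struct my_pdma_chan pc = { .burst_len = 16, .chan = { .chan_id = 3 } };
	struct dma_chan *dchan = &pc.chan;    /* what a callback receives */
	struct my_pdma_chan *chan = to_my_pdma_chan(dchan);

	printf("chan_id=%d burst_len=%d\n", chan->chan.chan_id, chan->burst_len);
	return 0;
}
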
ste_dma40.c
2208 d40_prep_sg(struct dma_chan *dchan, struct scatterlist *sg_src, in d40_prep_sg() argument
2212 struct d40_chan *chan = container_of(dchan, struct d40_chan, chan); in d40_prep_sg()
2224 d40_set_runtime_config_write(dchan, &chan->slave_config, direction); in d40_prep_sg()
/linux-6.3-rc2/drivers/dma/dw-edma/
dw-edma-core.c
25 struct device *dchan2dev(struct dma_chan *dchan) in dchan2dev() argument
27 return &dchan->dev->device; in dchan2dev()
211 static void dw_edma_device_caps(struct dma_chan *dchan, in dw_edma_device_caps() argument
214 struct dw_edma_chan *chan = dchan2dw_edma_chan(dchan); in dw_edma_device_caps()
232 struct dw_edma_chan *chan = dchan2dw_edma_chan(dchan); in dw_edma_device_config()
240 static int dw_edma_device_pause(struct dma_chan *dchan) in dw_edma_device_pause() argument
329 ret = dma_cookie_status(dchan, cookie, txstate); in dw_edma_device_tx_status()
546 xfer.dchan = dchan; in dw_edma_device_prep_slave_sg()
564 xfer.dchan = dchan; in dw_edma_device_prep_dma_cyclic()
582 xfer.dchan = dchan; in dw_edma_device_prep_interleaved_dma()
[all …]
dw-edma-core.h
128 struct dma_chan *dchan; member
146 struct dw_edma_chan *dchan2dw_edma_chan(struct dma_chan *dchan) in dchan2dw_edma_chan() argument
148 return vc2dw_edma_chan(to_virt_chan(dchan)); in dchan2dw_edma_chan()
/linux-6.3-rc2/drivers/dma/sf-pdma/
sf-pdma.c
85 struct sf_pdma_chan *chan = to_sf_pdma_chan(dchan); in sf_pdma_prep_dma_memcpy()
109 static int sf_pdma_slave_config(struct dma_chan *dchan, in sf_pdma_slave_config() argument
112 struct sf_pdma_chan *chan = to_sf_pdma_chan(dchan); in sf_pdma_slave_config()
121 struct sf_pdma_chan *chan = to_sf_pdma_chan(dchan); in sf_pdma_alloc_chan_resources()
124 dma_cookie_init(dchan); in sf_pdma_alloc_chan_resources()
139 struct sf_pdma_chan *chan = to_sf_pdma_chan(dchan); in sf_pdma_free_chan_resources()
192 sf_pdma_tx_status(struct dma_chan *dchan, in sf_pdma_tx_status() argument
196 struct sf_pdma_chan *chan = to_sf_pdma_chan(dchan); in sf_pdma_tx_status()
199 status = dma_cookie_status(dchan, cookie, txstate); in sf_pdma_tx_status()
209 struct sf_pdma_chan *chan = to_sf_pdma_chan(dchan); in sf_pdma_terminate_all()
[all …]
/linux-6.3-rc2/drivers/dma/dw-axi-dmac/
dw-axi-dmac-platform.c
315 status = dma_cookie_status(dchan, cookie, txstate); in dma_chan_tx_status()
496 dev_err(dchan2dev(dchan), "%s is non-idle!\n", in dma_chan_free_chan_resources()
506 dev_vdbg(dchan2dev(dchan), in dma_chan_free_chan_resources()
1145 dev_warn(dchan2dev(dchan), in dma_chan_terminate_all()
1167 static int dma_chan_pause(struct dma_chan *dchan) in dma_chan_pause() argument
1224 static int dma_chan_resume(struct dma_chan *dchan) in dma_chan_resume() argument
1287 struct dma_chan *dchan; in dw_axi_dma_of_xlate() local
1289 dchan = dma_get_any_slave_channel(&dw->dma); in dw_axi_dma_of_xlate()
1290 if (!dchan) in dw_axi_dma_of_xlate()
1293 chan = dchan_to_axi_dma_chan(dchan); in dw_axi_dma_of_xlate()
[all …]
dw-axi-dmac.h
119 static inline struct device *dchan2dev(struct dma_chan *dchan) in dchan2dev() argument
121 return &dchan->dev->device; in dchan2dev()
139 static inline struct axi_dma_chan *dchan_to_axi_dma_chan(struct dma_chan *dchan) in dchan_to_axi_dma_chan() argument
141 return vc_to_axi_dma_chan(to_virt_chan(dchan)); in dchan_to_axi_dma_chan()
/linux-6.3-rc2/drivers/dma/xilinx/
zynqmp_dma.c
463 struct zynqmp_dma_chan *chan = to_chan(dchan); in zynqmp_dma_alloc_chan_resources()
560 static int zynqmp_dma_device_config(struct dma_chan *dchan, in zynqmp_dma_device_config() argument
563 struct zynqmp_dma_chan *chan = to_chan(dchan); in zynqmp_dma_device_config()
646 static void zynqmp_dma_issue_pending(struct dma_chan *dchan) in zynqmp_dma_issue_pending() argument
648 struct zynqmp_dma_chan *chan = to_chan(dchan); in zynqmp_dma_issue_pending()
677 struct zynqmp_dma_chan *chan = to_chan(dchan); in zynqmp_dma_free_chan_resources()
790 struct zynqmp_dma_chan *chan = to_chan(dchan); in zynqmp_dma_device_terminate_all()
802 static void zynqmp_dma_synchronize(struct dma_chan *dchan) in zynqmp_dma_synchronize() argument
804 struct zynqmp_dma_chan *chan = to_chan(dchan); in zynqmp_dma_synchronize()
820 struct dma_chan *dchan, dma_addr_t dma_dst, in zynqmp_dma_prep_memcpy() argument
[all …]
xilinx_dpdma.c
1197 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan); in xilinx_dpdma_prep_interleaved_dma()
1228 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan); in xilinx_dpdma_alloc_chan_resources()
1254 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan); in xilinx_dpdma_free_chan_resources()
1264 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan); in xilinx_dpdma_issue_pending()
1273 static int xilinx_dpdma_config(struct dma_chan *dchan, in xilinx_dpdma_config() argument
1276 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan); in xilinx_dpdma_config()
1304 static int xilinx_dpdma_pause(struct dma_chan *dchan) in xilinx_dpdma_pause() argument
1306 xilinx_dpdma_chan_pause(to_xilinx_chan(dchan)); in xilinx_dpdma_pause()
1311 static int xilinx_dpdma_resume(struct dma_chan *dchan) in xilinx_dpdma_resume() argument
1313 xilinx_dpdma_chan_unpause(to_xilinx_chan(dchan)); in xilinx_dpdma_resume()
[all …]
xilinx_dma.c
900 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_dma_free_chan_resources()
1083 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_dma_alloc_chan_resources()
1184 dma_cookie_init(dchan); in xilinx_dma_alloc_chan_resources()
1241 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_dma_tx_status()
1247 ret = dma_cookie_status(dchan, cookie, txstate); in xilinx_dma_tx_status()
1650 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_dma_issue_pending()
2008 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_vdma_dma_prep_interleaved()
2090 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_cdma_prep_memcpy()
2148 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_dma_prep_slave_sg()
2247 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan); in xilinx_dma_prep_dma_cyclic()
[all …]
/linux-6.3-rc2/include/linux/
fsldma.h
8 int fsl_dma_external_start(struct dma_chan *dchan, int enable);
rio_drv.h
396 extern void rio_release_dma(struct dma_chan *dchan);
398 struct rio_dev *rdev, struct dma_chan *dchan,
402 struct dma_chan *dchan, u16 destid,
/linux-6.3-rc2/include/linux/dma/
xilinx_dma.h
42 int xilinx_vdma_channel_set_config(struct dma_chan *dchan,
/linux-6.3-rc2/arch/mips/alchemy/common/
irq.c
581 void au1300_set_dbdma_gpio(int dchan, unsigned int gpio) in au1300_set_dbdma_gpio() argument
585 if ((dchan >= 0) && (dchan <= 1)) { in au1300_set_dbdma_gpio()
587 r &= ~(0xff << (8 * dchan)); in au1300_set_dbdma_gpio()
588 r |= (gpio & 0x7f) << (8 * dchan); in au1300_set_dbdma_gpio()
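
The irq.c hits above are a per-channel byte update: the old byte for DBDMA channel dchan is masked out of the register value and the 7-bit GPIO number is shifted into its place. A stand-alone sketch of that mask-and-shift logic, using a plain variable in place of the hardware register (set_dbdma_gpio here is a hypothetical helper for illustration):

#include <stdio.h>
#include <stdint.h>

/* Pack a 7-bit gpio number into byte <dchan> of a 32-bit value,
 * mirroring the mask/shift logic in the matched lines. */
static uint32_t set_dbdma_gpio(uint32_t r, int dchan, unsigned int gpio)
{
	if (dchan >= 0 && dchan <= 1) {
		r &= ~(0xffu << (8 * dchan));        /* clear the old byte */
		r |= (gpio & 0x7f) << (8 * dchan);   /* insert the new gpio */
	}
	return r;
}

int main(void)
{
	uint32_t r = 0xffffffff;

	r = set_dbdma_gpio(r, 1, 0x25);
	printf("0x%08x\n", r);   /* byte 1 now holds 0x25 */
	return 0;
}
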
/linux-6.3-rc2/drivers/rapidio/
rio.c
1794 void rio_release_dma(struct dma_chan *dchan) in rio_release_dma() argument
1796 dma_release_channel(dchan); in rio_release_dma()
1816 struct dma_async_tx_descriptor *rio_dma_prep_xfer(struct dma_chan *dchan, in rio_dma_prep_xfer() argument
1822 if (!dchan->device->device_prep_slave_sg) { in rio_dma_prep_xfer()
1832 return dmaengine_prep_rio_sg(dchan, data->sg, data->sg_len, in rio_dma_prep_xfer()
1854 struct dma_chan *dchan, struct rio_dma_data *data, in rio_dma_prep_slave_sg() argument
1857 return rio_dma_prep_xfer(dchan, rdev->destid, data, direction, flags); in rio_dma_prep_slave_sg()
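
rio_dma_prep_xfer() above first checks that the channel's DMA device actually provides device_prep_slave_sg before building a descriptor, and rio_dma_prep_slave_sg() is a thin wrapper that only supplies the destination ID from rdev. A small mock of that optional-callback guard (mock_dma_device, mock_dma_chan and prep_xfer are invented names, not the RapidIO or dmaengine API):

#include <stdio.h>

/* Mock "device" with an optional prep callback, standing in for
 * device_prep_slave_sg, which a DMA device may or may not provide. */
struct mock_dma_device {
	int (*prep_sg)(int destid, int sg_len);   /* may be NULL */
};

struct mock_dma_chan {
	struct mock_dma_device *device;
};

/* Refuse channels whose device does not implement the operation,
 * mirroring the !dchan->device->device_prep_slave_sg check above. */
static int prep_xfer(struct mock_dma_chan *dchan, int destid, int sg_len)
{
	if (!dchan->device->prep_sg) {
		fprintf(stderr, "channel cannot do slave sg transfers\n");
		return -1;
	}
	return dchan->device->prep_sg(destid, sg_len);
}

static int do_prep(int destid, int sg_len)
{
	printf("prep destid=%d sg_len=%d\n", destid, sg_len);
	return 0;
}

int main(void)
{
	struct mock_dma_device dev = { .prep_sg = do_prep };
	struct mock_dma_chan chan = { .device = &dev };

	return prep_xfer(&chan, 0x10, 4);
}
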
/linux-6.3-rc2/arch/mips/include/asm/mach-au1x00/
au1000.h
879 extern void au1300_set_dbdma_gpio(int dchan, unsigned int gpio);

Completed in 75 milliseconds
