/drivers/spi/

spi-pxa2xx-dma.c
    98   chan = drv_data->controller->dma_tx;  in pxa2xx_spi_dma_prepare_one()
    165  dmaengine_terminate_async(drv_data->controller->dma_tx);  in pxa2xx_spi_dma_prepare()
    173  dma_async_issue_pending(drv_data->controller->dma_tx);  in pxa2xx_spi_dma_start()
    182  dmaengine_terminate_sync(drv_data->controller->dma_tx);  in pxa2xx_spi_dma_stop()
    197  if (!controller->dma_tx)  in pxa2xx_spi_dma_setup()
    203  dma_release_channel(controller->dma_tx);  in pxa2xx_spi_dma_setup()
    204  controller->dma_tx = NULL;  in pxa2xx_spi_dma_setup()
    220  if (controller->dma_tx) {  in pxa2xx_spi_dma_release()
    221  dmaengine_terminate_sync(controller->dma_tx);  in pxa2xx_spi_dma_release()
    222  dma_release_channel(controller->dma_tx);  in pxa2xx_spi_dma_release()
    [all …]
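Read top to bottom, these hits trace the usual dmaengine lifecycle for a SPI TX channel: request it once during setup, kick queued work with dma_async_issue_pending(), and terminate plus release the channel on teardown. A minimal sketch of that pattern follows; the my_spi_dma_* names and the error-handling details are illustrative, not code from spi-pxa2xx-dma.c.

```c
#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/spi/spi.h>

/* Acquire the TX channel; on failure leave dma_tx NULL so the driver falls back to PIO. */
static int my_spi_dma_setup(struct device *dev, struct spi_controller *controller)
{
	controller->dma_tx = dma_request_chan(dev, "tx");
	if (IS_ERR(controller->dma_tx)) {
		int ret = PTR_ERR(controller->dma_tx);

		controller->dma_tx = NULL;
		return ret;
	}
	return 0;
}

/* Stop anything in flight, then hand the channel back to the dmaengine core. */
static void my_spi_dma_release(struct spi_controller *controller)
{
	if (controller->dma_tx) {
		dmaengine_terminate_sync(controller->dma_tx);
		dma_release_channel(controller->dma_tx);
		controller->dma_tx = NULL;
	}
}
```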
|
spi-uniphier.c
    356  if ((!host->dma_tx && !host->dma_rx)  in uniphier_spi_can_dma()
    357  || (!host->dma_tx && t->tx_buf)  in uniphier_spi_can_dma()
    445  host->dma_tx,  in uniphier_spi_transfer_one_dma()
    458  dma_async_issue_pending(host->dma_tx);  in uniphier_spi_transfer_one_dma()
    717  if (IS_ERR_OR_NULL(host->dma_tx)) {  in uniphier_spi_probe()
    722  host->dma_tx = NULL;  in uniphier_spi_probe()
    765  if (!IS_ERR_OR_NULL(host->dma_tx)) {  in uniphier_spi_probe()
    766  dma_release_channel(host->dma_tx);  in uniphier_spi_probe()
    767  host->dma_tx = NULL;  in uniphier_spi_probe()
    783  if (host->dma_tx)  in uniphier_spi_remove()
    [all …]
|
spi-bcm2835.c
    617  dmaengine_terminate_async(ctlr->dma_tx);  in bcm2835_spi_dma_rx_done()
    688  chan = ctlr->dma_tx;  in bcm2835_spi_prepare_sg()
    809  dma_async_issue_pending(ctlr->dma_tx);  in bcm2835_spi_transfer_one_dma()
    823  dmaengine_terminate_sync(ctlr->dma_tx);  in bcm2835_spi_transfer_one_dma()
    867  if (ctlr->dma_tx) {  in bcm2835_dma_release()
    868  dmaengine_terminate_sync(ctlr->dma_tx);  in bcm2835_dma_release()
    879  dma_release_channel(ctlr->dma_tx);  in bcm2835_dma_release()
    880  ctlr->dma_tx = NULL;  in bcm2835_dma_release()
    909  if (IS_ERR(ctlr->dma_tx)) {  in bcm2835_dma_init()
    912  ctlr->dma_tx = NULL;  in bcm2835_dma_init()
    [all …]
|
spi-at91-usart.c
    134  ctlr->dma_tx = dma_request_chan(dev, "tx");  in at91_usart_spi_configure_dma()
    135  if (IS_ERR(ctlr->dma_tx)) {  in at91_usart_spi_configure_dma()
    136  err = PTR_ERR(ctlr->dma_tx);  in at91_usart_spi_configure_dma()
    174  if (!IS_ERR_OR_NULL(ctlr->dma_tx))  in at91_usart_spi_configure_dma()
    175  dma_release_channel(ctlr->dma_tx);  in at91_usart_spi_configure_dma()
    178  ctlr->dma_tx = NULL;  in at91_usart_spi_configure_dma()
    189  if (ctlr->dma_tx)  in at91_usart_spi_release_dma()
    190  dma_release_channel(ctlr->dma_tx);  in at91_usart_spi_release_dma()
    197  if (ctlr->dma_tx)  in at91_usart_spi_stop_dma()
    198  dmaengine_terminate_all(ctlr->dma_tx);  in at91_usart_spi_stop_dma()
    [all …]
|
spi-sun6i.c
    248  dmaengine_slave_config(host->dma_tx, &txconf);  in sun6i_spi_prepare_dma()
    250  txdesc = dmaengine_prep_slave_sg(host->dma_tx,  in sun6i_spi_prepare_dma()
    269  dma_async_issue_pending(host->dma_tx);  in sun6i_spi_prepare_dma()
    523  dmaengine_terminate_sync(host->dma_tx);  in sun6i_spi_transfer_one()
    705  if (IS_ERR(host->dma_tx)) {  in sun6i_spi_probe()
    712  host->dma_tx = NULL;  in sun6i_spi_probe()
    725  if (host->dma_tx && host->dma_rx) {  in sun6i_spi_probe()
    761  if (host->dma_tx)  in sun6i_spi_probe()
    762  dma_release_channel(host->dma_tx);  in sun6i_spi_probe()
    774  if (host->dma_tx)  in sun6i_spi_remove()
    [all …]
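The prepare_dma() hits show the canonical submit path: configure the slave side of the channel for memory-to-device transfers, wrap the transfer's TX scatterlist in a descriptor, submit it, and issue. A minimal sketch, assuming the spi_transfer's tx_sg table is already DMA-mapped and taking the controller's TX FIFO address as a hypothetical parameter:

```c
#include <linux/dmaengine.h>
#include <linux/errno.h>
#include <linux/spi/spi.h>

static int my_spi_prepare_tx_dma(struct spi_controller *host,
				 struct spi_transfer *tfr, dma_addr_t fifo_addr)
{
	struct dma_slave_config txconf = {
		.direction	= DMA_MEM_TO_DEV,
		.dst_addr	= fifo_addr,			/* controller TX FIFO */
		.dst_addr_width	= DMA_SLAVE_BUSWIDTH_1_BYTE,
		.dst_maxburst	= 8,
	};
	struct dma_async_tx_descriptor *txdesc;
	int ret;

	ret = dmaengine_slave_config(host->dma_tx, &txconf);
	if (ret)
		return ret;

	txdesc = dmaengine_prep_slave_sg(host->dma_tx,
					 tfr->tx_sg.sgl, tfr->tx_sg.nents,
					 DMA_MEM_TO_DEV,
					 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!txdesc)
		return -EINVAL;

	dmaengine_submit(txdesc);		/* queue the descriptor */
	dma_async_issue_pending(host->dma_tx);	/* start the engine */
	return 0;
}
```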
|
spi-pic32.c
    309  if (!host->dma_rx || !host->dma_tx)  in pic32_spi_dma_transfer()
    347  dma_async_issue_pending(host->dma_tx);  in pic32_spi_dma_transfer()
    553  dmaengine_terminate_all(host->dma_tx);  in pic32_spi_one_transfer()
    623  if (IS_ERR(host->dma_tx)) {  in pic32_spi_dma_prep()
    624  if (PTR_ERR(host->dma_tx) == -EPROBE_DEFER)  in pic32_spi_dma_prep()
    629  host->dma_tx = NULL;  in pic32_spi_dma_prep()
    647  if (host->dma_tx) {  in pic32_spi_dma_prep()
    648  dma_release_channel(host->dma_tx);  in pic32_spi_dma_prep()
    649  host->dma_tx = NULL;  in pic32_spi_dma_prep()
    664  if (pic32s->host->dma_tx)  in pic32_spi_dma_unprep()
    [all …]
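The dma_prep() hits separate "the DMA provider has not probed yet" from "this platform simply has no DMA channel": the former defers the whole probe, the latter quietly falls back to PIO. A hedged sketch of that decision; the function name and the "tx" channel name are illustrative:

```c
#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/spi/spi.h>

static int my_spi_dma_prep(struct spi_controller *host, struct device *dev)
{
	host->dma_tx = dma_request_chan(dev, "tx");
	if (IS_ERR(host->dma_tx)) {
		if (PTR_ERR(host->dma_tx) == -EPROBE_DEFER)
			return -EPROBE_DEFER;	/* DMA controller not ready, retry probe later */

		dev_warn(dev, "TX channel not found, using PIO\n");
		host->dma_tx = NULL;		/* non-fatal: PIO-only operation */
	}
	return 0;
}
```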
|
spi-fsl-lpspi.c
    607  dmaengine_terminate_all(controller->dma_tx);  in fsl_lpspi_dma_transfer()
    615  dma_async_issue_pending(controller->dma_tx);  in fsl_lpspi_dma_transfer()
    628  dmaengine_terminate_all(controller->dma_tx);  in fsl_lpspi_dma_transfer()
    638  dmaengine_terminate_all(controller->dma_tx);  in fsl_lpspi_dma_transfer()
    648  dmaengine_terminate_all(controller->dma_tx);  in fsl_lpspi_dma_transfer()
    677  if (controller->dma_tx) {  in fsl_lpspi_dma_exit()
    678  dma_release_channel(controller->dma_tx);  in fsl_lpspi_dma_exit()
    679  controller->dma_tx = NULL;  in fsl_lpspi_dma_exit()
    691  if (IS_ERR(controller->dma_tx)) {  in fsl_lpspi_dma_init()
    692  ret = PTR_ERR(controller->dma_tx);  in fsl_lpspi_dma_init()
    [all …]
|
spi-rockchip.c
    295  dmaengine_terminate_async(ctlr->dma_tx);  in rockchip_spi_handle_err()
    496  ctlr->dma_tx,  in rockchip_spi_prepare_dma()
    524  dma_async_issue_pending(ctlr->dma_tx);  in rockchip_spi_prepare_dma()
    664  dmaengine_terminate_sync(ctlr->dma_tx);  in rockchip_spi_target_abort()
    873  if (IS_ERR(ctlr->dma_tx)) {  in rockchip_spi_probe()
    879  ctlr->dma_tx = NULL;  in rockchip_spi_probe()
    892  if (ctlr->dma_tx && ctlr->dma_rx) {  in rockchip_spi_probe()
    924  if (ctlr->dma_tx)  in rockchip_spi_probe()
    925  dma_release_channel(ctlr->dma_tx);  in rockchip_spi_probe()
    944  if (ctlr->dma_tx)  in rockchip_spi_remove()
    [all …]
|
spi-ep93xx.c
    92   struct dma_chan *dma_tx;  member
    299  chan = espi->dma_tx;  in ep93xx_spi_dma_prepare()
    383  chan = espi->dma_tx;  in ep93xx_spi_dma_finish()
    427  dma_async_issue_pending(espi->dma_tx);  in ep93xx_spi_dma_transfer()
    588  espi->dma_tx = dma_request_chan(dev, "tx");  in ep93xx_spi_setup_dma()
    589  if (IS_ERR(espi->dma_tx)) {  in ep93xx_spi_setup_dma()
    590  ret = dev_err_probe(dev, PTR_ERR(espi->dma_tx), "tx DMA setup failed");  in ep93xx_spi_setup_dma()
    611  if (espi->dma_tx) {  in ep93xx_spi_release_dma()
    612  dma_release_channel(espi->dma_tx);  in ep93xx_spi_release_dma()
|
spi-omap2-mcspi.c
    94    struct dma_chan *dma_tx;  member
    441   dma_async_issue_pending(mcspi_dma->dma_tx);  in omap2_mcspi_tx_dma()
    656   dmaengine_terminate_sync(mcspi_dma->dma_tx);  in omap2_mcspi_txrx_dma()
    1022  if (IS_ERR(mcspi_dma->dma_tx)) {  in omap2_mcspi_request_dma()
    1023  ret = PTR_ERR(mcspi_dma->dma_tx);  in omap2_mcspi_request_dma()
    1024  mcspi_dma->dma_tx = NULL;  in omap2_mcspi_request_dma()
    1049  if (mcspi_dma->dma_tx) {  in omap2_mcspi_release_dma()
    1050  dma_release_channel(mcspi_dma->dma_tx);  in omap2_mcspi_release_dma()
    1051  mcspi_dma->dma_tx = NULL;  in omap2_mcspi_release_dma()
    1341  if (!mcspi_dma->dma_rx || !mcspi_dma->dma_tx)  in omap2_mcspi_can_dma()
    [all …]
|
spi-davinci.c
    120  struct dma_chan *dma_tx;  member
    395  if (dspi->dma_rx && dspi->dma_tx)  in davinci_spi_of_setup()
    622  dmaengine_slave_config(dspi->dma_tx, &dma_tx_conf);  in davinci_spi_bufs()
    640  txdesc = dmaengine_prep_slave_sg(dspi->dma_tx,  in davinci_spi_bufs()
    658  dma_async_issue_pending(dspi->dma_tx);  in davinci_spi_bufs()
    757  dspi->dma_tx = dma_request_chan(sdev, "tx");  in davinci_spi_request_dma()
    758  if (IS_ERR(dspi->dma_tx)) {  in davinci_spi_request_dma()
    760  return PTR_ERR(dspi->dma_tx);  in davinci_spi_request_dma()
    957  dspi->dma_tx = NULL;  in davinci_spi_probe()
    998  dma_release_channel(dspi->dma_tx);  in davinci_spi_probe()
    [all …]
|
spi-sh-msiof.c
    674   dma_sync_single_for_device(p->ctlr->dma_tx->device->dev,  in sh_msiof_dma_once()
    676   desc_tx = dmaengine_prep_slave_single(p->ctlr->dma_tx,  in sh_msiof_dma_once()
    714   dma_async_issue_pending(p->ctlr->dma_tx);  in sh_msiof_dma_once()
    764   dmaengine_terminate_sync(p->ctlr->dma_tx);  in sh_msiof_dma_once()
    848   while (ctlr->dma_tx && len > 15) {  in sh_msiof_transfer_one()
    1123  if (!ctlr->dma_tx)  in sh_msiof_request_dma()
    1139  tx_dev = ctlr->dma_tx->device->dev;  in sh_msiof_request_dma()
    1163  dma_release_channel(ctlr->dma_tx);  in sh_msiof_request_dma()
    1164  ctlr->dma_tx = NULL;  in sh_msiof_request_dma()
    1172  if (!ctlr->dma_tx)  in sh_msiof_release_dma()
    [all …]
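Here the TX side feeds the engine a single pre-mapped bounce buffer instead of a scatterlist, so the buffer is synced back to the device and wrapped with dmaengine_prep_slave_single(). A sketch under the assumption that tx_dma was obtained earlier with dma_map_single(); the function and parameter names are illustrative:

```c
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>

static int my_tx_dma_once(struct dma_chan *dma_tx, dma_addr_t tx_dma, size_t len,
			  dma_async_tx_callback done, void *ctx)
{
	struct device *tx_dev = dma_tx->device->dev;	/* DMA engine's device, used for syncing */
	struct dma_async_tx_descriptor *desc_tx;

	/* the CPU filled the bounce buffer; hand ownership back to the device */
	dma_sync_single_for_device(tx_dev, tx_dma, len, DMA_TO_DEVICE);

	desc_tx = dmaengine_prep_slave_single(dma_tx, tx_dma, len,
					      DMA_MEM_TO_DEV,
					      DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc_tx)
		return -EAGAIN;

	desc_tx->callback = done;		/* completion notification */
	desc_tx->callback_param = ctx;
	dmaengine_submit(desc_tx);
	dma_async_issue_pending(dma_tx);
	return 0;
}
```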
|
spi-stm32.c
    367   struct dma_chan *dma_tx;  member
    856   if (spi->cur_usedma && spi->dma_tx)  in stm32fx_spi_disable()
    899   if (spi->cur_usedma && spi->dma_tx)  in stm32h7_spi_disable()
    1617  if (spi->tx_buf && spi->dma_tx) {  in stm32_spi_transfer_one_dma()
    2407  if (IS_ERR(spi->dma_tx)) {  in stm32_spi_probe()
    2408  ret = PTR_ERR(spi->dma_tx);  in stm32_spi_probe()
    2409  spi->dma_tx = NULL;  in stm32_spi_probe()
    2415  ctrl->dma_tx = spi->dma_tx;  in stm32_spi_probe()
    2430  if (spi->dma_tx || spi->dma_rx)  in stm32_spi_probe()
    2494  if (spi->dma_tx)  in stm32_spi_probe()
    [all …]
|
spi-ingenic.c
    185  tx_desc = spi_ingenic_prepare_dma(ctlr, ctlr->dma_tx,  in spi_ingenic_dma_tx()
    194  dma_async_issue_pending(ctlr->dma_tx);  in spi_ingenic_dma_tx()
    336  ret = dma_get_slave_caps(ctlr->dma_tx, &caps);  in spi_ingenic_can_dma()
    354  ctlr->dma_tx = chan;  in spi_ingenic_request_dma()
    370  if (ctlr->dma_tx)  in spi_ingenic_release_dma()
    371  dma_release_channel(ctlr->dma_tx);  in spi_ingenic_release_dma()
|
spi-atmel.c
    578  if (IS_ERR(host->dma_tx)) {  in atmel_spi_configure_dma()
    579  err = PTR_ERR(host->dma_tx);  in atmel_spi_configure_dma()
    601  dma_chan_name(host->dma_tx),  in atmel_spi_configure_dma()
    608  if (!IS_ERR(host->dma_tx))  in atmel_spi_configure_dma()
    609  dma_release_channel(host->dma_tx);  in atmel_spi_configure_dma()
    611  host->dma_tx = host->dma_rx = NULL;  in atmel_spi_configure_dma()
    619  if (host->dma_tx)  in atmel_spi_stop_dma()
    620  dmaengine_terminate_all(host->dma_tx);  in atmel_spi_stop_dma()
    629  if (host->dma_tx) {  in atmel_spi_release_dma()
    630  dma_release_channel(host->dma_tx);  in atmel_spi_release_dma()
    [all …]
|
spi-qup.c
    424  chan = host->dma_tx;  in spi_qup_prep_sg()
    444  dmaengine_terminate_all(host->dma_tx);  in spi_qup_dma_terminate()
    528  dma_async_issue_pending(host->dma_tx);  in spi_qup_do_dma()
    932  IS_ERR_OR_NULL(host->dma_tx))  in spi_qup_can_dma()
    949  if (!IS_ERR_OR_NULL(host->dma_tx))  in spi_qup_release_dma()
    950  dma_release_channel(host->dma_tx);  in spi_qup_release_dma()
    966  host->dma_tx = dma_request_chan(dev, "tx");  in spi_qup_init_dma()
    967  if (IS_ERR(host->dma_tx)) {  in spi_qup_init_dma()
    968  ret = PTR_ERR(host->dma_tx);  in spi_qup_init_dma()
    989  ret = dmaengine_slave_config(host->dma_tx, tx_conf);  in spi_qup_init_dma()
    [all …]
|
spi-rspi.c
    569   desc_tx = dmaengine_prep_slave_sg(rspi->ctlr->dma_tx, tx->sgl,  in rspi_dma_transfer()
    609   dma_async_issue_pending(rspi->ctlr->dma_tx);  in rspi_dma_transfer()
    616   dmaengine_synchronize(rspi->ctlr->dma_tx);  in rspi_dma_transfer()
    625   dmaengine_terminate_sync(rspi->ctlr->dma_tx);  in rspi_dma_transfer()
    1144  ctlr->dma_tx = rspi_request_dma_chan(dev, DMA_MEM_TO_DEV, dma_tx_id,  in rspi_request_dma()
    1146  if (!ctlr->dma_tx)  in rspi_request_dma()
    1152  dma_release_channel(ctlr->dma_tx);  in rspi_request_dma()
    1153  ctlr->dma_tx = NULL;  in rspi_request_dma()
    1164  if (ctlr->dma_tx)  in rspi_release_dma()
    1165  dma_release_channel(ctlr->dma_tx);  in rspi_release_dma()
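rspi_dma_transfer() tears a transfer down in two steps, which is worth spelling out: dmaengine_terminate_async() followed later by dmaengine_synchronize() is the split form of dmaengine_terminate_sync(). A short sketch:

```c
#include <linux/dmaengine.h>

static void my_abort_tx(struct dma_chan *dma_tx)
{
	/* request cancellation; safe in contexts that cannot sleep */
	dmaengine_terminate_async(dma_tx);

	/* ... other non-blocking cleanup can happen here ... */

	/* wait for the engine to finish running completion callbacks (may sleep) */
	dmaengine_synchronize(dma_tx);
}
```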
|
/drivers/mmc/host/

jz4740_mmc.c
    179  struct dma_chan *dma_tx;  member
    223  dma_release_channel(host->dma_tx);  in jz4740_mmc_release_dma_channels()
    232  host->dma_tx = dma_request_chan(dev, "tx-rx");  in jz4740_mmc_acquire_dma_channels()
    233  if (!IS_ERR(host->dma_tx))  in jz4740_mmc_acquire_dma_channels()
    236  if (PTR_ERR(host->dma_tx) != -ENODEV) {  in jz4740_mmc_acquire_dma_channels()
    238  return PTR_ERR(host->dma_tx);  in jz4740_mmc_acquire_dma_channels()
    242  if (IS_ERR(host->dma_tx)) {  in jz4740_mmc_acquire_dma_channels()
    244  return PTR_ERR(host->dma_tx);  in jz4740_mmc_acquire_dma_channels()
    250  dma_release_channel(host->dma_tx);  in jz4740_mmc_acquire_dma_channels()
    258  if (host->dma_tx) {  in jz4740_mmc_acquire_dma_channels()
    [all …]
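The acquire path requests a combined "tx-rx" channel and treats -ENODEV as an ordinary fall-back to PIO, while every other error, including -EPROBE_DEFER, is propagated. A sketch of that logic with a hypothetical host structure:

```c
#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/types.h>

struct my_mmc_host {
	struct device *dev;
	struct dma_chan *dma_tx;
	bool use_dma;
};

static int my_mmc_acquire_dma(struct my_mmc_host *host)
{
	host->dma_tx = dma_request_chan(host->dev, "tx-rx");
	if (IS_ERR(host->dma_tx)) {
		int err = PTR_ERR(host->dma_tx);

		host->dma_tx = NULL;
		if (err == -ENODEV) {	/* no channel described for this device: PIO mode */
			host->use_dma = false;
			return 0;
		}
		return err;		/* -EPROBE_DEFER and real failures */
	}
	host->use_dma = true;
	return 0;
}
```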
|
omap.c
    134   struct dma_chan *dma_tx;  member
    423   c = host->dma_tx;  in mmc_omap_release_dma()
    1007  c = host->dma_tx;  in mmc_omap_prepare_data()
    1087  host->dma_tx : host->dma_rx;  in mmc_omap_start_request()
    1445  if (IS_ERR(host->dma_tx)) {  in mmc_omap_probe()
    1446  ret = PTR_ERR(host->dma_tx);  in mmc_omap_probe()
    1450  host->dma_tx = NULL;  in mmc_omap_probe()
    1458  if (host->dma_tx)  in mmc_omap_probe()
    1506  if (host->dma_tx)  in mmc_omap_probe()
    1507  dma_release_channel(host->dma_tx);  in mmc_omap_probe()
    [all …]
|
davinci_mmc.c
    184  struct dma_chan *dma_tx;  member
    384  sync_dev = host->dma_tx;  in davinci_abort_dma()
    404  chan = host->dma_tx;  in mmc_davinci_send_dma_request()
    405  dmaengine_slave_config(host->dma_tx, &dma_tx_conf);  in mmc_davinci_send_dma_request()
    407  desc = dmaengine_prep_slave_sg(host->dma_tx,  in mmc_davinci_send_dma_request()
    480  dma_release_channel(host->dma_tx);  in davinci_release_dma_channels()
    486  host->dma_tx = dma_request_chan(mmc_dev(host->mmc), "tx");  in davinci_acquire_dma_channels()
    487  if (IS_ERR(host->dma_tx)) {  in davinci_acquire_dma_channels()
    489  return PTR_ERR(host->dma_tx);  in davinci_acquire_dma_channels()
    495  dma_release_channel(host->dma_tx);  in davinci_acquire_dma_channels()
|
/drivers/mtd/nand/raw/

intel-nand-controller.c
    118  struct dma_chan *dma_tx;  member
    282  dmaengine_terminate_async(ebu_host->dma_tx);  in ebu_dma_tx_callback()
    305  chan = ebu_host->dma_tx;  in ebu_dma_start()
    572  if (ebu_host->dma_tx)  in ebu_dma_cleanup()
    573  dma_release_channel(ebu_host->dma_tx);  in ebu_dma_cleanup()
    641  ebu_host->dma_tx = dma_request_chan(dev, "tx");  in ebu_nand_probe()
    642  if (IS_ERR(ebu_host->dma_tx)) {  in ebu_nand_probe()
    643  ret = dev_err_probe(dev, PTR_ERR(ebu_host->dma_tx),  in ebu_nand_probe()
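ebu_dma_tx_callback() terminates the channel from inside the DMA completion callback, which is only legal with the asynchronous variant. A sketch of that callback wiring; the structure and field names are illustrative:

```c
#include <linux/completion.h>
#include <linux/dmaengine.h>

struct my_nand_host {
	struct dma_chan *dma_tx;
	struct completion dma_done;
};

/* installed as desc->callback (with desc->callback_param = host) before dmaengine_submit() */
static void my_dma_tx_callback(void *param)
{
	struct my_nand_host *host = param;

	/* callback context: dmaengine_terminate_async() is allowed, the _sync variant is not */
	dmaengine_terminate_async(host->dma_tx);
	complete(&host->dma_done);
}
```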
|
/drivers/net/ethernet/micrel/

ks8842.c
    147   #define KS8842_USE_DMA(adapter) (((adapter)->dma_tx.channel != -1) && \
    159   struct ks8842_tx_dma_ctl dma_tx;  member
    424   struct ks8842_tx_dma_ctl *ctl = &adapter->dma_tx;  in ks8842_tx_frame_dma()
    847   struct ks8842_tx_dma_ctl *ctl = &adapter->dma_tx;  in ks8842_dma_tx_cb()
    863   struct ks8842_tx_dma_ctl *tx_ctl = &adapter->dma_tx;  in ks8842_stop_dma()
    885   struct ks8842_tx_dma_ctl *tx_ctl = &adapter->dma_tx;  in ks8842_dealloc_dma_bufs()
    917   struct ks8842_tx_dma_ctl *tx_ctl = &adapter->dma_tx;  in ks8842_alloc_dma_bufs()
    990   adapter->dma_tx.channel = -1;  in ks8842_open()
    1044  if (adapter->dma_tx.adesc)  in ks8842_xmit_frame()
    1175  adapter->dma_tx.channel = pdata->tx_dma_channel;  in ks8842_probe()
    [all …]
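ks8842 keeps dma_tx as a driver-private control block keyed by a platform-supplied integer channel number, with -1 meaning "no DMA"; the KS8842_USE_DMA() macro gates every DMA path on that sentinel. A sketch of the pattern; the struct layout below mirrors the hits rather than the exact ks8842 definitions:

```c
#include <linux/dmaengine.h>

struct my_tx_dma_ctl {
	int channel;				/* platform channel number, -1 when DMA is unavailable */
	struct dma_chan *chan;
	struct dma_async_tx_descriptor *adesc;	/* descriptor currently in flight, if any */
};

struct my_adapter {
	struct my_tx_dma_ctl dma_tx;
};

#define MY_USE_DMA(adapter)	((adapter)->dma_tx.channel != -1)

static void my_disable_dma(struct my_adapter *adapter)
{
	adapter->dma_tx.channel = -1;		/* force the PIO transmit path */
}
```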
|
/drivers/net/ethernet/samsung/sxgbe/

sxgbe_dma.c
    44  int fix_burst, int pbl, dma_addr_t dma_tx,  in sxgbe_dma_channel_init()  argument
    66  writel(upper_32_bits(dma_tx),  in sxgbe_dma_channel_init()
    68  writel(lower_32_bits(dma_tx),  in sxgbe_dma_channel_init()
    80  dma_addr = dma_tx + ((t_rsize - 1) * SXGBE_DESC_SIZE_BYTES);  in sxgbe_dma_channel_init()
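In this driver dma_tx is not a channel at all but the bus address of the TX descriptor ring, split across a pair of 32-bit registers. A sketch of that style of programming; the register offsets and descriptor size are placeholders, not the real SXGBE values:

```c
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/types.h>

#define MY_TXDESC_LIST_HI	0x10	/* hypothetical register offsets */
#define MY_TXDESC_LIST_LO	0x14
#define MY_TXDESC_TAIL_PTR	0x18
#define MY_DESC_SIZE_BYTES	16	/* hypothetical descriptor size */

static void my_dma_channel_init(void __iomem *ioaddr, dma_addr_t dma_tx, int t_rsize)
{
	dma_addr_t last;

	/* program the 64-bit ring base address as two 32-bit halves */
	writel(upper_32_bits(dma_tx), ioaddr + MY_TXDESC_LIST_HI);
	writel(lower_32_bits(dma_tx), ioaddr + MY_TXDESC_LIST_LO);

	/* bus address of the last descriptor in the ring */
	last = dma_tx + ((t_rsize - 1) * MY_DESC_SIZE_BYTES);
	writel(lower_32_bits(last), ioaddr + MY_TXDESC_TAIL_PTR);
}
```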
|
/drivers/i2c/busses/

i2c-sh_mobile.c
    136  struct dma_chan *dma_tx;  member
    449  ? pd->dma_rx : pd->dma_tx;  in sh_mobile_i2c_cleanup_dma()
    512  struct dma_chan *chan = read ? pd->dma_rx : pd->dma_tx;  in sh_mobile_i2c_xfer_dma()
    522  chan = pd->dma_tx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_MEM_TO_DEV,  in sh_mobile_i2c_xfer_dma()
    815  if (!IS_ERR(pd->dma_tx)) {  in sh_mobile_i2c_release_dma()
    816  dma_release_channel(pd->dma_tx);  in sh_mobile_i2c_release_dma()
    817  pd->dma_tx = ERR_PTR(-EPROBE_DEFER);  in sh_mobile_i2c_release_dma()
    920  pd->dma_rx = pd->dma_tx = ERR_PTR(-EPROBE_DEFER);  in sh_mobile_i2c_probe()
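The probe and release hits show the ERR_PTR(-EPROBE_DEFER) sentinel idiom: the channel is requested lazily on the first transfer, and the pointer is reset to the sentinel when released, so IS_ERR() consistently means "no usable channel right now". A sketch with illustrative names:

```c
#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/errno.h>

struct my_i2c_dev {
	struct device *dev;
	struct dma_chan *dma_tx;
};

static void my_i2c_probe_init(struct my_i2c_dev *pd)
{
	pd->dma_tx = ERR_PTR(-EPROBE_DEFER);	/* "not requested yet" sentinel */
}

static struct dma_chan *my_i2c_get_tx_chan(struct my_i2c_dev *pd)
{
	if (IS_ERR(pd->dma_tx))
		pd->dma_tx = dma_request_chan(pd->dev, "tx");	/* requested on first use */

	return pd->dma_tx;	/* may still be an ERR_PTR: the caller falls back to PIO */
}

static void my_i2c_release_dma(struct my_i2c_dev *pd)
{
	if (!IS_ERR(pd->dma_tx)) {
		dma_release_channel(pd->dma_tx);
		pd->dma_tx = ERR_PTR(-EPROBE_DEFER);
	}
}
```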
|
/drivers/net/ethernet/calxeda/

xgmac.c
    360   struct xgmac_dma_desc *dma_tx;  member
    750   priv->dma_tx = dma_alloc_coherent(priv->device,  in xgmac_dma_desc_rings_init()
    755   if (!priv->dma_tx)  in xgmac_dma_desc_rings_init()
    760   priv->dma_rx, priv->dma_tx,  in xgmac_dma_desc_rings_init()
    822   p = priv->dma_tx + i;  in xgmac_free_tx_skbufs()
    843   if (priv->dma_tx) {  in xgmac_free_dma_desc_rings()
    846   priv->dma_tx, priv->dma_tx_phy);  in xgmac_free_dma_desc_rings()
    847   priv->dma_tx = NULL;  in xgmac_free_dma_desc_rings()
    1092  desc = priv->dma_tx + entry;  in xgmac_xmit()
    1115  desc = priv->dma_tx + entry;  in xgmac_xmit()
    [all …]
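In xgmac, dma_tx is the CPU-side pointer to a coherently allocated ring of hardware TX descriptors: one dma_alloc_coherent() call at init, per-entry indexing in xmit, and one dma_free_coherent() on teardown. A sketch of that allocation pattern with stand-in structure names:

```c
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/types.h>

struct my_dma_desc {
	__le32 flags;
	__le32 buf_size;
	__le32 buf1_addr;
	__le32 next_addr;
};

struct my_priv {
	struct device *device;
	struct my_dma_desc *dma_tx;	/* CPU view of the TX descriptor ring */
	dma_addr_t dma_tx_phy;		/* bus address programmed into the MAC */
	unsigned int tx_ring_size;
};

static int my_alloc_tx_ring(struct my_priv *priv)
{
	priv->dma_tx = dma_alloc_coherent(priv->device,
					  priv->tx_ring_size * sizeof(*priv->dma_tx),
					  &priv->dma_tx_phy, GFP_KERNEL);
	return priv->dma_tx ? 0 : -ENOMEM;
}

static struct my_dma_desc *my_tx_desc(struct my_priv *priv, unsigned int entry)
{
	return priv->dma_tx + entry;	/* descriptor backing one ring slot */
}

static void my_free_tx_ring(struct my_priv *priv)
{
	if (priv->dma_tx) {
		dma_free_coherent(priv->device,
				  priv->tx_ring_size * sizeof(*priv->dma_tx),
				  priv->dma_tx, priv->dma_tx_phy);
		priv->dma_tx = NULL;
	}
}
```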
|