/drivers/mtd/nand/
  qpic_common.c
      171   } else if (chan == nandc->tx_chan) {   in qcom_prepare_bam_async_desc()
      335   ret = qcom_prepare_bam_async_desc(nandc, nandc->tx_chan,   in qcom_prep_bam_dma_desc_data()
      571   ret = qcom_prepare_bam_async_desc(nandc, nandc->tx_chan,   in qcom_submit_descs()
      592   dma_async_issue_pending(nandc->tx_chan);   in qcom_submit_descs()
      654   if (nandc->tx_chan)   in qcom_nandc_unalloc()
      655   dma_release_channel(nandc->tx_chan);   in qcom_nandc_unalloc()
      718   nandc->tx_chan = dma_request_chan(nandc->dev, "tx");   in qcom_nandc_alloc()
      719   if (IS_ERR(nandc->tx_chan)) {   in qcom_nandc_alloc()
      720   ret = PTR_ERR(nandc->tx_chan);   in qcom_nandc_alloc()
      721   nandc->tx_chan = NULL;   in qcom_nandc_alloc()
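The qpic_common.c references above show the usual dmaengine lifecycle for the controller's BAM TX channel: request the named "tx" channel at allocation time, issue pending descriptors when work is submitted, and release the channel on teardown. Below is a minimal sketch of that pattern, assuming a hypothetical my_nandc container and helper names; only the dmaengine calls mirror the listing.

#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/err.h>

struct my_nandc {                       /* hypothetical container */
        struct device *dev;
        struct dma_chan *tx_chan;
};

static int my_nandc_alloc_dma(struct my_nandc *nandc)
{
        /* Look up the channel bound to this device under the name "tx". */
        nandc->tx_chan = dma_request_chan(nandc->dev, "tx");
        if (IS_ERR(nandc->tx_chan)) {
                int ret = PTR_ERR(nandc->tx_chan);

                nandc->tx_chan = NULL;  /* keeps the teardown check simple */
                return dev_err_probe(nandc->dev, ret,
                                     "failed to request tx DMA channel\n");
        }

        return 0;
}

static void my_nandc_kick_dma(struct my_nandc *nandc)
{
        /* Start execution of descriptors already submitted on the channel. */
        dma_async_issue_pending(nandc->tx_chan);
}

static void my_nandc_free_dma(struct my_nandc *nandc)
{
        if (nandc->tx_chan)
                dma_release_channel(nandc->tx_chan);
}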
|
/drivers/tty/serial/
  samsung_tty.c
      109   struct dma_chan *tx_chan;   member
      308   dmaengine_pause(dma->tx_chan);   in s3c24xx_serial_stop_tx()
      310   dmaengine_terminate_all(dma->tx_chan);   in s3c24xx_serial_stop_tx()
      464   dma_async_issue_pending(dma->tx_chan);   in s3c24xx_serial_start_tx_dma()
      564   dmaengine_pause(dma->tx_chan);   in s3c24xx_serial_stop_rx()
      1087  if (IS_ERR(dma->tx_chan)) {   in s3c24xx_serial_request_dma()
      1089  ret = PTR_ERR(dma->tx_chan);   in s3c24xx_serial_request_dma()
      1139  dma_release_channel(dma->tx_chan);   in s3c24xx_serial_request_dma()
      1161  if (dma->tx_chan) {   in s3c24xx_serial_release_dma()
      1165  dma_release_channel(dma->tx_chan);   in s3c24xx_serial_release_dma()
      [all …]
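samsung_tty.c shows the runtime half of the same API: the TX channel is paused when transmission stops and terminated outright when the queued descriptor must be dropped. A short sketch of that stop path follows, assuming a hypothetical my_uart_dma state structure; dmaengine_pause() and dmaengine_terminate_all() are the generic calls from the listing.

#include <linux/dmaengine.h>
#include <linux/types.h>

struct my_uart_dma {                    /* hypothetical TX DMA state */
        struct dma_chan *tx_chan;
        bool tx_in_progress;
};

static void my_uart_stop_tx_dma(struct my_uart_dma *dma, bool discard)
{
        if (!dma->tx_chan || !dma->tx_in_progress)
                return;

        /* Pause keeps the descriptor so TX can resume; terminate drops it. */
        if (!discard)
                dmaengine_pause(dma->tx_chan);
        else
                dmaengine_terminate_all(dma->tx_chan);

        dma->tx_in_progress = false;
}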
|
/drivers/net/ethernet/broadcom/
  bcm63xx_enet.c
      499   ENETDMAC_IR, priv->tx_chan);   in bcm_enet_poll()
      521   ENETDMAC_IRMASK, priv->tx_chan);   in bcm_enet_poll()
      648   ENETDMAC_CHANCFG, priv->tx_chan);   in bcm_enet_start_xmit()
      1032  ENETDMAC_RSTART, priv->tx_chan);   in bcm_enet_open()
      1056  ENETDMAC_MAXBURST, priv->tx_chan);   in bcm_enet_open()
      1092  ENETDMAC_IR, priv->tx_chan);   in bcm_enet_open()
      1100  ENETDMAC_IRMASK, priv->tx_chan);   in bcm_enet_open()
      1787  priv->tx_chan = pd->tx_chan;   in bcm_enet_probe()
      2260  ENETDMAC_IR, priv->tx_chan);   in bcm_enetsw_open()
      2268  ENETDMAC_IRMASK, priv->tx_chan);   in bcm_enetsw_open()
      [all …]

  bcm63xx_enet.h
      254   int tx_chan;   member
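In the bcm63xx driver tx_chan is not a dma_chan pointer but a plain integer channel index used to address per-channel ENETDMAC registers. The sketch below only illustrates that idea: the register stride, names and accessor are invented for this example and do not reproduce the driver's own helpers.

#include <linux/io.h>
#include <linux/types.h>

/*
 * Hypothetical illustration: per-channel DMA controller registers live at
 * a fixed stride from a common base, so an integer channel index selects
 * the register bank.  MY_DMAC_CHAN_STRIDE and my_dmac_writel() are made up.
 */
#define MY_DMAC_CHAN_STRIDE     0x10

static inline void my_dmac_writel(void __iomem *dmac_base, u32 val,
                                  u32 reg, int chan)
{
        writel(val, dmac_base + reg + chan * MY_DMAC_CHAN_STRIDE);
}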
|
/drivers/remoteproc/
  xlnx_r5_remoteproc.c
      87    struct mbox_chan *tx_chan;   member
      280   ipi->tx_chan = mbox_request_channel_byname(mbox_cl, "tx");   in zynqmp_r5_setup_mbox()
      281   if (IS_ERR(ipi->tx_chan)) {   in zynqmp_r5_setup_mbox()
      282   ipi->tx_chan = NULL;   in zynqmp_r5_setup_mbox()
      290   mbox_free_channel(ipi->tx_chan);   in zynqmp_r5_setup_mbox()
      292   ipi->tx_chan = NULL;   in zynqmp_r5_setup_mbox()
      308   if (ipi->tx_chan) {   in zynqmp_r5_free_mbox()
      309   mbox_free_channel(ipi->tx_chan);   in zynqmp_r5_free_mbox()
      310   ipi->tx_chan = NULL;   in zynqmp_r5_free_mbox()
      341   ret = mbox_send_message(ipi->tx_chan, mb_msg);   in zynqmp_r5_rproc_kick()
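xlnx_r5_remoteproc.c uses tx_chan with the mailbox framework instead of dmaengine: the channel is requested by its "tx" name, used to post kick messages to the remote processor, and freed on teardown. A minimal sketch of that lifecycle, assuming a hypothetical my_ipi container; the mbox_* calls are the generic mailbox client API seen in the listing.

#include <linux/err.h>
#include <linux/mailbox_client.h>

struct my_ipi {                         /* hypothetical mailbox state */
        struct mbox_client client;      /* client.dev must point at a device
                                         * with mboxes/mbox-names in DT */
        struct mbox_chan *tx_chan;
};

static int my_ipi_setup(struct my_ipi *ipi)
{
        /* Find the channel listed under the name "tx" for this client. */
        ipi->tx_chan = mbox_request_channel_byname(&ipi->client, "tx");
        if (IS_ERR(ipi->tx_chan)) {
                int ret = PTR_ERR(ipi->tx_chan);

                ipi->tx_chan = NULL;
                return ret;
        }

        return 0;
}

static int my_ipi_kick(struct my_ipi *ipi, void *msg)
{
        /* Queue a message; the controller driver signals the remote side. */
        return mbox_send_message(ipi->tx_chan, msg);
}

static void my_ipi_teardown(struct my_ipi *ipi)
{
        if (ipi->tx_chan) {
                mbox_free_channel(ipi->tx_chan);
                ipi->tx_chan = NULL;
        }
}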
|
/drivers/usb/renesas_usbhs/
  fifo.c
      771   return fifo->tx_chan;   in usbhsf_dma_chan_get()
      1250  if (fifo->tx_chan)   in usbhsf_dma_quit()
      1251  dma_release_channel(fifo->tx_chan);   in usbhsf_dma_quit()
      1255  fifo->tx_chan = NULL;   in usbhsf_dma_quit()
      1265  fifo->tx_chan = dma_request_channel(mask, usbhsf_dma_filter,   in usbhsf_dma_init_pdev()
      1286  fifo->tx_chan = dma_request_chan(dev, name);   in usbhsf_dma_init_dt()
      1287  if (IS_ERR(fifo->tx_chan))   in usbhsf_dma_init_dt()
      1288  fifo->tx_chan = NULL;   in usbhsf_dma_init_dt()
      1306  if (fifo->tx_chan || fifo->rx_chan)   in usbhsf_dma_init()
      1309  fifo->tx_chan ? "[TX]" : " ",   in usbhsf_dma_init()

  fifo.h
      25    struct dma_chan *tx_chan;   member
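The renesas_usbhs fifo code keeps both ways of getting a channel: the legacy filter-based dma_request_channel() for platform-data setups and dma_request_chan() by name for DT, with a NULL tx_chan simply meaning "fall back to PIO". A hedged sketch of the two request paths; the filter behaviour and helper names are simplified placeholders, not the driver's exact code.

#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/err.h>

/*
 * Simplified legacy-style filter: accept the candidate channel and hand the
 * platform-provided slave configuration to the DMA driver via chan->private
 * (an older shdma-era convention).
 */
static bool my_dma_filter(struct dma_chan *chan, void *param)
{
        chan->private = param;
        return true;
}

static struct dma_chan *my_request_tx_chan(struct device *dev, void *pdata_cookie)
{
        struct dma_chan *chan;

        if (dev->of_node) {
                /* DT: look the channel up by its "dma-names" entry. */
                chan = dma_request_chan(dev, "tx");
                if (IS_ERR(chan))
                        chan = NULL;    /* no DMA: caller falls back to PIO */
        } else {
                /* Legacy platform data: scan channels with a filter callback. */
                dma_cap_mask_t mask;

                dma_cap_zero(mask);
                dma_cap_set(DMA_SLAVE, mask);
                chan = dma_request_channel(mask, my_dma_filter, pdata_cookie);
        }

        return chan;
}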
|
/drivers/net/ethernet/chelsio/inline_crypto/ch_ktls/
  chcr_ktls.c
      207   SMAC_SEL_V(tx_info->smt_idx) | TX_CHAN_V(tx_info->tx_chan);   in chcr_ktls_act_open_req()
      212   TX_QUEUE_V(tx_info->adap->params.tp.tx_modq[tx_info->tx_chan]);   in chcr_ktls_act_open_req()
      259   SMAC_SEL_V(tx_info->smt_idx) | TX_CHAN_V(tx_info->tx_chan);   in chcr_ktls_act_open_req6()
      389   cxgb4_remove_tid(&tx_info->adap->tids, tx_info->tx_chan,   in chcr_ktls_dev_del()
      458   tx_info->tx_chan = pi->tx_chan;   in chcr_ktls_dev_add()
      577   cxgb4_remove_tid(&tx_info->adap->tids, tx_info->tx_chan,   in chcr_ktls_dev_add()
      673   cxgb4_remove_tid(&tx_info->adap->tids, tx_info->tx_chan,   in chcr_ktls_cpl_act_open_rpl()
      989   struct sge_eth_txq *q, uint32_t tx_chan)   in chcr_ktls_write_tcp_options() argument
      1627  TXPKT_INTF_V(tx_info->tx_chan) |   in chcr_ktls_tunnel_pkt()
      1967  tx_info->tx_chan);   in chcr_ktls_xmit()
      [all …]

  chcr_ktls.h
      61    u8 tx_chan;   member
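In the Chelsio inline-TLS driver tx_chan is a u8 port channel that gets packed into hardware work-request words through field macros such as TX_CHAN_V() and TXPKT_INTF_V(). The sketch below only illustrates that shift-and-OR macro convention; the field positions and names are invented, the real definitions live in the cxgb4 headers.

#include <linux/types.h>
#include <asm/byteorder.h>

/* Invented field positions, mimicking the cxgb4 _S/_V macro style. */
#define MY_TX_CHAN_S            2
#define MY_TX_CHAN_V(x)         ((u64)(x) << MY_TX_CHAN_S)

#define MY_SMAC_SEL_S           8
#define MY_SMAC_SEL_V(x)        ((u64)(x) << MY_SMAC_SEL_S)

/* OR several fields into one big-endian option word, as the CPLs above do. */
static inline __be64 my_build_open_opt0(u8 tx_chan, u8 smt_idx)
{
        u64 opt0 = MY_SMAC_SEL_V(smt_idx) | MY_TX_CHAN_V(tx_chan);

        return cpu_to_be64(opt0);
}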
|
/drivers/mmc/host/
  au1xmmc.c
      112   u32 tx_chan;   member
      152   (((h)->flags & HOST_F_XMIT) ? (h)->tx_chan : (h)->rx_chan)
      879   host->tx_chan = au1xxx_dbdma_chan_alloc(memid, txid,   in au1xmmc_dbdma_init()
      881   if (!host->tx_chan) {   in au1xmmc_dbdma_init()
      890   au1xxx_dbdma_chan_free(host->tx_chan);   in au1xmmc_dbdma_init()
      894   au1xxx_dbdma_set_devwidth(host->tx_chan, 8);   in au1xmmc_dbdma_init()
      897   au1xxx_dbdma_ring_alloc(host->tx_chan, AU1XMMC_DESCRIPTOR_COUNT);   in au1xmmc_dbdma_init()
      910   au1xxx_dbdma_chan_free(host->tx_chan);   in au1xmmc_dbdma_shutdown()

  omap_hsmmc.c
      195   struct dma_chan *tx_chan;   member
      813   return data->flags & MMC_DATA_WRITE ? host->tx_chan : host->rx_chan;   in omap_hsmmc_get_dma_chan()
      1898  host->tx_chan = dma_request_chan(&pdev->dev, "tx");   in omap_hsmmc_probe()
      1899  if (IS_ERR(host->tx_chan)) {   in omap_hsmmc_probe()
      1901  ret = PTR_ERR(host->tx_chan);   in omap_hsmmc_probe()
      1914  dma_get_max_seg_size(host->tx_chan->device->dev));   in omap_hsmmc_probe()
      1964  if (!IS_ERR_OR_NULL(host->tx_chan))   in omap_hsmmc_probe()
      1965  dma_release_channel(host->tx_chan);   in omap_hsmmc_probe()
      1982  dma_release_channel(host->tx_chan);   in omap_hsmmc_remove()

  sdhci.c
      1225  host->tx_chan = dma_request_chan(mmc_dev(mmc), "tx");   in sdhci_external_dma_init()
      1226  if (IS_ERR(host->tx_chan)) {   in sdhci_external_dma_init()
      1227  ret = PTR_ERR(host->tx_chan);   in sdhci_external_dma_init()
      1230  host->tx_chan = NULL;   in sdhci_external_dma_init()
      1236  if (host->tx_chan) {   in sdhci_external_dma_init()
      1237  dma_release_channel(host->tx_chan);   in sdhci_external_dma_init()
      1238  host->tx_chan = NULL;   in sdhci_external_dma_init()
      1253  return data->flags & MMC_DATA_WRITE ? host->tx_chan : host->rx_chan;   in sdhci_external_dma_channel()
      1313  if (host->tx_chan) {   in sdhci_external_dma_release()
      1314  dma_release_channel(host->tx_chan);   in sdhci_external_dma_release()
      [all …]
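omap_hsmmc.c and sdhci.c share the same small helpers: pick tx_chan or rx_chan from the data direction and treat a missing channel as "no external DMA". A minimal sketch, assuming a hypothetical my_mmc_host; the MMC_DATA_WRITE test and the dmaengine calls mirror the listing.

#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/mmc/core.h>

struct my_mmc_host {                    /* hypothetical host with external DMA */
        struct dma_chan *tx_chan;
        struct dma_chan *rx_chan;
};

/* Write requests go out on the TX channel, reads come back on RX. */
static struct dma_chan *my_mmc_dma_chan(struct my_mmc_host *host,
                                        struct mmc_data *data)
{
        return data->flags & MMC_DATA_WRITE ? host->tx_chan : host->rx_chan;
}

static void my_mmc_dma_release(struct my_mmc_host *host)
{
        if (!IS_ERR_OR_NULL(host->tx_chan))
                dma_release_channel(host->tx_chan);
        if (!IS_ERR_OR_NULL(host->rx_chan))
                dma_release_channel(host->rx_chan);
}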
|
/drivers/net/ethernet/xilinx/
  xilinx_axienet_main.c
      915   dma_dev = lp->tx_chan->device;   in axienet_start_xmit_dmaengine()
      1511  if (IS_ERR(lp->tx_chan)) {   in axienet_init_dmaengine()
      1513  return PTR_ERR(lp->tx_chan);   in axienet_init_dmaengine()
      1574  dma_release_channel(lp->tx_chan);   in axienet_init_dmaengine()
      1741  dmaengine_synchronize(lp->tx_chan);   in axienet_stop()
      1753  dma_release_channel(lp->tx_chan);   in axienet_stop()
      2997  struct dma_chan *tx_chan;   in axienet_probe() local
      3005  if (IS_ERR(tx_chan)) {   in axienet_probe()
      3006  ret = PTR_ERR(tx_chan);   in axienet_probe()
      3016  dma_release_channel(tx_chan);   in axienet_probe()
      [all …]

  xilinx_axienet.h
      621   struct dma_chan *tx_chan;   member
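The axienet references add the transmit path itself: prepare a slave descriptor on tx_chan, submit it, issue pending work, and synchronize before releasing the channel on stop. Below is a condensed single-buffer version of that submit sequence; everything except the dmaengine and DMA-mapping calls is a placeholder, and a real driver also needs a completion callback to unmap the buffer.

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/errno.h>

static int my_xmit_one(struct device *dev, struct dma_chan *tx_chan,
                       void *buf, size_t len)
{
        struct dma_async_tx_descriptor *desc;
        dma_addr_t dma_addr;
        dma_cookie_t cookie;

        /* Map the buffer for device reads (memory-to-device transfer). */
        dma_addr = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
        if (dma_mapping_error(dev, dma_addr))
                return -ENOMEM;

        desc = dmaengine_prep_slave_single(tx_chan, dma_addr, len,
                                           DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
        if (!desc) {
                dma_unmap_single(dev, dma_addr, len, DMA_TO_DEVICE);
                return -ENOMEM;
        }

        cookie = dmaengine_submit(desc);
        if (dma_submit_error(cookie)) {
                dma_unmap_single(dev, dma_addr, len, DMA_TO_DEVICE);
                return -EIO;
        }

        /* Actually start the transfer on the hardware. */
        dma_async_issue_pending(tx_chan);
        return 0;
}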
|
/drivers/spi/
  atmel-quadspi.c
      288   struct dma_chan *tx_chan;   member
      847   ret = atmel_qspi_dma_xfer(aq, aq->tx_chan, dma_dst,   in atmel_qspi_dma_tx_xfer()
      916   if (aq->tx_chan && op->addr.nbytes &&   in atmel_qspi_sama7g5_transfer()
      1293  aq->tx_chan = devm_dma_request_chan(&aq->pdev->dev, "tx");   in atmel_qspi_dma_init()
      1294  if (IS_ERR(aq->tx_chan)) {   in atmel_qspi_dma_init()
      1295  ret = dev_err_probe(&aq->pdev->dev, PTR_ERR(aq->tx_chan),   in atmel_qspi_dma_init()
      1298  aq->tx_chan = NULL;   in atmel_qspi_dma_init()
      1303  ctrl->dma_tx = aq->tx_chan;   in atmel_qspi_dma_init()
      1307  dma_chan_name(aq->tx_chan), dma_chan_name(aq->rx_chan));   in atmel_qspi_dma_init()
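atmel-quadspi.c treats the TX channel as optional: a failed request is reported (so -EPROBE_DEFER can propagate), the pointer is cleared and the controller falls back to PIO, while a successful one is handed to the SPI core through ctrl->dma_tx. A hedged sketch of that probe step; the listing uses the devm_ variant of the request, plain dma_request_chan() is used here to keep the example generic.

#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/spi/spi.h>

static int my_qspi_dma_init(struct device *dev, struct spi_controller *ctrl,
                            struct dma_chan **tx_chan)
{
        *tx_chan = dma_request_chan(dev, "tx");
        if (IS_ERR(*tx_chan)) {
                int ret = PTR_ERR(*tx_chan);

                *tx_chan = NULL;
                if (ret == -EPROBE_DEFER)
                        return ret;             /* retry the probe later */

                /* DMA is optional here: warn and continue in PIO mode. */
                dev_warn(dev, "no TX DMA channel (%d), using PIO\n", ret);
                return 0;
        }

        /* Let the SPI core know which channel carries TX data. */
        ctrl->dma_tx = *tx_chan;
        dev_info(dev, "using %s for TX DMA transfers\n",
                 dma_chan_name(*tx_chan));
        return 0;
}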
|
/drivers/mailbox/
  bcm74110-mailbox.c
      104   int tx_chan;   member
      120   BCM74110_OFFSET_IO_WRITEL_MACRO(tx, BCM_MBOX_BASE(mbox->tx_chan));
      129   BCM74110_OFFSET_IO_READL_MACRO(tx, BCM_MBOX_BASE(mbox->tx_chan));
      585   ret = of_property_read_u32(dev->of_node, "brcm,tx", &mbox->tx_chan);   in bcm74110_mbox_probe()
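bcm74110-mailbox.c is another integer case: tx_chan is read straight from the "brcm,tx" device-tree property and later used to compute per-channel register offsets. A minimal sketch of that probe-time lookup; only the property name comes from the listing, the helper is made up.

#include <linux/device.h>
#include <linux/of.h>
#include <linux/types.h>

/* Read the integer TX channel index from the node's "brcm,tx" property. */
static int my_mbox_get_tx_chan(struct device *dev, u32 *tx_chan)
{
        int ret;

        ret = of_property_read_u32(dev->of_node, "brcm,tx", tx_chan);
        if (ret)
                return dev_err_probe(dev, ret, "missing brcm,tx property\n");

        return 0;
}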
|
/drivers/net/ethernet/chelsio/cxgb4/
  cxgb4_ethtool.c
      361   t4_get_port_stats_offset(adapter, pi->tx_chan,   in get_stats()
      399   t4_restart_aneg(p->adapter, p->adapter->pf, p->tx_chan);   in restart_autoneg()
      755   ret = t4_link_l1cfg(pi->adapter, pi->adapter->mbox, pi->tx_chan, lc);   in set_link_ksettings()
      854   pi->tx_chan, lc);   in set_fecparam()
      888   return t4_link_l1cfg(p->adapter, p->adapter->mbox, p->tx_chan,   in set_pauseparam()
      2021  ret = t4_i2c_rd(adapter, adapter->mbox, pi->tx_chan,   in cxgb4_get_module_info()
      2026  ret = t4_i2c_rd(adapter, adapter->mbox, pi->tx_chan,   in cxgb4_get_module_info()
      2049  ret = t4_i2c_rd(adapter, adapter->mbox, pi->tx_chan,   in cxgb4_get_module_info()
      2082  return t4_i2c_rd(adapter, adapter->mbox, pi->tx_chan,   in cxgb4_get_module_eeprom()
      2089  ret = t4_i2c_rd(adapter, adapter->mbox, pi->tx_chan,   in cxgb4_get_module_eeprom()
      [all …]

  sge.c
      1693  ctrl0 = TXPKT_OPCODE_V(CPL_TX_PKT_XT) | TXPKT_INTF_V(pi->tx_chan) |   in cxgb4_eth_xmit()
      2322  TXPKT_INTF_V(pi->tx_chan) |   in ethofld_hard_xmit()
      2583  flowc->mnemval[1].val = cpu_to_be32(pi->tx_chan);   in cxgb4_ethofld_send_flowc()
      2585  flowc->mnemval[2].val = cpu_to_be32(pi->tx_chan);   in cxgb4_ethofld_send_flowc()
      2696  TXPKT_INTF_V(pi->tx_chan + 4);   in cxgb4_selftest_lb_pkt()
      4392  c.iqdroprss_to_iqesize = htons(FW_IQ_CMD_IQPCIECH_V(pi->tx_chan) |   in t4_sge_alloc_rxq()
      4620  FW_EQ_ETH_CMD_PCIECHN_V(pi->tx_chan) |   in t4_sge_alloc_eth_txq()
      4699  FW_EQ_CTRL_CMD_PCIECHN_V(pi->tx_chan) |   in t4_sge_alloc_ctrl_txq()
      4772  FW_EQ_OFLD_CMD_PCIECHN_V(pi->tx_chan) |   in t4_sge_alloc_ofld_txq()

  cxgb4_ptp.c
      186   c.u.init.txchan = pi->tx_chan;   in cxgb4_ptp_redirect_rx_packet()
|
/drivers/target/iscsi/cxgbit/
  cxgbit_cm.c
      938   csk->tx_chan = cxgb4_port_chan(ndev);   in cxgbit_offload_init()
      974   csk->tx_chan = cxgb4_port_chan(ndev);   in cxgbit_offload_init()
      1171  TX_CHAN_V(csk->tx_chan) |   in cxgbit_pass_accept_rpl()
      1441  flowc->mnemval[1].val = cpu_to_be32(csk->tx_chan);   in cxgbit_send_tx_flowc_wr()
      1443  flowc->mnemval[2].val = cpu_to_be32(csk->tx_chan);   in cxgbit_send_tx_flowc_wr()
      1481  __func__, csk, csk->tx_chan, csk->rss_qid, csk->snd_nxt,   in cxgbit_send_tx_flowc_wr()

  cxgbit.h
      221   u32 tx_chan;   member
|
/drivers/scsi/cxgbi/cxgb4i/
  cxgb4i.c
      222   TX_CHAN_V(csk->tx_chan) |   in send_act_open_req()
      341   TX_CHAN_V(csk->tx_chan) |   in send_act_open_req6()
      633   flowc->mnemval[1].val = htonl(csk->tx_chan);   in send_tx_flowc_wr()
      635   flowc->mnemval[2].val = htonl(csk->tx_chan);   in send_tx_flowc_wr()
      669   csk, csk->tid, 0, csk->tx_chan, csk->rss_qid,   in send_tx_flowc_wr()
      1861  csk->tx_chan = cxgb4_port_chan(ndev);   in init_act_open()
      1893  csk->flags, csk->tx_chan, csk->txq_idx, csk->rss_qid,   in init_act_open()
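The iSCSI initiator above, the cxgbit target and the iw_cxgb4 RDMA driver all emit the same FLOWC work request, writing tx_chan into the channel and port value slots before any payload is sent on a connection. The sketch below mimics only the shape of that fill-in step: the structure layout, mnemonic numbers and slot indices are invented stand-ins for the real cxgb4 firmware API definitions.

#include <linux/types.h>
#include <asm/byteorder.h>

/*
 * Simplified stand-in for a FLOWC mnemonic/value entry.  The real layout
 * and mnemonic IDs come from the cxgb4 firmware headers.
 */
struct my_flowc_mnemval {
        u8      mnemonic;
        u8      rsvd[3];
        __be32  val;
};

#define MY_FLOWC_MNEM_CH        1       /* invented IDs for illustration */
#define MY_FLOWC_MNEM_PORT      2

/* tx_chan goes into both the channel and the port slots of the request. */
static void my_fill_flowc_chan(struct my_flowc_mnemval *mnemval, u32 tx_chan)
{
        mnemval[1].mnemonic = MY_FLOWC_MNEM_CH;
        mnemval[1].val = cpu_to_be32(tx_chan);
        mnemval[2].mnemonic = MY_FLOWC_MNEM_PORT;
        mnemval[2].val = cpu_to_be32(tx_chan);
}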
|
/drivers/dma/qcom/
  gpi.c
      2071  struct gchan *tx_chan, *rx_chan;   in gpi_find_avail_gpii() local
      2079  tx_chan = &gpi_dev->gpiis[gpii].gchan[GPI_TX_CHAN];   in gpi_find_avail_gpii()
      2084  if (tx_chan->vc.chan.client_count && tx_chan->seid == seid)   in gpi_find_avail_gpii()
      2093  tx_chan = &gpi_dev->gpiis[gpii].gchan[GPI_TX_CHAN];   in gpi_find_avail_gpii()
      2097  if (tx_chan->vc.chan.client_count ||   in gpi_find_avail_gpii()
|
/drivers/infiniband/hw/cxgb4/
  cm.c
      620   flowc->mnemval[1].val = cpu_to_be32(ep->tx_chan);   in send_flowc()
      622   flowc->mnemval[2].val = cpu_to_be32(ep->tx_chan);   in send_flowc()
      794   TX_CHAN_V(ep->tx_chan) |   in send_connect()
      2012  TX_CHAN_V(ep->tx_chan) |   in send_fw_act_open_req()
      2018  TX_QUEUE_V(ep->com.dev->rdev.lldi.tx_modq[ep->tx_chan]) |   in send_fw_act_open_req()
      2111  ep->tx_chan = cxgb4_port_chan(pdev);   in import_ep()
      2129  ep->tx_chan = cxgb4_port_chan(pdev);   in import_ep()
      2236  ep->txq_idx, ep->tx_chan, ep->smac_idx, ep->rss_qid,   in c4iw_reconnect()
      2450  TX_CHAN_V(ep->tx_chan) |   in accept_cr()
      2649  child_ep->tx_chan, child_ep->smac_idx, child_ep->rss_qid);   in pass_accept_req()
      [all …]
|
/drivers/ntb/
  ntb_transport.c
      2423  struct dma_chan *rx_chan, *tx_chan;   in ntb_transport_max_size() local
      2429  tx_chan = qp->tx_dma_chan;   in ntb_transport_max_size()
      2432  tx_chan ? tx_chan->device->copy_align : 0);   in ntb_transport_max_size()
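ntb_transport_max_size() consults each channel's parent dma_device for its copy_align constraint and rounds the usable entry size down to the stricter of the two. A small sketch of that computation, assuming copy_align is the log2 of the required alignment as in the dmaengine core; the helper name and surrounding code are made up.

#include <linux/dmaengine.h>
#include <linux/kernel.h>

static unsigned int my_max_dma_size(unsigned int max_size,
                                    struct dma_chan *rx_chan,
                                    struct dma_chan *tx_chan)
{
        /* A NULL channel contributes no alignment constraint. */
        int rx_align = rx_chan ? rx_chan->device->copy_align : 0;
        int tx_align = tx_chan ? tx_chan->device->copy_align : 0;
        int align = max(rx_align, tx_align);

        /* copy_align is a power-of-two exponent, hence the shift. */
        return round_down(max_size, 1u << align);
}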
|