Searched refs:tx_chn (Results 1 – 10 of 10) sorted by relevance
/drivers/dma/ti/
k3-udma-glue.c
  136  if (tx_chn)  in of_k3_udma_glue_parse_chn_common()
  146  bool tx_chn)  in of_k3_udma_glue_parse_chn() [argument]
  292  tx_chn->udma_tchanx = xudma_tchan_get(tx_chn->common.udmax,  in k3_udma_glue_request_tx_chn_common()
  304  tx_chn->udma_tchan_id, tx_chn->common.dst_thread);  in k3_udma_glue_request_tx_chn_common()
  325  tx_chn->udma_tflow_id = tx_chn->udma_tchan_id;  in k3_udma_glue_request_tx_chn_common()
  382  tx_chn = devm_kzalloc(dev, sizeof(*tx_chn), GFP_KERNEL);  in k3_udma_glue_request_tx_chn()
  383  if (!tx_chn)  in k3_udma_glue_request_tx_chn()
  403  return tx_chn;  in k3_udma_glue_request_tx_chn()
  419  tx_chn = devm_kzalloc(dev, sizeof(*tx_chn), GFP_KERNEL);  in k3_udma_glue_request_tx_chn_for_thread_id()
  420  if (!tx_chn)  in k3_udma_glue_request_tx_chn_for_thread_id()
  [all …]
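The hits above are the glue layer's own request paths. For orientation, a minimal consumer-side sketch of requesting and releasing a TX channel through this API; the "tx0" name, ring sizes and swdata size are illustrative assumptions, not values from the files listed here.

    #include <linux/err.h>
    #include <linux/dma/k3-udma-glue.h>
    #include <linux/soc/ti/k3-ringacc.h>

    static int example_request_tx_chn(struct device *dev)
    {
            struct k3_udma_glue_tx_channel_cfg tx_cfg = { };
            struct k3_ring_cfg ring_cfg = {
                    .elm_size = K3_RINGACC_RING_ELSIZE_8,
                    .mode = K3_RINGACC_RING_MODE_RING,
                    .flags = 0,
                    .size = 128,            /* illustrative descriptor count */
            };
            struct k3_udma_glue_tx_channel *tx_chn;

            tx_cfg.swdata_size = 16;        /* per-descriptor software data, driver specific */
            tx_cfg.tx_cfg = ring_cfg;       /* TX submit ring */
            tx_cfg.txcq_cfg = ring_cfg;     /* TX completion ring */

            /* "tx0" must match a dma-names entry in the consumer's DT node */
            tx_chn = k3_udma_glue_request_tx_chn(dev, "tx0", &tx_cfg);
            if (IS_ERR(tx_chn))
                    return PTR_ERR(tx_chn);

            /* ... use the channel ... */
            k3_udma_glue_release_tx_chn(tx_chn);
            return 0;
    }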
/drivers/net/ethernet/ti/icssg/
icssg_common.c
   71  if (tx_chn->tx_chn)  in prueth_cleanup_tx_chns()
   72  k3_udma_glue_release_tx_chn(tx_chn->tx_chn);  in prueth_cleanup_tx_chns()
   77  memset(tx_chn, 0, sizeof(*tx_chn));  in prueth_cleanup_tx_chns()
   90  free_irq(tx_chn->irq, tx_chn);  in prueth_ndev_del_tx_napi()
  315  snprintf(tx_chn->name, sizeof(tx_chn->name),  in prueth_init_tx_chns()
  322  tx_chn->tx_chn =  in prueth_init_tx_chns()
  325  if (IS_ERR(tx_chn->tx_chn)) {  in prueth_init_tx_chns()
  326  ret = PTR_ERR(tx_chn->tx_chn);  in prueth_init_tx_chns()
  327  tx_chn->tx_chn = NULL;  in prueth_init_tx_chns()
  333  tx_chn->dma_dev = k3_udma_glue_tx_get_dma_device(tx_chn->tx_chn);  in prueth_init_tx_chns()
  [all …]
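prueth_init_tx_chns() follows the usual consumer pattern: request the glue channel, fetch its DMA device for buffer mappings, back the channel with a CPPI5 host-descriptor pool, and grab the completion IRQ. A hedged sketch of that sequence; the struct, sizes and "tx0" name are assumptions for illustration, not the driver's actual values.

    #include <linux/err.h>
    #include <linux/dma/k3-udma-glue.h>
    #include <linux/dma/ti-cppi5.h>
    #include "k3-cppi-desc-pool.h"          /* driver-local descriptor pool helpers */

    struct example_tx_chn {
            struct k3_udma_glue_tx_channel *tx_chn;
            struct k3_cppi_desc_pool *desc_pool;
            struct device *dma_dev;
            int irq;
    };

    static int example_init_tx_chn(struct device *dev, struct example_tx_chn *tx,
                                   struct k3_udma_glue_tx_channel_cfg *cfg)
    {
            /* EPIB + 16 bytes psdata + 16 bytes swdata; values are illustrative */
            u32 hdesc_size = cppi5_hdesc_calc_size(true, 16, 16);

            tx->tx_chn = k3_udma_glue_request_tx_chn(dev, "tx0", cfg);
            if (IS_ERR(tx->tx_chn))
                    return PTR_ERR(tx->tx_chn);

            /* DMA mappings must use the UDMA channel's device, not 'dev' */
            tx->dma_dev = k3_udma_glue_tx_get_dma_device(tx->tx_chn);

            tx->desc_pool = k3_cppi_desc_pool_create_name(tx->dma_dev, 128,
                                                          hdesc_size, "tx0");
            if (IS_ERR(tx->desc_pool)) {
                    k3_udma_glue_release_tx_chn(tx->tx_chn);
                    return PTR_ERR(tx->desc_pool);
            }

            tx->irq = k3_udma_glue_tx_get_irq(tx->tx_chn);
            if (tx->irq <= 0) {
                    k3_cppi_desc_pool_destroy(tx->desc_pool);
                    k3_udma_glue_release_tx_chn(tx->tx_chn);
                    return -ENXIO;
            }
            return 0;
    }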
icssg_ethtool.c
  216  struct prueth_tx_chn *tx_chn;  in emac_get_coalesce() [local]
  218  tx_chn = &emac->tx_chns[0];  in emac_get_coalesce()
  221  coal->tx_coalesce_usecs = tx_chn->tx_pace_timeout_ns / 1000;  in emac_get_coalesce()
  230  struct prueth_tx_chn *tx_chn;  in emac_get_per_queue_coalesce() [local]
  235  tx_chn = &emac->tx_chns[queue];  in emac_get_per_queue_coalesce()
  237  coal->tx_coalesce_usecs = tx_chn->tx_pace_timeout_ns / 1000;  in emac_get_per_queue_coalesce()
  249  struct prueth_tx_chn *tx_chn;  in emac_set_coalesce() [local]
  251  tx_chn = &emac->tx_chns[0];  in emac_set_coalesce()
  268  tx_chn->tx_pace_timeout_ns = coal->tx_coalesce_usecs * 1000;  in emac_set_coalesce()
  278  struct prueth_tx_chn *tx_chn;  in emac_set_per_queue_coalesce() [local]
  [all …]
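The global and per-queue ethtool coalesce hooks here mostly convert units: ethtool reports tx_coalesce_usecs in microseconds, while the channel's pacing timeout is kept in nanoseconds, hence the /1000 and *1000 in the hits above. A trivial sketch of that conversion; the helper names are made up.

    #include <linux/ethtool.h>

    /* ns -> us when reporting the TX pacing timeout to ethtool */
    static void example_get_tx_coalesce(struct ethtool_coalesce *coal, u32 tx_pace_timeout_ns)
    {
            coal->tx_coalesce_usecs = tx_pace_timeout_ns / 1000;
    }

    /* us -> ns when programming the TX pacing timeout from ethtool */
    static u32 example_set_tx_coalesce(const struct ethtool_coalesce *coal)
    {
            return coal->tx_coalesce_usecs * 1000;
    }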
icssg_prueth_sr1.c
   86  struct prueth_tx_chn *tx_chn;  in emac_send_command_sr1() [local]
   98  tx_chn = &emac->tx_chns[emac->tx_ch_num - 1];  in emac_send_command_sr1()
  101  buf_dma = dma_map_single(tx_chn->dma_dev, data, pkt_len, DMA_TO_DEVICE);  in emac_send_command_sr1()
  102  if (dma_mapping_error(tx_chn->dma_dev, buf_dma)) {  in emac_send_command_sr1()
  108  first_desc = k3_cppi_desc_pool_alloc(tx_chn->desc_pool);  in emac_send_command_sr1()
  111  dma_unmap_single(tx_chn->dma_dev, buf_dma, pkt_len, DMA_TO_DEVICE);  in emac_send_command_sr1()
  129  desc_dma = k3_cppi_desc_pool_virt2dma(tx_chn->desc_pool, first_desc);  in emac_send_command_sr1()
  133  ret = k3_udma_glue_push_tx_chn(tx_chn->tx_chn, first_desc, desc_dma);  in emac_send_command_sr1()
  146  prueth_xmit_free(tx_chn, first_desc);  in emac_send_command_sr1()
  590  ret = k3_udma_glue_enable_tx_chn(emac->tx_chns[i].tx_chn);  in emac_ndo_open()
  [all …]
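emac_send_command_sr1() shows the canonical TX submit sequence: map the buffer against the channel's DMA device, take a host descriptor from the pool, translate it to a DMA address and push it to the channel, unwinding everything if the push fails. A hedged sketch of that sequence; descriptor field setup (cppi5_hdesc_init()/cppi5_hdesc_attach_buf()) is elided and the names are illustrative.

    #include <linux/dma-mapping.h>
    #include <linux/dma/k3-udma-glue.h>
    #include <linux/dma/ti-cppi5.h>
    #include "k3-cppi-desc-pool.h"          /* driver-local descriptor pool helpers */

    static int example_push_one(struct k3_udma_glue_tx_channel *chn,
                                struct k3_cppi_desc_pool *pool,
                                struct device *dma_dev, void *data, size_t len)
    {
            struct cppi5_host_desc_t *first_desc;
            dma_addr_t buf_dma, desc_dma;
            int ret;

            buf_dma = dma_map_single(dma_dev, data, len, DMA_TO_DEVICE);
            if (dma_mapping_error(dma_dev, buf_dma))
                    return -EINVAL;

            first_desc = k3_cppi_desc_pool_alloc(pool);
            if (!first_desc) {
                    ret = -ENOMEM;
                    goto err_unmap;
            }

            /* cppi5_hdesc_init(first_desc, ...) and
             * cppi5_hdesc_attach_buf(first_desc, buf_dma, len, buf_dma, len)
             * would fill in the descriptor here */

            desc_dma = k3_cppi_desc_pool_virt2dma(pool, first_desc);
            ret = k3_udma_glue_push_tx_chn(chn, first_desc, desc_dma);
            if (ret)
                    goto err_free;

            return 0;

    err_free:
            k3_cppi_desc_pool_free(pool, first_desc);
    err_unmap:
            dma_unmap_single(dma_dev, buf_dma, len, DMA_TO_DEVICE);
            return ret;
    }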
icssg_prueth.h
  121  struct k3_udma_glue_tx_channel *tx_chn;  [member]
  454  void prueth_xmit_free(struct prueth_tx_chn *tx_chn,
icssg_prueth.c
  788  ret = k3_udma_glue_enable_tx_chn(emac->tx_chns[i].tx_chn);  in emac_ndo_open()
  867  k3_udma_glue_tdown_tx_chn(emac->tx_chns[i].tx_chn, false);  in emac_ndo_stop()
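These two hits show the lifecycle pairing: every TX channel enabled in ndo_open is torn down again in ndo_stop (sync=false requests teardown without blocking) before descriptors are reclaimed and the channel is disabled. A minimal sketch of that pairing; the helper names are made up, and real drivers also unwind already-enabled channels when enabling fails.

    #include <linux/dma/k3-udma-glue.h>

    static int example_open_tx(struct k3_udma_glue_tx_channel **chns, int num)
    {
            int i, ret;

            for (i = 0; i < num; i++) {
                    ret = k3_udma_glue_enable_tx_chn(chns[i]);
                    if (ret)
                            return ret;     /* unwind of already-enabled channels elided */
            }
            return 0;
    }

    static void example_stop_tx(struct k3_udma_glue_tx_channel **chns, int num)
    {
            int i;

            for (i = 0; i < num; i++)
                    /* sync=false: request teardown, don't wait for completion */
                    k3_udma_glue_tdown_tx_chn(chns[i], false);
    }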
/drivers/net/ethernet/ti/
am65-cpsw-nuss.c
   667  k3_udma_glue_reset_tx_chn(tx_chn->tx_chn, tx_chn,  in am65_cpsw_destroy_txq()
   669  k3_udma_glue_disable_tx_chn(tx_chn->tx_chn);  in am65_cpsw_destroy_txq()
  2190  if (!IS_ERR_OR_NULL(tx_chn->tx_chn))  in am65_cpsw_nuss_free_tx_chns()
  2193  memset(tx_chn, 0, sizeof(*tx_chn));  in am65_cpsw_nuss_free_tx_chns()
  2233  tx_chn->tx_chn_name, tx_chn);  in am65_cpsw_nuss_ndev_add_tx_napi()
  2236  tx_chn->id, tx_chn->irq, ret);  in am65_cpsw_nuss_ndev_add_tx_napi()
  2247  devm_free_irq(dev, tx_chn->irq, tx_chn);  in am65_cpsw_nuss_ndev_add_tx_napi()
  2287  tx_chn->tx_chn =  in am65_cpsw_nuss_init_tx_chns()
  2291  if (IS_ERR(tx_chn->tx_chn)) {  in am65_cpsw_nuss_init_tx_chns()
  2296  tx_chn->dma_dev = k3_udma_glue_tx_get_dma_device(tx_chn->tx_chn);  in am65_cpsw_nuss_init_tx_chns()
  [all …]
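The am65_cpsw_destroy_txq() hits illustrate the teardown side of the API: reset_tx_chn() drains any descriptors still queued on the channel, handing each one to a driver cleanup callback, and only then is the channel disabled. A hedged sketch of that order, assuming teardown was requested first; the names are illustrative.

    #include <linux/dma/k3-udma-glue.h>

    static void example_tx_cleanup(void *data, dma_addr_t desc_dma)
    {
            /* a real driver maps desc_dma back to its descriptor, unmaps the
             * buffer, frees the skb and returns the descriptor to the pool */
    }

    static void example_destroy_txq(struct k3_udma_glue_tx_channel *chn, void *drv_data)
    {
            k3_udma_glue_tdown_tx_chn(chn, false);
            k3_udma_glue_reset_tx_chn(chn, drv_data, example_tx_cleanup);
            k3_udma_glue_disable_tx_chn(chn);
    }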
am65-cpsw-ethtool.c
  920  struct am65_cpsw_tx_chn *tx_chn;  in am65_cpsw_get_per_queue_coalesce() [local]
  925  tx_chn = &common->tx_chns[queue];  in am65_cpsw_get_per_queue_coalesce()
  926  coal->tx_coalesce_usecs = tx_chn->tx_pace_timeout / 1000;  in am65_cpsw_get_per_queue_coalesce()
  946  struct am65_cpsw_tx_chn *tx_chn;  in am65_cpsw_set_per_queue_coalesce() [local]
  951  tx_chn = &common->tx_chns[queue];  in am65_cpsw_set_per_queue_coalesce()
  955  tx_chn->tx_pace_timeout = coal->tx_coalesce_usecs * 1000;  in am65_cpsw_set_per_queue_coalesce()
am65-cpsw-qos.c
  1205  struct am65_cpsw_tx_chn *tx_chn;  in am65_cpsw_qos_ndo_tx_p0_set_maxrate() [local]
  1247  tx_chn = &common->tx_chns[queue];  in am65_cpsw_qos_ndo_tx_p0_set_maxrate()
  1248  tx_chn->rate_mbps = rate_mbps;  in am65_cpsw_qos_ndo_tx_p0_set_maxrate()
  1268  struct am65_cpsw_tx_chn *tx_chn = &common->tx_chns[tx_ch];  in am65_cpsw_qos_tx_p0_rate_init() [local]
  1271  if (!tx_chn->rate_mbps)  in am65_cpsw_qos_tx_p0_rate_init()
  1274  ch_cir = am65_cpsw_qos_tx_rate_calc(tx_chn->rate_mbps,  in am65_cpsw_qos_tx_p0_rate_init()
am65-cpsw-nuss.h
  83  struct k3_udma_glue_tx_channel *tx_chn;  [member]
Completed in 38 milliseconds