Searched refs:tx_ring (Results 1 – 25 of 327) sorted by relevance


/linux/drivers/net/ethernet/netronome/nfp/nfd3/
rings.c
16 while (tx_ring->rd_p != tx_ring->wr_p) { in nfp_nfd3_xsk_tx_bufs_free()
17 idx = D_IDX(tx_ring, tx_ring->rd_p); in nfp_nfd3_xsk_tx_bufs_free()
23 tx_ring->rd_p++; in nfp_nfd3_xsk_tx_bufs_free()
47 while (!tx_ring->is_xdp && tx_ring->rd_p != tx_ring->wr_p) { in nfp_nfd3_tx_ring_reset()
52 idx = D_IDX(tx_ring, tx_ring->rd_p); in nfp_nfd3_tx_ring_reset()
78 tx_ring->rd_p++; in nfp_nfd3_tx_ring_reset()
84 memset(tx_ring->txds, 0, tx_ring->size); in nfp_nfd3_tx_ring_reset()
110 tx_ring->txds, tx_ring->dma); in nfp_nfd3_tx_ring_free()
133 tx_ring->size = array_size(tx_ring->cnt, sizeof(*tx_ring->txds)); in nfp_nfd3_tx_ring_alloc()
134 tx_ring->txds = dma_alloc_coherent(dp->dev, tx_ring->size, in nfp_nfd3_tx_ring_alloc()
[all …]
xsk.c
32 wr_idx = D_IDX(tx_ring, tx_ring->wr_p); in nfp_nfd3_xsk_tx_xdp()
51 tx_ring->wr_p++; in nfp_nfd3_xsk_tx_xdp()
283 if (tx_ring->wr_p == tx_ring->rd_p) in nfp_nfd3_xsk_complete()
292 todo = D_IDX(tx_ring, qcp_rd_p - tx_ring->qcp_rd_p); in nfp_nfd3_xsk_complete()
297 tx_ring->qcp_rd_p = D_IDX(tx_ring, tx_ring->qcp_rd_p + todo); in nfp_nfd3_xsk_complete()
303 idx = D_IDX(tx_ring, tx_ring->rd_p); in nfp_nfd3_xsk_complete()
304 tx_ring->rd_p++; in nfp_nfd3_xsk_complete()
326 WARN_ONCE(tx_ring->wr_p - tx_ring->rd_p > tx_ring->cnt, in nfp_nfd3_xsk_complete()
328 tx_ring->rd_p, tx_ring->wr_p, tx_ring->cnt); in nfp_nfd3_xsk_complete()
352 wr_idx = D_IDX(tx_ring, tx_ring->wr_p + i); in nfp_nfd3_xsk_tx()
[all …]
dp.c
281 qidx, tx_ring->wr_p, tx_ring->rd_p); in nfp_nfd3_tx()
307 wr_idx = D_IDX(tx_ring, tx_ring->wr_p); in nfp_nfd3_tx()
430 if (tx_ring->wr_p == tx_ring->rd_p) in nfp_nfd3_tx_complete()
448 idx = D_IDX(tx_ring, tx_ring->rd_p++); in nfp_nfd3_tx_complete()
501 WARN_ONCE(tx_ring->wr_p - tx_ring->rd_p > tx_ring->cnt, in nfp_nfd3_tx_complete()
503 tx_ring->rd_p, tx_ring->wr_p, tx_ring->cnt); in nfp_nfd3_tx_complete()
526 tx_ring->qcp_rd_p = D_IDX(tx_ring, tx_ring->qcp_rd_p + todo); in nfp_nfd3_xdp_complete()
530 idx = D_IDX(tx_ring, tx_ring->rd_p); in nfp_nfd3_xdp_complete()
541 WARN_ONCE(tx_ring->wr_p - tx_ring->rd_p > tx_ring->cnt, in nfp_nfd3_xdp_complete()
543 tx_ring->rd_p, tx_ring->wr_p, tx_ring->cnt); in nfp_nfd3_xdp_complete()
[all …]
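
Editor's note: the nfd3 matches above (and the nfdk ones below) all revolve around the same bookkeeping: rd_p and wr_p are free-running counters, D_IDX() masks them down to a slot index, and the WARN_ONCE lines assert that wr_p - rd_p never exceeds cnt. A minimal sketch of that pattern, assuming a power-of-two ring size and using illustrative names rather than the driver's real structures:

#include <assert.h>

struct toy_tx_ring {
        unsigned int rd_p;      /* free-running read (completion) counter */
        unsigned int wr_p;      /* free-running write (producer) counter */
        unsigned int cnt;       /* ring size, must be a power of two */
};

#define TOY_IDX(r, p)   ((p) & ((r)->cnt - 1))  /* plays the role of D_IDX() */

static int toy_ring_full(const struct toy_tx_ring *r)
{
        /* unsigned subtraction stays correct when wr_p wraps past UINT_MAX */
        return r->wr_p - r->rd_p >= r->cnt;
}

static void toy_produce(struct toy_tx_ring *r)
{
        assert(!toy_ring_full(r));
        /* a real driver would fill the descriptor at slot TOY_IDX(r, r->wr_p) */
        r->wr_p++;
}

static void toy_complete(struct toy_tx_ring *r)
{
        assert(r->rd_p != r->wr_p);             /* ring not empty */
        /* a real driver would free the buffer at slot TOY_IDX(r, r->rd_p) */
        r->rd_p++;
        assert(r->wr_p - r->rd_p <= r->cnt);    /* the WARN_ONCE invariant above */
}
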
/linux/drivers/net/ethernet/netronome/nfp/nfdk/
rings.c
16 while (!tx_ring->is_xdp && tx_ring->rd_p != tx_ring->wr_p) { in nfp_nfdk_tx_ring_reset()
23 rd_idx = D_IDX(tx_ring, tx_ring->rd_p); in nfp_nfdk_tx_ring_reset()
60 memset(tx_ring->txds, 0, tx_ring->size); in nfp_nfdk_tx_ring_reset()
62 tx_ring->wr_p = 0; in nfp_nfdk_tx_ring_reset()
63 tx_ring->rd_p = 0; in nfp_nfdk_tx_ring_reset()
83 tx_ring->ktxds, tx_ring->dma); in nfp_nfdk_tx_ring_free()
85 tx_ring->cnt = 0; in nfp_nfdk_tx_ring_free()
88 tx_ring->dma = 0; in nfp_nfdk_tx_ring_free()
98 tx_ring->size = array_size(tx_ring->cnt, sizeof(*tx_ring->ktxds)); in nfp_nfdk_tx_ring_alloc()
99 tx_ring->ktxds = dma_alloc_coherent(dp->dev, tx_ring->size, in nfp_nfdk_tx_ring_alloc()
[all …]
dp.c
280 qidx, tx_ring->wr_p, tx_ring->rd_p); in nfp_nfdk_tx()
301 wr_idx = D_IDX(tx_ring, tx_ring->wr_p); in nfp_nfdk_tx()
565 WARN_ONCE(tx_ring->wr_p - tx_ring->rd_p > tx_ring->cnt, in nfp_nfdk_tx_complete()
567 tx_ring->rd_p, tx_ring->wr_p, tx_ring->cnt); in nfp_nfdk_tx_complete()
888 tx_ring->qcp_rd_p = D_IDX(tx_ring, tx_ring->qcp_rd_p + done); in nfp_nfdk_xdp_complete()
891 WARN_ONCE(tx_ring->wr_p - tx_ring->rd_p > tx_ring->cnt, in nfp_nfdk_xdp_complete()
893 tx_ring->rd_p, tx_ring->wr_p, tx_ring->cnt); in nfp_nfdk_xdp_complete()
941 wr_idx = D_IDX(tx_ring, tx_ring->wr_p); in nfp_nfdk_tx_xdp_buf()
951 wr_idx = D_IDX(tx_ring, tx_ring->wr_p); in nfp_nfdk_tx_xdp_buf()
1331 tx_ring = r_vec->tx_ring; in nfp_nfdk_ctrl_tx_one()
[all …]
/linux/drivers/net/ethernet/amazon/ena/
ena_xdp.c
69 &tx_ring->syncp); in ena_xdp_tx_map_frame()
97 tx_ring, in ena_xdp_xmit_frame()
125 struct ena_ring *tx_ring; in ena_xdp_xmit() local
140 tx_ring = &adapter->tx_ring[qid]; in ena_xdp_xmit()
375 if (unlikely(!tx_ring)) in ena_clean_xdp_irq()
411 netif_dbg(tx_ring->adapter, tx_done, tx_ring->netdev, in ena_clean_xdp_irq()
418 netif_dbg(tx_ring->adapter, tx_done, tx_ring->netdev, in ena_clean_xdp_irq()
420 tx_ring->qid, tx_pkts); in ena_clean_xdp_irq()
431 struct ena_ring *tx_ring; in ena_xdp_io_poll() local
435 tx_ring = ena_napi->tx_ring; in ena_xdp_io_poll()
[all …]
ena_netdev.c
62 tx_ring = &adapter->tx_ring[txqueue]; in ena_tx_timeout()
259 struct ena_ring *tx_ring = &adapter->tx_ring[qid]; in ena_setup_tx_resources() local
326 struct ena_ring *tx_ring = &adapter->tx_ring[qid]; in ena_free_tx_resources() local
762 tx_ring = &adapter->tx_ring[i]; in ena_free_all_tx_bufs()
1470 tx_ring = ena_napi->tx_ring; in ena_io_poll()
1794 tx_ring = &adapter->tx_ring[i]; in ena_init_napi_in_range()
1805 napi->tx_ring = tx_ring; in ena_init_napi_in_range()
1896 tx_ring = &adapter->tx_ring[qid]; in ena_create_io_tx_queue()
2613 tx_ring = &adapter->tx_ring[qid]; in ena_start_xmit()
2818 tx_ring = &adapter->tx_ring[i]; in ena_get_stats64()
[all …]
/linux/drivers/net/ethernet/netronome/nfp/
nfp_net_dp.h
53 return (tx_ring->wr_p - tx_ring->rd_p) >= (tx_ring->cnt - dcnt); in nfp_net_tx_full()
59 nfp_qcp_wr_ptr_add(tx_ring->qcp_q, tx_ring->wr_ptr_add); in nfp_net_tx_xmit_more_flush()
60 tx_ring->wr_ptr_add = 0; in nfp_net_tx_xmit_more_flush()
66 if (tx_ring->txrwb) in nfp_net_read_tx_cmpl()
67 return *tx_ring->txrwb; in nfp_net_read_tx_cmpl()
151 struct nfp_net_tx_ring *tx_ring);
156 struct nfp_net_tx_ring *tx_ring);
158 struct nfp_net_tx_ring *tx_ring);
188 dp->ops->tx_ring_free(tx_ring); in nfp_net_tx_ring_free()
193 struct nfp_net_tx_ring *tx_ring) in nfp_net_tx_ring_bufs_alloc() argument
[all …]
nfp_net_debugfs.c
83 struct nfp_net_tx_ring *tx_ring; in nfp_tx_q_show() local
90 tx_ring = r_vec->tx_ring; in nfp_tx_q_show()
92 tx_ring = r_vec->xdp_ring; in nfp_tx_q_show()
93 if (!r_vec->nfp_net || !tx_ring) in nfp_tx_q_show()
99 d_rd_p = nfp_qcp_rd_ptr_read(tx_ring->qcp_q); in nfp_tx_q_show()
100 d_wr_p = nfp_qcp_wr_ptr_read(tx_ring->qcp_q); in nfp_tx_q_show()
103 tx_ring->idx, tx_ring->qcidx, in nfp_tx_q_show()
104 tx_ring == r_vec->tx_ring ? "" : "xdp", in nfp_tx_q_show()
105 tx_ring->cnt, &tx_ring->dma, tx_ring->txds, in nfp_tx_q_show()
106 tx_ring->rd_p, tx_ring->wr_p, d_rd_p, d_wr_p); in nfp_tx_q_show()
[all …]
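
Editor's note: the nfp_net_dp.h hit above is the queue-stop test: a frame that needs dcnt descriptors must wait once fewer than dcnt slots remain. A toy version of that arithmetic; the example values in the comment are made up for illustration:

static int toy_tx_full(unsigned int wr_p, unsigned int rd_p,
                       unsigned int cnt, unsigned int dcnt)
{
        /* descriptors in flight vs. capacity minus the dcnt we want to add;
         * unsigned subtraction keeps this correct across wr_p wraparound */
        return (wr_p - rd_p) >= (cnt - dcnt);
}

/* e.g. cnt = 1024, rd_p = 10, wr_p = 1030: 1020 descriptors in flight,
 * so a frame needing dcnt = 8 sees 1020 >= 1016 and the queue is stopped */
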
/linux/drivers/net/ethernet/intel/iavf/
iavf_txrx.c
70 iavf_unmap_and_free_tx_resource(tx_ring, &tx_ring->tx_bi[i]); in iavf_clean_tx_ring()
76 memset(tx_ring->desc, 0, tx_ring->size); in iavf_clean_tx_ring()
101 dma_free_coherent(tx_ring->dev, tx_ring->size, in iavf_free_tx_resources()
102 tx_ring->desc, tx_ring->dma); in iavf_free_tx_resources()
179 if (tx_ring && tx_ring->desc) { in iavf_detect_recover_hung()
322 (IAVF_DESC_UNUSED(tx_ring) != tx_ring->count)) in iavf_clean_tx_irq()
666 tx_ring->size = tx_ring->count * sizeof(struct iavf_tx_desc); in iavf_setup_tx_descriptors()
667 tx_ring->size = ALIGN(tx_ring->size, 4096); in iavf_setup_tx_descriptors()
668 tx_ring->desc = dma_alloc_coherent(dev, tx_ring->size, in iavf_setup_tx_descriptors()
1790 tx_ring->next_to_use = (i < tx_ring->count) ? i : 0; in iavf_create_tx_ctx()
[all …]
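
Editor's note: the iavf hits show the descriptor-ring setup and index handling shared by the Intel drivers in this list: size = count * sizeof(desc) rounded up with ALIGN(size, 4096), memory from dma_alloc_coherent(), and next_to_use wrapped with (i < count) ? i : 0. A userspace sketch of the same arithmetic, with aligned_alloc() standing in for dma_alloc_coherent() and a made-up descriptor layout:

#include <stdlib.h>
#include <stdint.h>

struct toy_desc { uint64_t addr; uint64_t cmd; };       /* made-up layout */

struct toy_ring {
        struct toy_desc *desc;          /* descriptor array */
        unsigned int count;             /* number of descriptors */
        unsigned int next_to_use;       /* producer index, 0 .. count - 1 */
};

static int toy_ring_setup(struct toy_ring *r, unsigned int count)
{
        size_t size = count * sizeof(struct toy_desc);

        size = (size + 4095) & ~(size_t)4095;   /* ALIGN(size, 4096) */
        r->desc = aligned_alloc(4096, size);    /* driver: dma_alloc_coherent() */
        if (!r->desc)
                return -1;
        r->count = count;
        r->next_to_use = 0;
        return 0;
}

static struct toy_desc *toy_next_desc(struct toy_ring *r)
{
        struct toy_desc *desc = &r->desc[r->next_to_use];
        unsigned int i = r->next_to_use + 1;

        /* same wrap idiom as "next_to_use = (i < count) ? i : 0" above */
        r->next_to_use = (i < r->count) ? i : 0;
        return desc;
}
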
/linux/drivers/net/ethernet/intel/ice/
ice_txrx.c
49 if (!tx_ring || !tx_ring->desc) in ice_prgm_fdir_fltr()
78 tx_ring->next_to_use = (i < tx_ring->count) ? i : 0; in ice_prgm_fdir_fltr()
102 writel(tx_ring->next_to_use, tx_ring->tail); in ice_prgm_fdir_fltr()
156 if (ice_ring_is_xdp(tx_ring) && tx_ring->xsk_pool) { in ice_clean_tx_ring()
167 ice_unmap_and_free_tx_buf(tx_ring, &tx_ring->tx_buf[i]); in ice_clean_tx_ring()
170 memset(tx_ring->tx_buf, 0, sizeof(*tx_ring->tx_buf) * tx_ring->count); in ice_clean_tx_ring()
198 devm_kfree(tx_ring->dev, tx_ring->tx_buf); in ice_free_tx_ring()
205 tx_ring->desc, tx_ring->dma); in ice_free_tx_ring()
2331 offload.tx_ring = tx_ring; in ice_xmit_frame_ring()
2334 first = &tx_ring->tx_buf[tx_ring->next_to_use]; in ice_xmit_frame_ring()
[all …]
/linux/drivers/infiniband/hw/hfi1/
ipoib_tx.c
136 struct hfi1_ipoib_circ_buf *tx_ring = &txq->tx_ring; in hfi1_ipoib_drain_tx_ring() local
158 struct hfi1_ipoib_circ_buf *tx_ring = &txq->tx_ring; in hfi1_ipoib_poll_tx_ring() local
355 struct hfi1_ipoib_circ_buf *tx_ring = &txq->tx_ring; in hfi1_ipoib_send_dma_common() local
479 tx_ring = &txq->tx_ring; in hfi1_ipoib_send_dma_single()
482 smp_store_release(&tx_ring->tail, CIRC_NEXT(tx_ring->tail, tx_ring->max_items)); in hfi1_ipoib_send_dma_single()
542 tx_ring = &txq->tx_ring; in hfi1_ipoib_send_dma_list()
545 smp_store_release(&tx_ring->tail, CIRC_NEXT(tx_ring->tail, tx_ring->max_items)); in hfi1_ipoib_send_dma_list()
708 tx_ring = &txq->tx_ring; in hfi1_ipoib_txreq_init()
740 tx_ring = &txq->tx_ring; in hfi1_ipoib_txreq_init()
759 tx_ring = &txq->tx_ring; in hfi1_ipoib_txreq_init()
[all …]
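
Editor's note: the hfi1 ipoib hits use a head/tail circular buffer where the consumer publishes its new tail with smp_store_release() and CIRC_NEXT() wraps the index (max_items is a power of two). A rough userspace analogue with C11 atomics; the names and the exact acquire/release placement are illustrative, not the driver's code:

#include <stdatomic.h>

#define TOY_CIRC_NEXT(i, size)  (((i) + 1) & ((size) - 1))      /* like CIRC_NEXT() */

struct toy_circ_buf {
        _Atomic unsigned int head;      /* producer index */
        _Atomic unsigned int tail;      /* consumer index */
        unsigned int max_items;         /* power of two */
};

/* consumer: release the buffer in slot `tail`, then publish the new tail;
 * the release store pairs with the producer's acquire load below
 * (smp_store_release()/smp_load_acquire() in the kernel) */
static void toy_consume_one(struct toy_circ_buf *cb)
{
        unsigned int tail = atomic_load_explicit(&cb->tail, memory_order_relaxed);

        /* ... free whatever was stored in slot `tail` ... */
        atomic_store_explicit(&cb->tail, TOY_CIRC_NEXT(tail, cb->max_items),
                              memory_order_release);
}

/* producer: the ring is full when advancing head would catch up with tail
 * (one slot is deliberately left unused) */
static int toy_circ_full(struct toy_circ_buf *cb)
{
        unsigned int head = atomic_load_explicit(&cb->head, memory_order_relaxed);
        unsigned int tail = atomic_load_explicit(&cb->tail, memory_order_acquire);

        return TOY_CIRC_NEXT(head, cb->max_items) == tail;
}
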
/linux/drivers/net/ethernet/intel/fm10k/
fm10k_main.c
775 tx_desc = FM10K_TX_DESC(tx_ring, tx_ring->next_to_use); in fm10k_tso()
869 tx_desc = FM10K_TX_DESC(tx_ring, tx_ring->next_to_use); in fm10k_tx_csum()
910 netif_stop_subqueue(tx_ring->netdev, tx_ring->queue_index); in __fm10k_maybe_stop_tx()
920 netif_start_subqueue(tx_ring->netdev, tx_ring->queue_index); in __fm10k_maybe_stop_tx()
1044 i = tx_ring->count; in fm10k_tx_map()
1077 first = &tx_ring->tx_buffer[tx_ring->next_to_use]; in fm10k_xmit_frame_ring()
1193 i -= tx_ring->count; in fm10k_clean_tx_irq()
1235 i -= tx_ring->count; in fm10k_clean_tx_irq()
1255 i -= tx_ring->count; in fm10k_clean_tx_irq()
1267 i += tx_ring->count; in fm10k_clean_tx_irq()
[all …]
/linux/drivers/net/ethernet/intel/i40e/
i40e_txrx_common.h
50 u64_stats_update_begin(&tx_ring->syncp); in i40e_update_tx_stats()
51 tx_ring->stats.bytes += total_bytes; in i40e_update_tx_stats()
52 tx_ring->stats.packets += total_packets; in i40e_update_tx_stats()
53 u64_stats_update_end(&tx_ring->syncp); in i40e_update_tx_stats()
54 tx_ring->q_vector->tx.total_bytes += total_bytes; in i40e_update_tx_stats()
55 tx_ring->q_vector->tx.total_packets += total_packets; in i40e_update_tx_stats()
70 if (tx_ring->flags & I40E_TXR_FLAGS_WB_ON_ITR) { in i40e_arm_wb()
76 unsigned int j = i40e_get_tx_pending(tx_ring, false); in i40e_arm_wb()
81 (I40E_DESC_UNUSED(tx_ring) != tx_ring->count)) in i40e_arm_wb()
82 tx_ring->arm_wb = true; in i40e_arm_wb()
[all …]
i40e_txrx.c
35 tx_ring->next_to_use = (i < tx_ring->count) ? i : 0; in i40e_fdir()
149 writel(tx_ring->next_to_use, tx_ring->tail); in i40e_program_fdir_filter()
787 if (ring_is_xdp(tx_ring) && tx_ring->xsk_pool) { in i40e_clean_tx_ring()
804 memset(tx_ring->desc, 0, tx_ring->size); in i40e_clean_tx_ring()
829 dma_free_coherent(tx_ring->dev, tx_ring->size, in i40e_free_tx_resources()
830 tx_ring->desc, tx_ring->dma); in i40e_free_tx_resources()
893 if (tx_ring && tx_ring->desc) { in i40e_detect_recover_hung()
1438 tx_ring->size = ALIGN(tx_ring->size, 4096); in i40e_setup_tx_descriptors()
2936 (tx_ring->atr_count < tx_ring->atr_sample_rate)) in i40e_atr()
2946 tx_ring->next_to_use = (i < tx_ring->count) ? i : 0; in i40e_atr()
[all …]
/linux/drivers/net/can/spi/mcp251xfd/
mcp251xfd-tx.c
25 tx_head = mcp251xfd_get_tx_head(tx_ring); in mcp251xfd_get_tx_obj_next()
27 return &tx_ring->obj[tx_head]; in mcp251xfd_get_tx_obj_next()
143 tx_ring->head--; in mcp251xfd_tx_failure_drop()
145 tx_head = mcp251xfd_get_tx_head(tx_ring); in mcp251xfd_tx_failure_drop()
176 if (mcp251xfd_get_tx_free(tx_ring) > 0) in mcp251xfd_tx_busy()
187 tx_ring->head, tx_ring->tail, in mcp251xfd_tx_busy()
188 tx_ring->head - tx_ring->tail); in mcp251xfd_tx_busy()
216 if (mcp251xfd_tx_busy(priv, tx_ring) || in mcp251xfd_start_xmit()
224 tx_head = mcp251xfd_get_tx_head(tx_ring); in mcp251xfd_start_xmit()
225 tx_ring->head++; in mcp251xfd_start_xmit()
[all …]
mcp251xfd-tef.c
114 const struct mcp251xfd_tx_ring *tx_ring = priv->tx; in mcp251xfd_get_tef_len() local
115 const u8 shift = tx_ring->obj_num_shift_to_u8; in mcp251xfd_get_tef_len()
129 mcp251xfd_get_tx_free(tx_ring) == 0) { in mcp251xfd_get_tef_len()
130 *len_p = tx_ring->obj_num; in mcp251xfd_get_tef_len()
160 const struct mcp251xfd_tx_ring *tx_ring = priv->tx; in mcp251xfd_tef_obj_read() local
164 (offset > tx_ring->obj_num || in mcp251xfd_tef_obj_read()
165 len > tx_ring->obj_num || in mcp251xfd_tef_obj_read()
166 offset + len > tx_ring->obj_num)) { in mcp251xfd_tef_obj_read()
169 tx_ring->obj_num, offset, len); in mcp251xfd_tef_obj_read()
230 struct mcp251xfd_tx_ring *tx_ring = priv->tx; in mcp251xfd_handle_tefif() local
[all …]
/linux/drivers/net/ethernet/freescale/enetc/
enetc.c
48 int index = &priv->tx_ring[tx_ring->index] - priv->xdp_tx_ring; in enetc_rx_ring_from_xdp_tx_ring()
109 enetc_wr_reg_hot(tx_ring->tpir, tx_ring->next_to_use); in enetc_update_tx_ring_tail()
604 tx_ring = priv->tx_ring[skb->queue_mapping]; in enetc_start_xmit()
890 enetc_wr_reg_hot(tx_ring->idr, BIT(tx_ring->index) | in enetc_clean_tx_ring()
1388 prefetchw(ENETC_TXBD(*tx_ring, tx_ring->next_to_use)); in enetc_xdp_xmit()
1845 struct enetc_bdr *tx_ring = priv->tx_ring[i]; in enetc_alloc_tx_resources() local
2683 tx_ring = priv->tx_ring[i]; in enetc_reset_tc_mqprio()
2684 tx_ring->prio = 0; in enetc_reset_tc_mqprio()
2685 enetc_set_bdr_prio(hw, tx_ring->index, tx_ring->prio); in enetc_reset_tc_mqprio()
2725 tx_ring = priv->tx_ring[q]; in enetc_setup_tc_mqprio()
[all …]
/linux/drivers/net/ethernet/intel/igbvf/
netdev.c
429 tx_ring->size = ALIGN(tx_ring->size, 4096); in igbvf_setup_tx_resources()
431 tx_ring->desc = dma_alloc_coherent(&pdev->dev, tx_ring->size, in igbvf_setup_tx_resources()
516 memset(tx_ring->desc, 0, tx_ring->size); in igbvf_clean_tx_ring()
540 dma_free_coherent(&pdev->dev, tx_ring->size, tx_ring->desc, in igbvf_free_tx_resources()
874 struct igbvf_ring *tx_ring = adapter->tx_ring; in igbvf_intr_msix_tx() local
978 struct igbvf_ring *tx_ring = adapter->tx_ring; in igbvf_configure_msix() local
986 writel(tx_ring->itr_val, hw->hw_addr + tx_ring->itr_register); in igbvf_configure_msix()
1291 struct igbvf_ring *tx_ring = adapter->tx_ring; in igbvf_configure_tx() local
1919 struct igbvf_ring *tx_ring = adapter->tx_ring; in igbvf_watchdog_task() local
1989 tx_ring->next_to_use = (i < tx_ring->count) ? i : 0; in igbvf_tx_ctxtdesc()
[all …]
/linux/drivers/net/ethernet/wangxun/libwx/
wx_lib.c
880 netif_stop_subqueue(tx_ring->netdev, tx_ring->queue_index); in wx_maybe_stop_tx()
892 netif_start_subqueue(tx_ring->netdev, tx_ring->queue_index); in wx_maybe_stop_tx()
1075 tx_ring->next_to_use = (i < tx_ring->count) ? i : 0; in wx_tx_ctxtdesc()
1478 first = &tx_ring->tx_buffer_info[tx_ring->next_to_use]; in wx_xmit_frame_ring()
1534 tx_ring = wx->tx_ring[r_idx]; in wx_xmit_frame()
2394 dma_free_coherent(tx_ring->dev, tx_ring->size, in wx_free_tx_resources()
2395 tx_ring->desc, tx_ring->dma); in wx_free_tx_resources()
2560 tx_ring->size = tx_ring->count * sizeof(union wx_tx_desc); in wx_setup_tx_resources()
2561 tx_ring->size = ALIGN(tx_ring->size, 4096); in wx_setup_tx_resources()
2564 tx_ring->desc = dma_alloc_coherent(dev, tx_ring->size, in wx_setup_tx_resources()
[all …]
/linux/drivers/net/ethernet/qlogic/qlcnic/
qlcnic_io.c
968 tx_ring = sds_ring->tx_ring; in qlcnic_poll()
1607 tx_ring = &adapter->tx_ring[ring]; in qlcnic_82xx_napi_add()
1632 tx_ring = &adapter->tx_ring[ring]; in qlcnic_82xx_napi_del()
1660 tx_ring = &adapter->tx_ring[ring]; in qlcnic_82xx_napi_enable()
1688 tx_ring = &adapter->tx_ring[ring]; in qlcnic_82xx_napi_disable()
1959 tx_ring = adapter->tx_ring; in qlcnic_83xx_msix_sriov_vf_poll()
1987 tx_ring = adapter->tx_ring; in qlcnic_83xx_poll()
2063 tx_ring = &adapter->tx_ring[ring]; in qlcnic_83xx_napi_enable()
2091 tx_ring = &adapter->tx_ring[ring]; in qlcnic_83xx_napi_disable()
2134 tx_ring = &adapter->tx_ring[ring]; in qlcnic_83xx_napi_add()
[all …]
/linux/drivers/net/ethernet/broadcom/
bcm4908_enet.c
198 struct bcm4908_enet_dma_ring *tx_ring = &enet->tx_ring; in bcm4908_enet_dma_free() local
209 if (tx_ring->cpu_addr) in bcm4908_enet_dma_free()
210 dma_free_coherent(dev, size, tx_ring->cpu_addr, tx_ring->dma_addr); in bcm4908_enet_dma_free()
216 struct bcm4908_enet_dma_ring *tx_ring = &enet->tx_ring; in bcm4908_enet_dma_alloc() local
222 tx_ring->is_tx = 1; in bcm4908_enet_dma_alloc()
451 struct bcm4908_enet_dma_ring *tx_ring = &enet->tx_ring; in bcm4908_enet_open() local
501 struct bcm4908_enet_dma_ring *tx_ring = &enet->tx_ring; in bcm4908_enet_stop() local
661 while (handled < weight && tx_ring->read_idx != tx_ring->write_idx) { in bcm4908_enet_poll_tx()
662 buf_desc = &tx_ring->buf_desc[tx_ring->read_idx]; in bcm4908_enet_poll_tx()
665 slot = &tx_ring->slots[tx_ring->read_idx]; in bcm4908_enet_poll_tx()
[all …]
/linux/drivers/net/ethernet/mscc/
ocelot_fdma.c
69 struct ocelot_fdma_tx_ring *tx_ring = &fdma->tx_ring; in ocelot_fdma_tx_ring_free() local
71 if (tx_ring->next_to_use >= tx_ring->next_to_clean) in ocelot_fdma_tx_ring_free()
73 (tx_ring->next_to_use - tx_ring->next_to_clean) - 1; in ocelot_fdma_tx_ring_free()
75 return tx_ring->next_to_clean - tx_ring->next_to_use - 1; in ocelot_fdma_tx_ring_free()
80 struct ocelot_fdma_tx_ring *tx_ring = &fdma->tx_ring; in ocelot_fdma_tx_ring_empty() local
82 return tx_ring->next_to_clean == tx_ring->next_to_use; in ocelot_fdma_tx_ring_empty()
495 tx_ring = &fdma->tx_ring; in ocelot_fdma_tx_cleanup()
610 struct ocelot_fdma_tx_ring *tx_ring = &fdma->tx_ring; in ocelot_fdma_send_skb() local
616 dcb = &tx_ring->dcbs[tx_ring->next_to_use]; in ocelot_fdma_send_skb()
617 tx_buf = &tx_ring->bufs[tx_ring->next_to_use]; in ocelot_fdma_send_skb()
[all …]
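
Editor's note: the ocelot_fdma hit computes free TX slots from next_to_use/next_to_clean, keeping one slot unused so that equal indices mean "empty" rather than "full". A small sketch of that calculation, with made-up example values in the comment:

static unsigned int toy_tx_ring_free(unsigned int next_to_use,
                                     unsigned int next_to_clean,
                                     unsigned int cnt)
{
        if (next_to_use >= next_to_clean)
                return cnt - (next_to_use - next_to_clean) - 1;

        return next_to_clean - next_to_use - 1;
}

/* e.g. cnt = 8: next_to_use = 5, next_to_clean = 2 -> 4 slots free
 *               next_to_use = 1, next_to_clean = 2 -> 0 slots free (full) */
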
/linux/drivers/net/ethernet/intel/igc/
igc_xdp.c
48 struct igc_ring *rx_ring, *tx_ring; in igc_xdp_enable_pool() local
77 tx_ring = adapter->tx_ring[queue_id]; in igc_xdp_enable_pool()
83 igc_disable_tx_ring(tx_ring); in igc_xdp_enable_pool()
88 set_bit(IGC_RING_FLAG_AF_XDP_ZC, &tx_ring->flags); in igc_xdp_enable_pool()
93 igc_enable_tx_ring(tx_ring); in igc_xdp_enable_pool()
107 struct igc_ring *rx_ring, *tx_ring; in igc_xdp_disable_pool() local
123 tx_ring = adapter->tx_ring[queue_id]; in igc_xdp_disable_pool()
129 igc_disable_tx_ring(tx_ring); in igc_xdp_disable_pool()
135 clear_bit(IGC_RING_FLAG_AF_XDP_ZC, &tx_ring->flags); in igc_xdp_disable_pool()
140 igc_enable_tx_ring(tx_ring); in igc_xdp_disable_pool()
igc_dump.c
118 struct igc_ring *tx_ring; in igc_rings_dump() local
138 tx_ring = adapter->tx_ring[n]; in igc_rings_dump()
139 buffer_info = &tx_ring->tx_buffer_info[tx_ring->next_to_clean]; in igc_rings_dump()
142 n, tx_ring->next_to_use, tx_ring->next_to_clean, in igc_rings_dump()
167 tx_ring = adapter->tx_ring[n]; in igc_rings_dump()
170 tx_ring->queue_index); in igc_rings_dump()
174 for (i = 0; tx_ring->desc && (i < tx_ring->count); i++) { in igc_rings_dump()
178 tx_desc = IGC_TX_DESC(tx_ring, i); in igc_rings_dump()
181 if (i == tx_ring->next_to_use && in igc_rings_dump()
182 i == tx_ring->next_to_clean) in igc_rings_dump()
[all …]

Completed in 119 milliseconds
