
Searched refs:rx_ring (Results 1 – 25 of 288) sorted by relevance

/drivers/net/ethernet/netronome/nfp/
nfp_net_dp.c 85 rx_ring->fl_qcidx = rx_ring->idx * nn->stride_rx; in nfp_net_rx_ring_init()
102 if (rx_ring->wr_p == 0 && rx_ring->rd_p == 0) in nfp_net_rx_ring_reset()
106 wr_idx = D_IDX(rx_ring, rx_ring->wr_p); in nfp_net_rx_ring_reset()
109 rx_ring->xsk_rxbufs[wr_idx] = rx_ring->xsk_rxbufs[last_idx]; in nfp_net_rx_ring_reset()
113 rx_ring->rxbufs[wr_idx] = rx_ring->rxbufs[last_idx]; in nfp_net_rx_ring_reset()
117 memset(rx_ring->rxds, 0, rx_ring->size); in nfp_net_rx_ring_reset()
266 rx_ring->rxds, rx_ring->dma); in nfp_net_rx_ring_free()
300 rx_ring->idx, rx_ring->r_vec->napi.napi_id); in nfp_net_rx_ring_alloc()
310 rx_ring->size = array_size(rx_ring->cnt, sizeof(*rx_ring->rxds)); in nfp_net_rx_ring_alloc()
311 rx_ring->rxds = dma_alloc_coherent(dp->dev, rx_ring->size, in nfp_net_rx_ring_alloc()
[all …]
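
The nfp hits above index the ring through D_IDX(): wr_p and rd_p are free-running counters, and (assuming, as the reset and freelist code suggests, that D_IDX() masks with cnt - 1 on a power-of-two ring) the slot number falls out without a modulo. A minimal, self-contained illustration of that masking idea; RING_CNT and the counter values are made-up example numbers, and the simplified D_IDX() here drops the ring argument of the real macro:

#include <stdio.h>

#define RING_CNT 8u                        /* must be a power of two */
#define D_IDX(p) ((p) & (RING_CNT - 1u))   /* assumed shape of the real macro */

int main(void)
{
	unsigned int wr_p = 13, rd_p = 9;    /* free-running producer/consumer counters */

	printf("write slot %u, read slot %u, %u buffers in flight\n",
	       D_IDX(wr_p), D_IDX(rd_p), wr_p - rd_p);
	return 0;
}

With free-running counters, wr_p - rd_p is directly the number of buffers in flight, the sort of arithmetic a check like nfp_net_rx_space() in the xsk freelist code can build on.
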
nfp_net_xsk.c 24 rx_ring->rxds[idx].fld.reserved = 0; in nfp_net_xsk_rx_bufs_stash()
25 rx_ring->rxds[idx].fld.meta_len_dd = 0; in nfp_net_xsk_rx_bufs_stash()
27 rx_ring->xsk_rxbufs[idx].xdp = xdp; in nfp_net_xsk_rx_bufs_stash()
28 rx_ring->xsk_rxbufs[idx].dma_addr = in nfp_net_xsk_rx_bufs_stash()
50 if (!rx_ring->cnt) in nfp_net_xsk_rx_bufs_free()
53 for (i = 0; i < rx_ring->cnt - 1; i++) in nfp_net_xsk_rx_bufs_free()
54 nfp_net_xsk_rx_free(&rx_ring->xsk_rxbufs[i]); in nfp_net_xsk_rx_bufs_free()
64 while (nfp_net_rx_space(rx_ring)) { in nfp_net_xsk_rx_ring_fill_freelist()
65 wr_idx = D_IDX(rx_ring, rx_ring->wr_p); in nfp_net_xsk_rx_ring_fill_freelist()
78 rx_ring->xsk_rxbufs[wr_idx].dma_addr); in nfp_net_xsk_rx_ring_fill_freelist()
[all …]
nfp_net_debugfs.c 15 struct nfp_net_rx_ring *rx_ring; in nfp_rx_q_show() local
24 if (!r_vec->nfp_net || !r_vec->rx_ring) in nfp_rx_q_show()
27 rx_ring = r_vec->rx_ring; in nfp_rx_q_show()
31 rxd_cnt = rx_ring->cnt; in nfp_rx_q_show()
37 rx_ring->idx, rx_ring->fl_qcidx, in nfp_rx_q_show()
38 rx_ring->cnt, &rx_ring->dma, rx_ring->rxds, in nfp_rx_q_show()
39 rx_ring->rd_p, rx_ring->wr_p, fl_rd_p, fl_wr_p); in nfp_rx_q_show()
42 rxd = &rx_ring->rxds[i]; in nfp_rx_q_show()
51 if (rx_ring->rxbufs[i].dma_addr) in nfp_rx_q_show()
60 if (i == rx_ring->rd_p % rxd_cnt) in nfp_rx_q_show()
[all …]
/drivers/net/ethernet/intel/igb/
igb_xsk.c 37 struct igb_ring *rx_ring = adapter->rx_ring[qid]; in igb_txrx_ring_disable() local
51 igb_clean_rx_ring(rx_ring); in igb_txrx_ring_disable()
53 memset(&rx_ring->rx_stats, 0, sizeof(rx_ring->rx_stats)); in igb_txrx_ring_disable()
60 struct igb_ring *rx_ring = adapter->rx_ring[qid]; in igb_txrx_ring_enable() local
73 if (rx_ring->xsk_pool) in igb_txrx_ring_enable()
74 igb_alloc_rx_buffers_zc(rx_ring, rx_ring->xsk_pool, in igb_txrx_ring_enable()
77 igb_alloc_rx_buffers(rx_ring, igb_desc_unused(rx_ring)); in igb_txrx_ring_enable()
102 struct igb_ring *rx_ring; in igb_xsk_pool_enable() local
117 rx_ring = adapter->rx_ring[qid]; in igb_xsk_pool_enable()
150 rx_ring = adapter->rx_ring[qid]; in igb_xsk_pool_disable()
[all …]
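
The igb_xsk.c hits show the usual AF_XDP pool attach/detach sequence: quiesce the queue pair, swap the xsk_buff_pool pointer, then re-enable and refill with either zero-copy or regular page buffers. A rough sketch of that shape; everything prefixed my_ is a hypothetical stand-in for driver internals, and only netif_running() and struct xsk_buff_pool are real kernel interfaces here:

#include <linux/netdevice.h>
#include <net/xsk_buff_pool.h>

/* Illustrative, much-reduced types; the real igb structures differ. */
struct my_ring {
	struct xsk_buff_pool *xsk_pool;	/* non-NULL while zero-copy is active */
};

struct my_adapter {
	struct net_device *netdev;
	struct my_ring *rx_ring[16];	/* 16 queues is an arbitrary example */
};

/* Hypothetical helpers standing in for the driver's own routines. */
static void my_ring_disable(struct my_adapter *a, u16 qid) { }
static void my_ring_enable(struct my_adapter *a, u16 qid) { }
static void my_ring_refill(struct my_ring *r) { }

static int my_xsk_pool_setup(struct my_adapter *adapter,
			     struct xsk_buff_pool *pool, u16 qid)
{
	struct my_ring *rx_ring = adapter->rx_ring[qid];
	bool running = netif_running(adapter->netdev);

	if (running)
		my_ring_disable(adapter, qid);	/* NAPI off, ring drained, stats reset */

	rx_ring->xsk_pool = pool;		/* NULL here means "detach" */

	if (running) {
		my_ring_enable(adapter, qid);	/* descriptors re-initialised */
		my_ring_refill(rx_ring);	/* zero-copy or page buffers */
	}
	return 0;
}
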
/drivers/net/ethernet/intel/ice/
ice_txrx.c 429 memset(rx_ring->xdp_buf, 0, array_size(rx_ring->count, sizeof(*rx_ring->xdp_buf))); in ice_clean_rx_ring()
431 memset(rx_ring->rx_buf, 0, array_size(rx_ring->count, sizeof(*rx_ring->rx_buf))); in ice_clean_rx_ring()
471 rx_ring->desc, rx_ring->dma); in ice_free_rx_ring()
492 rx_ring->rx_buf = in ice_setup_rx_ring()
493 kcalloc(rx_ring->count, sizeof(*rx_ring->rx_buf), GFP_KERNEL); in ice_setup_rx_ring()
500 rx_ring->desc = dmam_alloc_coherent(dev, size, &rx_ring->dma, in ice_setup_rx_ring()
513 WRITE_ONCE(rx_ring->xdp_prog, rx_ring->vsi->xdp_prog); in ice_setup_rx_ring()
925 rx_ring->next_to_alloc = (nta < rx_ring->count) ? nta : 0; in ice_reuse_rx_page()
1078 rx_buf = &rx_ring->rx_buf[rx_ring->first_desc]; in ice_construct_skb()
1251 ice_init_ctrl_rx_descs(rx_ring, ICE_RX_DESC_UNUSED(rx_ring)); in ice_clean_ctrl_rx_irq()
[all …]
ice_xsk.c 18 return &rx_ring->xdp_buf[idx]; in ice_xdp_buf()
98 reg = rx_ring->reg_idx; in ice_qvec_dis_irq()
168 struct ice_rx_ring *rx_ring; in ice_qp_dis() local
328 kfree(rx_ring->rx_buf); in ice_realloc_rx_xdp_bufs()
329 rx_ring->rx_buf = NULL; in ice_realloc_rx_xdp_bufs()
332 kfree(rx_ring->xdp_buf); in ice_realloc_rx_xdp_bufs()
333 rx_ring->xdp_buf = NULL; in ice_realloc_rx_xdp_bufs()
356 if (!rx_ring->xsk_pool) in ice_realloc_zc_buf()
504 if (ntu == rx_ring->count) in __ice_alloc_rx_bufs_zc()
824 u32 cnt = rx_ring->count; in ice_clean_rx_irq_zc()
[all …]
/drivers/net/ethernet/intel/i40e/
i40e_xsk.c 12 memset(rx_ring->rx_bi_zc, 0, in i40e_clear_rx_bi_zc()
13 sizeof(*rx_ring->rx_bi_zc) * rx_ring->count); in i40e_clear_rx_bi_zc()
18 return &rx_ring->rx_bi_zc[idx]; in i40e_rx_bi()
40 kfree(rx_ring->rx_bi); in i40e_realloc_rx_xdp_bi()
41 rx_ring->rx_bi = NULL; in i40e_realloc_rx_xdp_bi()
44 kfree(rx_ring->rx_bi_zc); in i40e_realloc_rx_xdp_bi()
45 rx_ring->rx_bi_zc = NULL; in i40e_realloc_rx_xdp_bi()
46 rx_ring->rx_bi = sw_ring; in i40e_realloc_rx_xdp_bi()
62 struct i40e_ring *rx_ring; in i40e_realloc_rx_bi_zc() local
66 rx_ring = vsi->rx_rings[q]; in i40e_realloc_rx_bi_zc()
[all …]
i40e_txrx.c 1461 memset(rx_ring->rx_bi, 0, sizeof(*rx_ring->rx_bi) * rx_ring->count); in i40e_clear_rx_bi()
1516 memset(rx_ring->desc, 0, rx_ring->size); in i40e_clean_rx_ring()
1540 dma_free_coherent(rx_ring->dev, rx_ring->size, in i40e_free_rx_resources()
1541 rx_ring->desc, rx_ring->dma); in i40e_free_rx_resources()
1560 rx_ring->size = ALIGN(rx_ring->size, 4096); in i40e_setup_rx_descriptors()
1561 rx_ring->desc = dma_alloc_coherent(dev, rx_ring->size, in i40e_setup_rx_descriptors()
1575 rx_ring->xdp_prog = rx_ring->vsi->xdp_prog; in i40e_setup_rx_descriptors()
2160 if (++rx_ring->next_to_clean == rx_ring->count) in i40e_construct_skb()
2359 rx_ring->vsi->xdp_rings[rx_ring->queue_index]; in i40e_finalize_xdp_rx()
2423 rx_ring->next_to_clean = rx_ring->next_to_process; in i40e_consume_xdp_buff()
[all …]
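
The i40e hits (and the igbvf, ixgbevf and wx ones further down) share the same descriptor-memory pattern: size = count * sizeof(descriptor), rounded up to 4 KiB, allocated with dma_alloc_coherent() and released with dma_free_coherent(). A hedged sketch of that pattern; struct my_rx_ring and my_desc are illustrative stand-ins, not any driver's real types:

#include <linux/dma-mapping.h>

struct my_desc { __le64 addr; __le64 status; };	/* made-up descriptor layout */

struct my_rx_ring {
	struct device *dev;
	struct my_desc *desc;	/* CPU address of the descriptor array */
	dma_addr_t dma;		/* bus address programmed into the NIC */
	u16 count;
	unsigned int size;
};

static int my_setup_rx_descriptors(struct my_rx_ring *rx_ring)
{
	rx_ring->size = rx_ring->count * sizeof(struct my_desc);
	rx_ring->size = ALIGN(rx_ring->size, 4096);	/* page-align, as above */

	rx_ring->desc = dma_alloc_coherent(rx_ring->dev, rx_ring->size,
					   &rx_ring->dma, GFP_KERNEL);
	return rx_ring->desc ? 0 : -ENOMEM;
}

static void my_free_rx_resources(struct my_rx_ring *rx_ring)
{
	dma_free_coherent(rx_ring->dev, rx_ring->size,
			  rx_ring->desc, rx_ring->dma);
	rx_ring->desc = NULL;
}

The ice hit above uses the managed dmam_alloc_coherent() variant instead, which ties the allocation's lifetime to the struct device.
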
/drivers/net/ethernet/intel/ixgbe/
ixgbe_xsk.c 165 i -= rx_ring->count; in ixgbe_alloc_rx_buffers_zc()
187 i -= rx_ring->count; in ixgbe_alloc_rx_buffers_zc()
196 i += rx_ring->count; in ixgbe_alloc_rx_buffers_zc()
271 rx_desc = IXGBE_RX_DESC(rx_ring, rx_ring->next_to_clean); in ixgbe_clean_rx_irq_zc()
282 bi = &rx_ring->rx_buffer_info[rx_ring->next_to_clean]; in ixgbe_clean_rx_irq_zc()
290 ixgbe_inc_ntc(rx_ring); in ixgbe_clean_rx_irq_zc()
292 &rx_ring->rx_buffer_info[rx_ring->next_to_clean]; in ixgbe_clean_rx_irq_zc()
301 ixgbe_inc_ntc(rx_ring); in ixgbe_clean_rx_irq_zc()
325 ixgbe_inc_ntc(rx_ring); in ixgbe_clean_rx_irq_zc()
340 ixgbe_inc_ntc(rx_ring); in ixgbe_clean_rx_irq_zc()
[all …]
/drivers/net/can/spi/mcp251xfd/
mcp251xfd-ring.c 210 rx_ring->head = 0; in mcp251xfd_ring_init_rx()
211 rx_ring->tail = 0; in mcp251xfd_ring_init_rx()
213 rx_ring->nr = i; in mcp251xfd_ring_init_rx()
216 *base = mcp251xfd_get_rx_obj_addr(rx_ring, rx_ring->obj_num); in mcp251xfd_ring_init_rx()
225 rx_ring->irq_enable_xfer.tx_buf = &rx_ring->irq_enable_buf; in mcp251xfd_ring_init_rx()
347 rx_ring->nr, rx_ring->fifo_nr, in mcp251xfd_ring_init()
366 rx_ring->nr, rx_ring->fifo_nr, in mcp251xfd_ring_init()
368 rx_ring->obj_num, rx_ring->obj_size, in mcp251xfd_ring_init()
369 rx_ring->obj_num * rx_ring->obj_size); in mcp251xfd_ring_init()
529 rx_ring = kzalloc(sizeof(*rx_ring) + rx_obj_size * rx_obj_num, in mcp251xfd_ring_alloc()
[all …]
/drivers/net/ethernet/amazon/ena/
ena_netdev.c 370 struct ena_ring *rx_ring = &adapter->rx_ring[qid]; in ena_setup_rx_resources() local
643 rx_ring = &adapter->rx_ring[i]; in ena_refill_all_rx_bufs()
1351 struct ena_ring *rx_ring = ena_napi->rx_ring; in ena_adjust_adaptive_rx_intr_moderation() local
1448 rx_ring = ena_napi->rx_ring; in ena_io_poll()
1777 rx_ring = &adapter->rx_ring[i]; in ena_init_napi_in_range()
1787 napi->rx_ring = rx_ring; in ena_init_napi_in_range()
1968 rx_ring = &adapter->rx_ring[qid]; in ena_create_io_rx_queue()
2374 rx_ring = &adapter->rx_ring[i]; in ena_set_rx_copybreak()
2838 rx_ring = &adapter->rx_ring[i]; in ena_get_stats64()
3535 rx_ring = &adapter->rx_ring[qid]; in check_for_missing_completions()
[all …]
ena_xdp.h 90 xdp_prog = READ_ONCE(rx_ring->xdp_bpf_prog); in ena_xdp_execute()
99 xdp_stat = &rx_ring->rx_stats.xdp_aborted; in ena_xdp_execute()
105 xdp_ring = rx_ring->xdp_ring; in ena_xdp_execute()
115 xdp_stat = &rx_ring->rx_stats.xdp_tx; in ena_xdp_execute()
120 xdp_stat = &rx_ring->rx_stats.xdp_redirect; in ena_xdp_execute()
125 xdp_stat = &rx_ring->rx_stats.xdp_aborted; in ena_xdp_execute()
130 xdp_stat = &rx_ring->rx_stats.xdp_aborted; in ena_xdp_execute()
134 xdp_stat = &rx_ring->rx_stats.xdp_drop; in ena_xdp_execute()
138 xdp_stat = &rx_ring->rx_stats.xdp_pass; in ena_xdp_execute()
143 xdp_stat = &rx_ring->rx_stats.xdp_invalid; in ena_xdp_execute()
[all …]
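
ena_xdp_execute() above is an instance of the standard XDP verdict dispatch: run the attached program with bpf_prog_run_xdp() and bump one statistics counter per verdict. A hedged sketch of that dispatch; the stats structure and field names are illustrative, not ENA's:

#include <linux/filter.h>

struct my_xdp_stats { u64 pass, tx, redirect, drop, aborted; };

static u32 my_xdp_run(struct bpf_prog *xdp_prog, struct xdp_buff *xdp,
		      struct my_xdp_stats *stats)
{
	u32 verdict = bpf_prog_run_xdp(xdp_prog, xdp);

	switch (verdict) {
	case XDP_PASS:
		stats->pass++;		/* hand the frame to the normal stack */
		break;
	case XDP_TX:
		stats->tx++;		/* caller queues it on the paired XDP TX ring */
		break;
	case XDP_REDIRECT:
		stats->redirect++;	/* caller completes via xdp_do_redirect() */
		break;
	case XDP_DROP:
		stats->drop++;
		break;
	default:			/* unknown action: treat like an abort */
	case XDP_ABORTED:
		stats->aborted++;
		break;
	}
	return verdict;
}
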
ena_xdp.c 200 rc = xdp_rxq_info_reg(&rx_ring->xdp_rxq, rx_ring->netdev, rx_ring->qid, 0); in ena_xdp_register_rxq_info()
202 netif_dbg(rx_ring->adapter, ifup, rx_ring->netdev, "Registering RX info for queue %d", in ena_xdp_register_rxq_info()
203 rx_ring->qid); in ena_xdp_register_rxq_info()
205 netif_err(rx_ring->adapter, ifup, rx_ring->netdev, in ena_xdp_register_rxq_info()
207 rx_ring->qid, rc); in ena_xdp_register_rxq_info()
214 netif_err(rx_ring->adapter, ifup, rx_ring->netdev, in ena_xdp_register_rxq_info()
216 rx_ring->qid, rc); in ena_xdp_register_rxq_info()
226 netif_dbg(rx_ring->adapter, ifdown, rx_ring->netdev, in ena_xdp_unregister_rxq_info()
228 rx_ring->qid); in ena_xdp_unregister_rxq_info()
238 struct ena_ring *rx_ring; in ena_xdp_exchange_program_rx_in_range() local
[all …]
/drivers/net/ethernet/intel/iavf/
iavf_txrx.c 717 if (rx_ring->skb) { in iavf_clean_rx_ring()
723 for (u32 i = rx_ring->next_to_clean; i != rx_ring->next_to_use; ) { in iavf_clean_rx_ring()
746 .pp = rx_ring->pp, in iavf_free_rx_resources()
752 dma_free_coherent(rx_ring->pp->p.dev, rx_ring->size, in iavf_free_rx_resources()
753 rx_ring->desc, rx_ring->dma); in iavf_free_rx_resources()
789 rx_ring->size = rx_ring->count * sizeof(struct iavf_rx_desc); in iavf_setup_rx_descriptors()
790 rx_ring->size = ALIGN(rx_ring->size, 4096); in iavf_setup_rx_descriptors()
791 rx_ring->desc = dma_alloc_coherent(fq.pp->p.dev, rx_ring->size, in iavf_setup_rx_descriptors()
796 rx_ring->size); in iavf_setup_rx_descriptors()
1407 rx_desc = IAVF_RX_DESC(rx_ring, rx_ring->next_to_clean); in iavf_clean_rx_irq()
[all …]
/drivers/net/ethernet/freescale/enetc/
enetc.c 1099 new = &rx_ring->rx_swbd[rx_ring->next_to_alloc]; in enetc_reuse_page()
1102 enetc_bdr_idx_inc(rx_ring, &rx_ring->next_to_alloc); in enetc_reuse_page()
1334 enetc_wr_reg_hot(rx_ring->rcir, rx_ring->next_to_use); in enetc_refill_rx_ring()
1491 enetc_put_rx_buff(rx_ring, &rx_ring->rx_swbd[*i]); in enetc_check_bd_errors_and_consume()
1498 enetc_put_rx_buff(rx_ring, &rx_ring->rx_swbd[*i]); in enetc_check_bd_errors_and_consume()
1575 enetc_wr_reg_hot(rx_ring->idr, BIT(rx_ring->index)); in enetc_clean_rx_ring()
1925 enetc_wr_reg_hot(rx_ring->idr, BIT(rx_ring->index)); in enetc_clean_rx_ring_xdp()
2048 struct enetc_bdr *rx_ring = &v->rx_ring; in enetc_poll() local
2284 struct enetc_bdr *rx_ring = priv->rx_ring[i]; in enetc_alloc_rx_resources() local
3177 struct enetc_bdr *rx_ring = priv->rx_ring[i]; in enetc_reconfigure_xdp_cb() local
[all …]
/drivers/net/ethernet/intel/igc/
igc_xdp.c 30 igc_disable_rx_ring(adapter->rx_ring[i]); in igc_xdp_set_prog()
61 struct igc_ring *rx_ring, *tx_ring; in igc_xdp_enable_pool() local
89 rx_ring = adapter->rx_ring[queue_id]; in igc_xdp_enable_pool()
92 napi = &rx_ring->q_vector->napi; in igc_xdp_enable_pool()
95 igc_disable_rx_ring(rx_ring); in igc_xdp_enable_pool()
105 igc_enable_rx_ring(rx_ring); in igc_xdp_enable_pool()
120 struct igc_ring *rx_ring, *tx_ring; in igc_xdp_disable_pool() local
135 rx_ring = adapter->rx_ring[queue_id]; in igc_xdp_disable_pool()
138 napi = &rx_ring->q_vector->napi; in igc_xdp_disable_pool()
141 igc_disable_rx_ring(rx_ring); in igc_xdp_disable_pool()
[all …]
/drivers/net/ethernet/broadcom/
bcm4908_enet.c 199 struct bcm4908_enet_dma_ring *rx_ring = &enet->rx_ring; in bcm4908_enet_dma_free() local
205 dma_free_coherent(dev, size, rx_ring->cpu_addr, rx_ring->dma_addr); in bcm4908_enet_dma_free()
217 struct bcm4908_enet_dma_ring *rx_ring = &enet->rx_ring; in bcm4908_enet_dma_alloc() local
232 rx_ring->is_tx = 0; in bcm4908_enet_dma_alloc()
321 struct bcm4908_enet_dma_ring *rx_ring = &enet->rx_ring; in bcm4908_enet_dma_uninit() local
338 struct bcm4908_enet_dma_ring *rx_ring = &enet->rx_ring; in bcm4908_enet_dma_init() local
452 struct bcm4908_enet_dma_ring *rx_ring = &enet->rx_ring; in bcm4908_enet_open() local
502 struct bcm4908_enet_dma_ring *rx_ring = &enet->rx_ring; in bcm4908_enet_stop() local
596 buf_desc = &enet->rx_ring.buf_desc[enet->rx_ring.read_idx]; in bcm4908_enet_poll_rx()
601 slot = enet->rx_ring.slots[enet->rx_ring.read_idx]; in bcm4908_enet_poll_rx()
[all …]
/drivers/net/ethernet/mscc/
ocelot_fdma.c 58 struct ocelot_fdma_rx_ring *rx_ring = &fdma->rx_ring; in ocelot_fdma_rx_ring_free() local
60 if (rx_ring->next_to_use >= rx_ring->next_to_clean) in ocelot_fdma_rx_ring_free()
62 (rx_ring->next_to_use - rx_ring->next_to_clean) - 1; in ocelot_fdma_rx_ring_free()
64 return rx_ring->next_to_clean - rx_ring->next_to_use - 1; in ocelot_fdma_rx_ring_free()
156 rx_ring = &fdma->rx_ring; in ocelot_fdma_alloc_rx_buffs()
244 rx_ring = &fdma->rx_ring; in ocelot_fdma_rx_restart()
298 struct ocelot_fdma_rx_ring *rx_ring = &ocelot->fdma->rx_ring; in ocelot_fdma_reuse_rx_page() local
301 new_rxb = &rx_ring->bufs[rx_ring->next_to_alloc]; in ocelot_fdma_reuse_rx_page()
302 rx_ring->next_to_alloc = ocelot_fdma_idx_next(rx_ring->next_to_alloc, in ocelot_fdma_reuse_rx_page()
402 rx_ring = &fdma->rx_ring; in ocelot_fdma_rx_get()
[all …]
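
ocelot_fdma_rx_ring_free() above is the classic "ring with one slot kept empty" free-space computation. It is pure index arithmetic, so it can be shown as a self-contained userspace program; RING_SIZE and the sample indices are arbitrary example values:

#include <stdio.h>

#define RING_SIZE 128u	/* example value, not the driver's constant */

/* Free slots between the producer (next_to_use) and consumer (next_to_clean);
 * one slot always stays unused so a full ring is distinguishable from an
 * empty one. */
static unsigned int ring_free(unsigned int next_to_use,
			      unsigned int next_to_clean)
{
	if (next_to_use >= next_to_clean)
		return RING_SIZE - (next_to_use - next_to_clean) - 1;
	return next_to_clean - next_to_use - 1;
}

int main(void)
{
	printf("%u\n", ring_free(0, 0));	/* empty ring: 127 free */
	printf("%u\n", ring_free(10, 4));	/* 6 buffers posted: 121 free */
	printf("%u\n", ring_free(3, 4));	/* producer one behind consumer: full, 0 free */
	return 0;
}
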
/drivers/net/ethernet/aquantia/atlantic/
aq_ring.c 418 ++rx_ring->stats.rx.packets; in aq_xdp_run_prog()
451 ++rx_ring->stats.rx.xdp_tx; in aq_xdp_run_prog()
702 int frame_sz = rx_ring->page_offset + rx_ring->frame_max + in __aq_ring_xdp_clean()
703 rx_ring->tail_size; in __aq_ring_xdp_clean()
710 for (; (rx_ring->sw_head != rx_ring->hw_head) && budget; in __aq_ring_xdp_clean()
711 rx_ring->sw_head = aq_ring_next_dx(rx_ring, rx_ring->sw_head), in __aq_ring_xdp_clean()
713 struct aq_ring_buff_s *buff = &rx_ring->buff_ring[rx_ring->sw_head]; in __aq_ring_xdp_clean()
714 bool is_ptp_ring = aq_ptp_ring(rx_ring->aq_nic, rx_ring); in __aq_ring_xdp_clean()
738 rx_ring->hw_head); in __aq_ring_xdp_clean()
774 ++rx_ring->stats.rx.errors; in __aq_ring_xdp_clean()
[all …]
/drivers/net/ethernet/intel/fm10k/
fm10k_main.c 127 bi = &rx_ring->rx_buffer[i]; in fm10k_alloc_rx_buffers()
128 i -= rx_ring->count; in fm10k_alloc_rx_buffers()
144 bi = rx_ring->rx_buffer; in fm10k_alloc_rx_buffers()
145 i -= rx_ring->count; in fm10k_alloc_rx_buffers()
154 i += rx_ring->count; in fm10k_alloc_rx_buffers()
158 rx_ring->next_to_use = i; in fm10k_alloc_rx_buffers()
171 writel(i, rx_ring->tail); in fm10k_alloc_rx_buffers()
192 rx_ring->next_to_alloc = (nta < rx_ring->count) ? nta : 0; in fm10k_reuse_rx_page()
306 rx_buffer = &rx_ring->rx_buffer[rx_ring->next_to_clean]; in fm10k_fetch_rx_buffer()
578 rx_desc = FM10K_RX_DESC(rx_ring, rx_ring->next_to_clean); in fm10k_clean_rx_irq()
[all …]
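
The fm10k_alloc_rx_buffers() hits show a trick common to several Intel drivers: the refill loop biases its index by -count so the wrap check becomes a simple test against zero instead of a per-iteration modulo, and the bias is removed before the index is published. A self-contained illustration with made-up sizes:

#include <stdio.h>

#define COUNT 8		/* example ring size */

int main(void)
{
	int ring[COUNT] = { 0 };
	unsigned int next_to_use = 5;	/* where refilling starts */
	unsigned int to_fill = 6;	/* how many slots to hand back to the NIC */
	int i = next_to_use;
	int *bi = &ring[i];

	i -= COUNT;			/* bias: i is now negative */
	do {
		*bi = 1;		/* "post a fresh buffer" */
		bi++;
		i++;
		if (!i) {		/* reached the end of the ring: wrap */
			bi = ring;
			i -= COUNT;
		}
	} while (--to_fill);

	i += COUNT;			/* undo the bias */
	printf("next_to_use is now %d\n", i);	/* (5 + 6) % 8 == 3 */
	return 0;
}

In the driver, the final index is then written to the tail register (writel(i, rx_ring->tail)) to tell the hardware about the newly posted buffers.
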
/drivers/net/ethernet/intel/igbvf/
netdev.c 247 struct igbvf_ring *rx_ring = adapter->rx_ring; in igbvf_clean_rx_irq() local
469 rx_ring->size = rx_ring->count * desc_len; in igbvf_setup_rx_resources()
470 rx_ring->size = ALIGN(rx_ring->size, 4096); in igbvf_setup_rx_resources()
472 rx_ring->desc = dma_alloc_coherent(&pdev->dev, rx_ring->size, in igbvf_setup_rx_resources()
474 if (!rx_ring->desc) in igbvf_setup_rx_resources()
599 memset(rx_ring->desc, 0, rx_ring->size); in igbvf_clean_rx_ring()
624 dma_free_coherent(&pdev->dev, rx_ring->size, rx_ring->desc, in igbvf_free_rx_resources()
625 rx_ring->dma); in igbvf_free_rx_resources()
975 struct igbvf_ring *rx_ring = adapter->rx_ring; in igbvf_configure_msix() local
985 writel(rx_ring->itr_val, hw->hw_addr + rx_ring->itr_register); in igbvf_configure_msix()
[all …]
/drivers/net/ethernet/wangxun/libwx/
wx_lib.c 187 rx_buffer = &rx_ring->rx_buffer_info[rx_ring->next_to_clean]; in wx_get_rx_buffer()
334 i -= rx_ring->count; in wx_alloc_rx_buffers()
646 rx_desc = WX_RX_DESC(rx_ring, rx_ring->next_to_clean); in wx_clean_rx_irq()
2449 memset(rx_ring->desc, 0, rx_ring->size); in wx_clean_rx_ring()
2485 dma_free_coherent(rx_ring->dev, rx_ring->size, in wx_free_rx_resources()
2486 rx_ring->desc, rx_ring->dma); in wx_free_rx_resources()
2676 rx_ring->size = rx_ring->count * sizeof(union wx_rx_desc); in wx_setup_rx_resources()
2677 rx_ring->size = ALIGN(rx_ring->size, 4096); in wx_setup_rx_resources()
2680 rx_ring->desc = dma_alloc_coherent(dev, rx_ring->size, in wx_setup_rx_resources()
2684 rx_ring->desc = dma_alloc_coherent(dev, rx_ring->size, in wx_setup_rx_resources()
[all …]
/drivers/net/ethernet/intel/ixgbevf/
ixgbevf_main.c 537 rx_buffer = &rx_ring->rx_buffer_info[rx_ring->next_to_clean]; in ixgbevf_get_rx_buffer()
775 rx_ring->next_to_alloc = (nta < rx_ring->count) ? nta : 0; in ixgbevf_reuse_rx_page()
1145 rx_desc = IXGBEVF_RX_DESC(rx_ring, rx_ring->next_to_clean); in ixgbevf_clean_rx_irq()
1240 rx_ring->skb = skb; in ixgbevf_clean_rx_irq()
2027 struct ixgbevf_ring *rx_ring = adapter->rx_ring[i]; in ixgbevf_configure_rx() local
3159 struct ixgbevf_ring *rx_ring = adapter->rx_ring[i]; in ixgbevf_update_stats() local
3507 rx_ring->size = rx_ring->count * sizeof(union ixgbe_adv_rx_desc); in ixgbevf_setup_rx_resources()
3508 rx_ring->size = ALIGN(rx_ring->size, 4096); in ixgbevf_setup_rx_resources()
3510 rx_ring->desc = dma_alloc_coherent(rx_ring->dev, rx_ring->size, in ixgbevf_setup_rx_resources()
3576 dma_free_coherent(rx_ring->dev, rx_ring->size, rx_ring->desc, in ixgbevf_free_rx_resources()
[all …]
/drivers/net/ethernet/netronome/nfp/nfd3/
dp.c 593 wr_idx = D_IDX(rx_ring, rx_ring->wr_p); in nfp_nfd3_rx_give_one()
611 rx_ring->wr_p++; in nfp_nfd3_rx_give_one()
635 nfp_nfd3_rx_give_one(dp, rx_ring, rx_ring->rxbufs[i].frag, in nfp_nfd3_rx_ring_fill_freelist()
857 nfp_nfd3_rx_drop(dp, rx_ring->r_vec, rx_ring, rxbuf, in nfp_nfd3_tx_xdp_buf()
921 &rx_ring->xdp_rxq); in nfp_nfd3_rx()
935 idx = D_IDX(rx_ring, rx_ring->rd_p); in nfp_nfd3_rx()
948 rx_ring->rd_p++; in nfp_nfd3_rx()
1163 if (r_vec->rx_ring) in nfp_nfd3_poll()
1329 idx = D_IDX(rx_ring, rx_ring->rd_p); in nfp_ctrl_rx_one()
1340 rx_ring->rd_p++; in nfp_ctrl_rx_one()
[all …]
/drivers/net/ethernet/netronome/nfp/nfdk/
dp.c 613 wr_idx = D_IDX(rx_ring, rx_ring->wr_p); in nfp_nfdk_rx_give_one()
627 rx_ring->wr_p++; in nfp_nfdk_rx_give_one()
648 nfp_nfdk_rx_give_one(dp, rx_ring, rx_ring->rxbufs[i].frag, in nfp_nfdk_rx_ring_fill_freelist()
854 rx_ring = r_vec->rx_ring; in nfp_nfdk_xdp_complete()
927 nfp_nfdk_rx_drop(dp, rx_ring->r_vec, rx_ring, rxbuf, in nfp_nfdk_tx_xdp_buf()
1057 idx = D_IDX(rx_ring, rx_ring->rd_p); in nfp_nfdk_rx()
1070 rx_ring->rd_p++; in nfp_nfdk_rx()
1273 if (r_vec->rx_ring) in nfp_nfdk_poll()
1491 idx = D_IDX(rx_ring, rx_ring->rd_p); in nfp_ctrl_rx_one()
1502 rx_ring->rd_p++; in nfp_ctrl_rx_one()
[all …]
