Lines matching refs:xsk_pool. All hits below are in the i40e AF_XDP zero-copy code (drivers/net/ethernet/intel/i40e/i40e_xsk.c); the leading number on each hit is its line in that file.
214 if (xsk_uses_need_wakeup(rx_ring->xsk_pool) && err == -ENOBUFS) in i40e_run_xdp_zc()
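A hedged sketch of what this check is doing (not the driver's verbatim code; the MY_XDP_* codes and my_* names are hypothetical): xdp_do_redirect() failing with -ENOBUFS means the socket's RX queue is full, and when the need_wakeup protocol is in use the driver bails out of the NAPI loop early instead of burning the rest of the budget on frames it cannot deliver.

#include <linux/filter.h>
#include <net/xdp_sock_drv.h>

enum { MY_XDP_CONSUMED, MY_XDP_REDIR, MY_XDP_EXIT };	/* hypothetical */

struct my_rx_ring {				/* hypothetical, trimmed */
	struct net_device *netdev;
	struct xsk_buff_pool *xsk_pool;
};

static int my_run_xdp_zc(struct my_rx_ring *rx_ring, struct xdp_buff *xdp,
			 struct bpf_prog *xdp_prog)
{
	int err = xdp_do_redirect(rx_ring->netdev, xdp, xdp_prog);

	if (!err)
		return MY_XDP_REDIR;

	/* -ENOBUFS: the AF_XDP RX queue is full. With need_wakeup in
	 * use, stop polling early; the caller then sets the RX
	 * need_wakeup flag so user space knows to consume entries and
	 * kick the driver again.
	 */
	if (xsk_uses_need_wakeup(rx_ring->xsk_pool) && err == -ENOBUFS)
		return MY_XDP_EXIT;

	return MY_XDP_CONSUMED;
}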
256 nb_buffs = xsk_buff_alloc_batch(rx_ring->xsk_pool, xdp, nb_buffs); in i40e_alloc_rx_buffers_zc()
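The refill side pairs with that: one xsk_buff_alloc_batch() call hands back up to 'want' pre-mapped umem buffers, and a short return means the fill queue ran dry. A minimal sketch, assuming a hypothetical my_rx_ring with an rx_bi array of xdp_buff pointers and a hypothetical my_write_rx_desc() that programs the HW descriptor:

#include <net/xdp_sock_drv.h>

struct my_rx_ring {				/* hypothetical, trimmed */
	struct xsk_buff_pool *xsk_pool;
	struct xdp_buff **rx_bi;		/* one slot per HW descriptor */
	u16 next_to_use;
};

static void my_write_rx_desc(struct my_rx_ring *ring, u16 idx,
			     dma_addr_t dma);	/* hypothetical */

static bool my_alloc_rx_buffers_zc(struct my_rx_ring *rx_ring, u16 want)
{
	struct xdp_buff **xdp = &rx_ring->rx_bi[rx_ring->next_to_use];
	u32 nb_buffs, i;

	/* Batched allocation: no per-buffer DMA mapping in the hot
	 * path, the umem was mapped once at pool setup.
	 */
	nb_buffs = xsk_buff_alloc_batch(rx_ring->xsk_pool, xdp, want);

	for (i = 0; i < nb_buffs; i++)
		my_write_rx_desc(rx_ring, rx_ring->next_to_use + i,
				 xsk_buff_xdp_get_dma(xdp[i]));

	rx_ring->next_to_use += nb_buffs;	/* ring wrap omitted */
	return nb_buffs == want;
}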
433 xsk_buff_dma_sync_for_cpu(bi, rx_ring->xsk_pool); in i40e_clean_rx_irq_zc()
455 if (xsk_uses_need_wakeup(rx_ring->xsk_pool)) { in i40e_clean_rx_irq_zc()
457 xsk_set_rx_need_wakeup(rx_ring->xsk_pool); in i40e_clean_rx_irq_zc()
459 xsk_clear_rx_need_wakeup(rx_ring->xsk_pool); in i40e_clean_rx_irq_zc()
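Per received frame, xsk_buff_dma_sync_for_cpu() runs before the XDP program touches the payload (a no-op on coherent mappings). At the end of the poll, the two wakeup helpers publish the queue state to user space. A condensed sketch, assuming 'failure' records a short refill and the hypothetical my_rx_ring exposes the ring cursors:

#include <net/xdp_sock_drv.h>

struct my_rx_ring {				/* hypothetical, trimmed */
	struct xsk_buff_pool *xsk_pool;
	u16 next_to_clean;
	u16 next_to_use;
};

static void my_finish_rx_poll(struct my_rx_ring *rx_ring, bool failure)
{
	if (!xsk_uses_need_wakeup(rx_ring->xsk_pool))
		return;

	/* Ran short on buffers, or drained everything: user space must
	 * kick the driver (poll()/sendto()) after freeing up entries.
	 */
	if (failure || rx_ring->next_to_clean == rx_ring->next_to_use)
		xsk_set_rx_need_wakeup(rx_ring->xsk_pool);
	else
		xsk_clear_rx_need_wakeup(rx_ring->xsk_pool);
}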
472 dma = xsk_buff_raw_get_dma(xdp_ring->xsk_pool, desc->addr); in i40e_xmit_pkt()
473 xsk_buff_raw_dma_sync_for_device(xdp_ring->xsk_pool, dma, desc->len); in i40e_xmit_pkt()
492 dma = xsk_buff_raw_get_dma(xdp_ring->xsk_pool, desc[i].addr); in i40e_xmit_pkt_batch()
493 xsk_buff_raw_dma_sync_for_device(xdp_ring->xsk_pool, dma, desc[i].len); in i40e_xmit_pkt_batch()
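The single-shot and batched xmit hits above translate a descriptor identically; only the surrounding loop differs. A sketch of the shared step, with my_tx_ring and my_write_tx_desc() hypothetical:

#include <net/xdp_sock_drv.h>

struct my_tx_ring {				/* hypothetical, trimmed */
	struct xsk_buff_pool *xsk_pool;
};

static void my_write_tx_desc(struct my_tx_ring *ring, dma_addr_t dma,
			     u32 len);		/* hypothetical HW write */

static void my_xmit_one(struct my_tx_ring *xdp_ring,
			const struct xdp_desc *desc)
{
	dma_addr_t dma;

	/* desc->addr is an offset into the umem; the pool translates it
	 * to the DMA address established when the umem was mapped.
	 */
	dma = xsk_buff_raw_get_dma(xdp_ring->xsk_pool, desc->addr);

	/* Push the payload toward the device (no-op when coherent). */
	xsk_buff_raw_dma_sync_for_device(xdp_ring->xsk_pool, dma, desc->len);

	my_write_tx_desc(xdp_ring, dma, desc->len);
}

The i40e_xmit_pkt_batch() hits are the same body run over desc[i] for each descriptor in the batch.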
538 struct xdp_desc *descs = xdp_ring->xsk_pool->tx_descs; in i40e_xmit_zc()
542 nb_pkts = xsk_tx_peek_release_desc_batch(xdp_ring->xsk_pool, budget); in i40e_xmit_zc()
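What feeds those helpers: xsk_tx_peek_release_desc_batch() copies up to 'budget' descriptors from the socket's TX ring into the pool's own tx_descs scratch array and releases them in one operation, so the driver never walks the user ring itself. A sketch reusing the hypothetical my_tx_ring and my_xmit_one() from above, plus a hypothetical doorbell helper:

static void my_ring_doorbell(struct my_tx_ring *ring);	/* hypothetical */

static bool my_xmit_zc(struct my_tx_ring *xdp_ring, u32 budget)
{
	struct xdp_desc *descs = xdp_ring->xsk_pool->tx_descs;
	u32 nb_pkts, i;

	nb_pkts = xsk_tx_peek_release_desc_batch(xdp_ring->xsk_pool, budget);
	if (!nb_pkts)
		return true;		/* nothing queued, TX side done */

	for (i = 0; i < nb_pkts; i++)
		my_xmit_one(xdp_ring, &descs[i]);

	my_ring_doorbell(xdp_ring);	/* one tail bump for the batch */

	return nb_pkts < budget;
}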
589 struct xsk_buff_pool *bp = tx_ring->xsk_pool; in i40e_clean_xdp_tx_irq()
634 if (xsk_uses_need_wakeup(tx_ring->xsk_pool)) in i40e_clean_xdp_tx_irq()
635 xsk_set_tx_need_wakeup(tx_ring->xsk_pool); in i40e_clean_xdp_tx_irq()
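On the completion path, finished umem frames go back to the socket via xsk_tx_completed(), and the TX need_wakeup flag is simply set whenever the protocol is in use; setting it unconditionally here is the cheap, race-free choice. A sketch, with 'xsk_frames' standing for the completion count gathered earlier in the real function:

static void my_finish_tx_clean(struct my_tx_ring *tx_ring, u32 xsk_frames)
{
	struct xsk_buff_pool *bp = tx_ring->xsk_pool;

	if (xsk_frames)
		xsk_tx_completed(bp, xsk_frames);  /* credit the socket */

	if (xsk_uses_need_wakeup(bp))
		xsk_set_tx_need_wakeup(bp);
}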
667 if (!vsi->xdp_rings[queue_id]->xsk_pool) in i40e_xsk_wakeup()
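That NULL check guards the wakeup ndo: user space can issue the wakeup syscall against any queue id, so the handler verifies the queue exists and actually has a pool bound before scheduling NAPI. A sketch with hypothetical my_vsi plumbing and error-code choices:

#include <linux/errno.h>
#include <net/xdp_sock_drv.h>

struct my_ring { struct xsk_buff_pool *xsk_pool; };	/* hypothetical */
struct my_vsi {						/* hypothetical */
	struct my_ring **xdp_rings;
	u16 num_queue_pairs;
};

static void my_schedule_napi(struct my_ring *ring);	/* hypothetical */

static int my_xsk_wakeup(struct my_vsi *vsi, u32 queue_id, u32 flags)
{
	if (queue_id >= vsi->num_queue_pairs)
		return -EINVAL;

	/* No AF_XDP socket bound to this queue: nothing to wake. */
	if (!vsi->xdp_rings[queue_id]->xsk_pool)
		return -EINVAL;

	my_schedule_napi(vsi->xdp_rings[queue_id]);
	return 0;
}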
704 struct xsk_buff_pool *bp = tx_ring->xsk_pool; in i40e_xsk_clean_tx_ring()
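Teardown walks the unfinished span of the ring: driver-owned XDP_TX buffers are freed one by one, while socket-owned frames are only counted and then returned with a single xsk_tx_completed() call. A sketch with hypothetical tx_bi bookkeeping:

#include <net/xdp_sock_drv.h>

struct my_tx_buffer { struct xdp_frame *xdpf; };	/* hypothetical */
struct my_tx_ring {					/* hypothetical, trimmed */
	struct xsk_buff_pool *xsk_pool;
	struct my_tx_buffer *tx_bi;
	u16 next_to_clean, next_to_use, count;
};

static void my_free_tx_buffer(struct my_tx_ring *ring,
			      struct my_tx_buffer *bi);	/* hypothetical */

static void my_xsk_clean_tx_ring(struct my_tx_ring *tx_ring)
{
	u16 ntc = tx_ring->next_to_clean, ntu = tx_ring->next_to_use;
	struct xsk_buff_pool *bp = tx_ring->xsk_pool;
	u32 xsk_frames = 0;

	while (ntc != ntu) {
		struct my_tx_buffer *tx_bi = &tx_ring->tx_bi[ntc];

		if (tx_bi->xdpf)
			my_free_tx_buffer(tx_ring, tx_bi); /* driver-owned */
		else
			xsk_frames++;			   /* socket-owned */

		tx_bi->xdpf = NULL;
		if (++ntc == tx_ring->count)
			ntc = 0;
	}

	if (xsk_frames)
		xsk_tx_completed(bp, xsk_frames);
}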