Lines matching refs:xdp — references to the xdp identifier in the libeth XSk (AF_XDP zero-copy) helpers
25 struct libeth_xdp_buff *xdp) in libeth_xsk_tx_queue_head() argument
28 .xsk = xdp, in libeth_xsk_tx_queue_head()
29 __libeth_xdp_tx_len(xdp->base.data_end - xdp->data, in libeth_xsk_tx_queue_head()
33 if (likely(!xdp_buff_has_frags(&xdp->base))) in libeth_xsk_tx_queue_head()
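The hits above sit inside libeth_xsk_tx_queue_head(), which queues the head buffer of an XSk frame onto an XDP Tx bulk: it stores the buffer pointer and its linear data length, then checks xdp_buff_has_frags() to tell the caller whether frags still need queueing. A minimal sketch reconstructed around the fragments shown; the bq->bulk/bq->count field names and the LIBETH_XDP_TX_FIRST/LIBETH_XDP_TX_MULTI flags are assumptions, not confirmed by this listing:

static inline bool libeth_xsk_tx_queue_head(struct libeth_xdp_tx_bulk *bq,
					    struct libeth_xdp_buff *xdp)
{
	/* Stash the XSk buffer and its linear data length in the bulk */
	bq->bulk[bq->count++] = (typeof(*bq->bulk)){
		.xsk	= xdp,
		__libeth_xdp_tx_len(xdp->base.data_end - xdp->data,
				    LIBETH_XDP_TX_FIRST),
	};

	/* Single-buffer frame: nothing more for the caller to queue */
	if (likely(!xdp_buff_has_frags(&xdp->base)))
		return false;

	/* Multi-buffer frame: flag the head, the caller walks the frags */
	bq->bulk[bq->count - 1].flags |= LIBETH_XDP_TX_MULTI;

	return true;
}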
65 struct libeth_xdp_buff *xdp, in libeth_xsk_tx_queue_bulk() argument
73 libeth_xsk_buff_free_slow(xdp); in libeth_xsk_tx_queue_bulk()
77 if (!libeth_xsk_tx_queue_head(bq, xdp)) in libeth_xsk_tx_queue_bulk()
80 for (const struct libeth_xdp_buff *head = xdp; ; ) { in libeth_xsk_tx_queue_bulk()
81 xdp = container_of(xsk_buff_get_frag(&head->base), in libeth_xsk_tx_queue_bulk()
82 typeof(*xdp), base); in libeth_xsk_tx_queue_bulk()
83 if (!xdp) in libeth_xsk_tx_queue_bulk()
92 libeth_xsk_tx_queue_frag(bq, xdp); in libeth_xsk_tx_queue_bulk()
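These hits are from libeth_xsk_tx_queue_bulk(), which queues a whole, possibly multi-buffer, XSk frame onto a Tx bulk (the XDP_TX path for zero-copy Rx buffers, judging by the libeth_xdp_buff usage): if the bulk is full and cannot be flushed, the buffer is dropped via libeth_xsk_buff_free_slow(); otherwise the head is queued and the frag chain is walked with xsk_buff_get_frag() + container_of(), each frag going through libeth_xsk_tx_queue_frag(). A condensed sketch; the flush_bulk callback parameter, LIBETH_XDP_TX_BULK, the LIBETH_XDP_TX_XSK flush flag and the LIBETH_XDP_TX_LAST tail flag are assumptions, and the per-frag bulk-full handling is omitted:

static inline bool
libeth_xsk_tx_queue_bulk(struct libeth_xdp_tx_bulk *bq,
			 struct libeth_xdp_buff *xdp,
			 bool (*flush_bulk)(struct libeth_xdp_tx_bulk *bq,
					    u32 flags))
{
	bool ret = true;

	/* Bulk full and not flushable: drop the frame back to the pool */
	if (unlikely(bq->count == LIBETH_XDP_TX_BULK) &&
	    unlikely(!flush_bulk(bq, LIBETH_XDP_TX_XSK))) {
		libeth_xsk_buff_free_slow(xdp);
		return false;
	}

	/* Queue the head; false means a single-buffer frame */
	if (!libeth_xsk_tx_queue_head(bq, xdp))
		goto out;

	/* Walk the frags; relies on ->base being the leading member so a
	 * NULL frag maps back to a NULL libeth_xdp_buff pointer.
	 */
	for (const struct libeth_xdp_buff *head = xdp; ; ) {
		xdp = container_of(xsk_buff_get_frag(&head->base),
				   typeof(*xdp), base);
		if (!xdp)
			break;

		libeth_xsk_tx_queue_frag(bq, xdp);
	}

out:
	/* Mark the final element so completion knows where the frame ends */
	bq->bulk[bq->count - 1].flags |= LIBETH_XDP_TX_LAST;

	return ret;
}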
115 struct libeth_xdp_buff *xdp = frm.xsk; in libeth_xsk_tx_fill_buf() local
117 .addr = xsk_buff_xdp_get_dma(&xdp->base), in libeth_xsk_tx_fill_buf()
125 sqe->xsk = xdp; in libeth_xsk_tx_fill_buf()
134 xdp_get_shared_info_from_buff(&xdp->base)); in libeth_xsk_tx_fill_buf()
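These lines belong to libeth_xsk_tx_fill_buf(), which turns a queued bulk entry back into a generic Tx descriptor: the DMA address comes from xsk_buff_xdp_get_dma(), the buffer is remembered in the SQ element (sqe->xsk) for completion and recycling, and the head frag additionally fills frame stats from xdp_get_shared_info_from_buff(). A sketch of that conversion; the calling convention, the libeth_xdp_tx_frame/libeth_xdp_tx_desc/libeth_sqe field names beyond those shown and the LIBETH_SQE_* completion types are assumptions, and the device-side DMA sync is left out:

static inline struct libeth_xdp_tx_desc
libeth_xsk_tx_fill_buf(struct libeth_xdp_tx_frame frm, u32 i,
		       const struct libeth_xdpsq *sq)
{
	struct libeth_xdp_buff *xdp = frm.xsk;
	struct libeth_xdp_tx_desc desc = {
		.addr	= xsk_buff_xdp_get_dma(&xdp->base),
		.len	= frm.len,
		.flags	= frm.flags,
	};
	struct libeth_sqe *sqe = &sq->sqes[i];

	/* Remember the XSk buffer so Tx completion can recycle it */
	sqe->xsk = xdp;

	/* Frags only carry data; the whole frame is accounted on the head */
	if (!(desc.flags & LIBETH_XDP_TX_FIRST)) {
		sqe->type = LIBETH_SQE_XSK_TX_FRAG;
		return desc;
	}

	sqe->type = LIBETH_SQE_XSK_TX;
	libeth_xdp_tx_fill_stats(sqe, &desc,
				 xdp_get_shared_info_from_buff(&xdp->base));

	return desc;
}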
334 struct libeth_xdp_buff *xdp);
352 struct libeth_xdp_buff *xdp, u32 len) in libeth_xsk_process_buff() argument
355 libeth_xsk_buff_free_slow(xdp); in libeth_xsk_process_buff()
359 xsk_buff_set_size(&xdp->base, len); in libeth_xsk_process_buff()
360 xsk_buff_dma_sync_for_cpu(&xdp->base); in libeth_xsk_process_buff()
363 return libeth_xsk_buff_add_frag(head, xdp); in libeth_xsk_process_buff()
365 prefetch(xdp->data); in libeth_xsk_process_buff()
367 return xdp; in libeth_xsk_process_buff()
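The hits above are from libeth_xsk_process_buff(), the per-descriptor Rx step: a zero-length buffer is recycled through libeth_xsk_buff_free_slow(), otherwise it is sized with xsk_buff_set_size(), synced for the CPU, chained onto an existing head via libeth_xsk_buff_add_frag(), or, when it starts a new frame, its data is prefetched and it becomes the new head. A reconstructed sketch; the head-parameter semantics and the unlikely() annotation are inferred from the fragments rather than guaranteed:

static inline struct libeth_xdp_buff *
libeth_xsk_process_buff(struct libeth_xdp_buff *head,
			struct libeth_xdp_buff *xdp, u32 len)
{
	/* Zero-length completion: recycle the buffer, keep the current head */
	if (unlikely(!len)) {
		libeth_xsk_buff_free_slow(xdp);
		return head;
	}

	xsk_buff_set_size(&xdp->base, len);
	xsk_buff_dma_sync_for_cpu(&xdp->base);

	/* Continuation of a multi-buffer frame: attach as a frag */
	if (head)
		return libeth_xsk_buff_add_frag(head, xdp);

	/* New frame: warm the cache line the XDP program will touch first */
	prefetch(xdp->data);

	return xdp;
}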
371 const struct libeth_xdp_buff *xdp);
373 u32 __libeth_xsk_run_prog_slow(struct libeth_xdp_buff *xdp,
390 __libeth_xsk_run_prog(struct libeth_xdp_buff *xdp, in __libeth_xsk_run_prog() argument
396 act = bpf_prog_run_xdp(bq->prog, &xdp->base); in __libeth_xsk_run_prog()
399 return __libeth_xsk_run_prog_slow(xdp, bq, act, ret); in __libeth_xsk_run_prog()
401 ret = xdp_do_redirect(bq->dev, &xdp->base, bq->prog); in __libeth_xsk_run_prog()
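These lines fall inside __libeth_xsk_run_prog(), the XSk flavour of running the attached XDP program: bpf_prog_run_xdp() produces a verdict, XDP_REDIRECT is serviced inline via xdp_do_redirect(), and every other verdict, as well as a failed redirect, is handed to __libeth_xsk_run_prog_slow(). A simplified sketch; the LIBETH_XDP_REDIRECT return code, the const-ness of the bulk pointer and the exact slow-path arguments are assumptions:

static __always_inline u32
__libeth_xsk_run_prog(struct libeth_xdp_buff *xdp,
		      const struct libeth_xdp_tx_bulk *bq)
{
	enum xdp_action act;
	int ret;

	act = bpf_prog_run_xdp(bq->prog, &xdp->base);
	if (unlikely(act != XDP_REDIRECT))
		/* Non-redirect verdicts are the uncommon case on XSk queues */
		return __libeth_xsk_run_prog_slow(xdp, bq, act, 0);

	ret = xdp_do_redirect(bq->dev, &xdp->base, bq->prog);
	if (unlikely(ret))
		return __libeth_xsk_run_prog_slow(xdp, bq, act, ret);

	return LIBETH_XDP_REDIRECT;
}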
419 #define libeth_xsk_run_prog(xdp, bq, fl) \ argument
420 __libeth_xdp_run_flush(xdp, bq, __libeth_xsk_run_prog, \
441 __libeth_xsk_run_pass(struct libeth_xdp_buff *xdp, in __libeth_xsk_run_pass() argument
444 void (*prep)(struct libeth_xdp_buff *xdp, in __libeth_xsk_run_pass() argument
446 u32 (*run)(struct libeth_xdp_buff *xdp, in __libeth_xsk_run_pass() argument
449 const struct libeth_xdp_buff *xdp, in __libeth_xsk_run_pass() argument
455 rs->bytes += xdp->base.data_end - xdp->data; in __libeth_xsk_run_pass()
458 if (unlikely(xdp_buff_has_frags(&xdp->base))) in __libeth_xsk_run_pass()
459 libeth_xsk_buff_stats_frags(rs, xdp); in __libeth_xsk_run_pass()
462 prep(xdp, md); in __libeth_xsk_run_pass()
464 act = run(xdp, bq); in __libeth_xsk_run_pass()
471 skb = xdp_build_skb_from_zc(&xdp->base); in __libeth_xsk_run_pass()
473 libeth_xsk_buff_free_slow(xdp); in __libeth_xsk_run_pass()
477 if (unlikely(!populate(skb, xdp, rs))) { in __libeth_xsk_run_pass()
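The references above come from __libeth_xsk_run_pass(), the top-level per-frame Rx routine: it accounts bytes and packets (including frag stats via libeth_xsk_buff_stats_frags()), lets the driver prep() hook see the HW descriptor, runs the XDP program through run(), and for XDP_PASS converts the zero-copy buffer into an skb with xdp_build_skb_from_zc(), freeing it with libeth_xsk_buff_free_slow() on failure, before populate() fills skb fields and the skb is handed to the stack. The sketch below condenses the verdict handling; the LIBETH_XDP_PASS/LIBETH_XDP_ABORTED codes, the return semantics and the napi_gro_receive()/napi_consume_skb() delivery calls are assumptions:

static __always_inline bool
__libeth_xsk_run_pass(struct libeth_xdp_buff *xdp,
		      struct libeth_xdp_tx_bulk *bq, struct napi_struct *napi,
		      struct libeth_rq_napi_stats *rs, const void *md,
		      void (*prep)(struct libeth_xdp_buff *xdp,
				   const void *md),
		      u32 (*run)(struct libeth_xdp_buff *xdp,
				 struct libeth_xdp_tx_bulk *bq),
		      bool (*populate)(struct sk_buff *skb,
				       const struct libeth_xdp_buff *xdp,
				       struct libeth_rq_napi_stats *rs))
{
	struct sk_buff *skb;
	u32 act;

	/* Account the frame up front, frags included */
	rs->bytes += xdp->base.data_end - xdp->data;
	rs->packets++;
	if (unlikely(xdp_buff_has_frags(&xdp->base)))
		libeth_xsk_buff_stats_frags(rs, xdp);

	/* Driver hook, e.g. to expose the HW Rx descriptor as metadata */
	if (prep)
		prep(xdp, md);

	act = run(xdp, bq);
	if (act != LIBETH_XDP_PASS)
		/* TX/REDIRECT/DROP verdicts already consumed the buffer */
		return act != LIBETH_XDP_ABORTED;

	/* XDP_PASS: ZC memory stays in the pool, so copy into an skb */
	skb = xdp_build_skb_from_zc(&xdp->base);
	if (unlikely(!skb)) {
		libeth_xsk_buff_free_slow(xdp);
		return false;
	}

	/* Fill checksum/hash/etc. from the descriptor before delivery */
	if (unlikely(!populate(skb, xdp, rs))) {
		napi_consume_skb(skb, true);
		return false;
	}

	napi_gro_receive(napi, skb);

	return true;
}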
500 #define libeth_xsk_run_pass(xdp, bq, napi, rs, desc, run, populate) \ argument
501 __libeth_xsk_run_pass(xdp, bq, napi, rs, desc, libeth_xdp_prep_desc, \
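The libeth_xsk_run_pass() wrapper referenced here plugs libeth_xdp_prep_desc() in as the prep() hook, so a driver only supplies its run() and populate() callbacks plus the raw Rx descriptor. Below is an illustrative, not-from-libeth Rx-poll excerpt showing how it composes with libeth_xsk_process_buff(); every mydrv_* name, the descriptor accessors, the EOP handling and the <net/libeth/xsk.h> include path are hypothetical/assumed:

#include <net/libeth/xsk.h>

static void mydrv_clean_xsk_rx(struct mydrv_rxq *rxq,
			       struct libeth_xdp_tx_bulk *bq,
			       struct libeth_rq_napi_stats *rs)
{
	struct libeth_xdp_buff *head = NULL;

	while (mydrv_rx_desc_ready(rxq)) {
		const struct mydrv_rx_desc *desc = mydrv_next_rx_desc(rxq);
		struct libeth_xdp_buff *buf = mydrv_rx_buf(rxq, desc);
		struct libeth_xdp_buff *xdp;

		/* Size, CPU-sync and (for multi-buffer frames) chain it */
		xdp = libeth_xsk_process_buff(head, buf,
					      mydrv_rx_desc_len(desc));

		/* Not the end of packet yet: keep collecting frags */
		if (!mydrv_rx_desc_eop(desc)) {
			head = xdp;
			continue;
		}
		head = NULL;

		if (unlikely(!xdp))
			continue;

		/* Run the XDP program; XDP_PASS frames become skbs */
		libeth_xsk_run_pass(xdp, bq, rxq->napi, rs, desc,
				    mydrv_xsk_run_prog, mydrv_populate_skb);
	}
}

mydrv_xsk_run_prog and mydrv_populate_skb stand in for the driver-provided run() and populate() callbacks whose prototypes appear in the __libeth_xsk_run_pass() references above.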
627 #define libeth_xsk_buff_xdp_get_dma(xdp) \ argument
628 xsk_buff_xdp_get_dma(&(xdp)->base)
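libeth_xsk_buff_xdp_get_dma() is just sugar over xsk_buff_xdp_get_dma() that hides the ->base dereference. A one-line illustration of the kind of call site it is meant for; the surrounding descriptor helper is hypothetical:

	/* 'xdp' is a struct libeth_xdp_buff *; resolve the frame's data DMA
	 * address to write into a hardware descriptor (hypothetical helper).
	 */
	dma_addr_t dma = libeth_xsk_buff_xdp_get_dma(xdp);

	mydrv_desc_set_addr(desc, dma);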