/net/core/
netdev_rx_queue.c
      94  struct netdev_rx_queue *rxq;   in __net_mp_open_rxq() (local)
     119  rxq = __netif_get_rx_queue(dev, rxq_idx);   in __net_mp_open_rxq()
     120  if (rxq->mp_params.mp_ops) {   in __net_mp_open_rxq()
     125  if (rxq->pool) {   in __net_mp_open_rxq()
     131  rxq->mp_params = *p;   in __net_mp_open_rxq()
     134  rxq->mp_params.mp_ops = NULL;   in __net_mp_open_rxq()
     135  rxq->mp_params.mp_priv = NULL;   in __net_mp_open_rxq()
     154  struct netdev_rx_queue *rxq;   in __net_mp_close_rxq() (local)
     166  !rxq->mp_params.mp_ops)   in __net_mp_close_rxq()
     173  rxq->mp_params.mp_ops = NULL;   in __net_mp_close_rxq()
     [all …]
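
These fragments are the core of the bind path that attaches a memory provider to a single RX queue: reject the queue if another provider or a plain page pool already owns it, otherwise copy the parameters in, and clear them again on failure. A minimal sketch of that check-and-install pattern follows; the queue-restart step, the exact error codes, and the helper name are assumptions, not copied from the tree.

```c
#include <linux/netdevice.h>
#include <net/netdev_rx_queue.h>	/* struct netdev_rx_queue, __netif_get_rx_queue() */
#include <net/page_pool/types.h>	/* struct pp_memory_provider_params */

/* Hypothetical helper mirroring the pattern above. */
static int mp_open_rxq_sketch(struct net_device *dev, unsigned int rxq_idx,
			      const struct pp_memory_provider_params *p)
{
	struct netdev_rx_queue *rxq;

	if (rxq_idx >= dev->real_num_rx_queues)
		return -ERANGE;		/* error code is an assumption */

	rxq = __netif_get_rx_queue(dev, rxq_idx);
	if (rxq->mp_params.mp_ops)	/* another provider already bound */
		return -EEXIST;
	if (rxq->pool)			/* queue already runs a plain page pool */
		return -EBUSY;

	rxq->mp_params = *p;
	/* the real path restarts the queue here and, if that fails, clears
	 * mp_params.mp_ops and mp_params.mp_priv again (lines 134-135 above) */
	return 0;
}
```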
|
devmem.c
     120  struct netdev_rx_queue *rxq;   in net_devmem_unbind_dmabuf() (local)
     134  xa_for_each(&binding->bound_rxqs, xa_idx, rxq) {   in net_devmem_unbind_dmabuf()
     140  rxq_idx = get_netdev_rx_queue_index(rxq);   in net_devmem_unbind_dmabuf()
     156  struct netdev_rx_queue *rxq;   in net_devmem_bind_dmabuf_to_queue() (local)
     164  rxq = __netif_get_rx_queue(dev, rxq_idx);   in net_devmem_bind_dmabuf_to_queue()
     165  err = xa_alloc(&binding->bound_rxqs, &xa_idx, rxq, xa_limit_32b,   in net_devmem_bind_dmabuf_to_queue()
     460  struct netdev_rx_queue *rxq)   in mp_dmabuf_devmem_nl_fill() (argument)
     463  int type = rxq ? NETDEV_A_QUEUE_DMABUF : NETDEV_A_PAGE_POOL_DMABUF;   in mp_dmabuf_devmem_nl_fill()
     469  struct netdev_rx_queue *rxq)   in mp_dmabuf_devmem_uninstall() (argument)
     476  if (bound_rxq == rxq) {   in mp_dmabuf_devmem_uninstall()
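
net_devmem_bind_dmabuf_to_queue() records every RX queue a dmabuf binding is installed on in an xarray, and net_devmem_unbind_dmabuf() walks that xarray back. A hedged sketch of just this bookkeeping; the binding structure itself and the clear/restart work on unbind are omitted, and the helper names are hypothetical.

```c
#include <linux/xarray.h>
#include <net/netdev_rx_queue.h>

/* Remember one RX queue in the binding's xarray; the stored value is the
 * queue pointer, xa_idx is only the allocated slot. */
static int bind_rxq_sketch(struct xarray *bound_rxqs, struct net_device *dev,
			   u32 rxq_idx)
{
	struct netdev_rx_queue *rxq = __netif_get_rx_queue(dev, rxq_idx);
	u32 xa_idx;

	return xa_alloc(bound_rxqs, &xa_idx, rxq, xa_limit_32b, GFP_KERNEL);
}

/* Walk every bound queue on unbind; the real code also clears the queue's
 * mp_params and restarts the queue before dropping the entry. */
static void unbind_rxqs_sketch(struct xarray *bound_rxqs)
{
	struct netdev_rx_queue *rxq;
	unsigned long xa_idx;

	xa_for_each(bound_rxqs, xa_idx, rxq) {
		pr_debug("unbinding rx queue %u\n",
			 get_netdev_rx_queue_index(rxq));
		xa_erase(bound_rxqs, xa_idx);
	}
}
```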
|
xdp.c
     545  __xdp_return(netmem, xdp->rxq->mem.type, true, NULL);   in xdp_return_frag()
     559  xdp->rxq->mem.type, true, xdp);   in xdp_return_buff()
     562  __xdp_return(virt_to_netmem(xdp->data), xdp->rxq->mem.type, true, xdp);   in xdp_return_buff()
     635  const struct xdp_rxq_info *rxq = xdp->rxq;   in xdp_build_skb_from_buff() (local)
     657  if (rxq->mem.type == MEM_TYPE_PAGE_POOL)   in xdp_build_skb_from_buff()
     660  skb_record_rx_queue(skb, rxq->queue_index);   in xdp_build_skb_from_buff()
     671  skb->protocol = eth_type_trans(skb, rxq->dev);   in xdp_build_skb_from_buff()
     741  const struct xdp_rxq_info *rxq = xdp->rxq;   in xdp_build_skb_from_zc() (local)
     775  skb_record_rx_queue(skb, rxq->queue_index);   in xdp_build_skb_from_zc()
     786  skb->protocol = eth_type_trans(skb, rxq->dev);   in xdp_build_skb_from_zc()
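
Both xdp_build_skb_from_buff() and xdp_build_skb_from_zc() lean on xdp->rxq for the same three finishing touches once an skb exists: page-pool recycle marking, the RX queue record, and the protocol/device. A minimal sketch of that tail end, with the skb construction itself left out (the helper name is hypothetical):

```c
#include <linux/etherdevice.h>
#include <linux/skbuff.h>
#include <net/xdp.h>

static void finish_skb_from_xdp_sketch(struct sk_buff *skb,
				       const struct xdp_buff *xdp)
{
	const struct xdp_rxq_info *rxq = xdp->rxq;

	/* page-pool backed buffers must be recycle-marked so freeing the
	 * skb returns the pages to the pool */
	if (rxq->mem.type == MEM_TYPE_PAGE_POOL)
		skb_mark_for_recycle(skb);

	skb_record_rx_queue(skb, rxq->queue_index);
	skb->protocol = eth_type_trans(skb, rxq->dev);
}
```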
|
page_pool_priv.h
      44  struct netdev_rx_queue *rxq);
      54  struct netdev_rx_queue *rxq)   in page_pool_check_memory_provider() (argument)
|
page_pool_user.c
     356  struct netdev_rx_queue *rxq)   in page_pool_check_memory_provider() (argument)
     358  void *binding = rxq->mp_params.mp_priv;   in page_pool_check_memory_provider()
     370  if (pool->slow.queue_idx == get_netdev_rx_queue_index(rxq)) {   in page_pool_check_memory_provider()
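
page_pool_check_memory_provider() verifies that, for a queue with a provider bound, some page pool of the device actually carries that provider. A sketch of the per-pool comparison it performs; the walk over the device's pools is omitted and the function name is hypothetical.

```c
#include <net/netdev_rx_queue.h>
#include <net/page_pool/types.h>

static bool pool_matches_rxq_binding(const struct page_pool *pool,
				     struct netdev_rx_queue *rxq)
{
	void *binding = rxq->mp_params.mp_priv;

	/* same provider-private data, created for the same queue index */
	return binding &&
	       pool->mp_priv == binding &&
	       pool->slow.queue_idx == get_netdev_rx_queue_index(rxq);
}
```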
|
netdev-genl.c
     394  struct netdev_rx_queue *rxq;   in netdev_nl_queue_fill_one() (local)
     409  rxq = __netif_get_rx_queue(netdev, q_idx);   in netdev_nl_queue_fill_one()
     410  if (nla_put_napi_id(rsp, rxq->napi))   in netdev_nl_queue_fill_one()
     413  params = &rxq->mp_params;   in netdev_nl_queue_fill_one()
     415  params->mp_ops->nl_fill(params->mp_priv, rsp, rxq))   in netdev_nl_queue_fill_one()
     418  if (rxq->pool)   in netdev_nl_queue_fill_one()
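
netdev_nl_queue_fill_one() reports, per RX queue, the NAPI instance and whatever a bound memory provider wants to expose through its ->nl_fill() hook. A simplified sketch; the real code uses an internal helper for the NAPI id, here replaced with a plain nla_put_u32(), and the header providing struct memory_provider_ops is an assumption.

```c
#include <linux/netdev.h>			/* NETDEV_A_QUEUE_NAPI_ID */
#include <net/genetlink.h>
#include <net/netdev_rx_queue.h>
#include <net/page_pool/memory_provider.h>	/* header path is an assumption */

static int queue_fill_rx_sketch(struct sk_buff *rsp, struct net_device *netdev,
				u32 q_idx)
{
	struct netdev_rx_queue *rxq = __netif_get_rx_queue(netdev, q_idx);
	struct pp_memory_provider_params *params = &rxq->mp_params;

	if (rxq->napi &&
	    nla_put_u32(rsp, NETDEV_A_QUEUE_NAPI_ID, rxq->napi->napi_id))
		return -EMSGSIZE;

	/* a bound provider adds its own attributes, e.g. NETDEV_A_QUEUE_DMABUF */
	if (params->mp_ops &&
	    params->mp_ops->nl_fill(params->mp_priv, rsp, rxq))
		return -EMSGSIZE;

	return 0;
}
```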
|
page_pool.c
     198  struct netdev_rx_queue *rxq;   in page_pool_init() (local)
     283  rxq = __netif_get_rx_queue(pool->slow.netdev,   in page_pool_init()
     285  pool->mp_priv = rxq->mp_params.mp_priv;   in page_pool_init()
     286  pool->mp_ops = rxq->mp_params.mp_ops;   in page_pool_init()
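
During page_pool_init(), a pool created for a specific netdev queue picks up that queue's memory-provider hooks, which is how a provider such as the devmem one ends up servicing the pool's allocations. A trimmed sketch of just that step (the surrounding validation is omitted, the helper name is hypothetical):

```c
#include <net/netdev_rx_queue.h>
#include <net/page_pool/types.h>

static void pool_inherit_mp_sketch(struct page_pool *pool)
{
	struct netdev_rx_queue *rxq;

	/* only pools tied to a (netdev, queue) pair can have a provider */
	if (!pool->slow.netdev)
		return;

	rxq = __netif_get_rx_queue(pool->slow.netdev, pool->slow.queue_idx);
	pool->mp_priv = rxq->mp_params.mp_priv;
	pool->mp_ops = rxq->mp_params.mp_ops;
}
```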
|
dev.c
    3226  if (rxq < 1 || rxq > dev->num_rx_queues)   in netif_set_real_num_rx_queues()
    3233  rxq);   in netif_set_real_num_rx_queues()
    3238  dev->real_num_rx_queues = rxq;   in netif_set_real_num_rx_queues()
    3253  unsigned int txq, unsigned int rxq)   in netif_set_real_num_queues() (argument)
    3259  rxq < 1 || rxq > dev->num_rx_queues)   in netif_set_real_num_queues()
    3265  if (rxq > dev->real_num_rx_queues) {   in netif_set_real_num_queues()
    3275  if (rxq < dev->real_num_rx_queues)   in netif_set_real_num_queues()
    7062  struct netdev_rx_queue *rxq;   in netif_queue_set_napi() (local)
    7072  rxq->napi = napi;   in netif_queue_set_napi()
   12071  struct netdev_rx_queue *rxq = &dev->_rx[i];   in dev_memory_provider_uninstall() (local)
    [all …]
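
netif_set_real_num_rx_queues() and netif_set_real_num_queues() are the driver-facing side of these fragments: after resizing its rings (for example from an ethtool channel change), a driver reports how many queues are actually live, and the checks shown above enforce that each count stays between 1 and dev->num_*_queues. A hypothetical usage sketch:

```c
#include <linux/netdevice.h>

/* Hypothetical set_channels-style path: tell the stack how many TX/RX
 * queues are really usable after a ring reconfiguration. */
static int example_apply_channel_count(struct net_device *dev,
				       unsigned int txq, unsigned int rxq)
{
	/* rejected (typically -EINVAL) if a count is 0 or above num_*_queues */
	return netif_set_real_num_queues(dev, txq, rxq);
}
```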
|
filter.c
    4137  struct xdp_rxq_info *rxq = xdp->rxq;   in bpf_xdp_frags_increase_tail() (local)
    4140  if (!rxq->frag_size || rxq->frag_size > xdp->frame_sz)   in bpf_xdp_frags_increase_tail()
    4150  if (rxq->mem.type == MEM_TYPE_XSK_BUFF_POOL)   in bpf_xdp_frags_increase_tail()
    4172  enum xdp_mem_type mem_type = xdp->rxq->mem.type;   in bpf_xdp_shrink_data()
    4367  if (slave && slave != xdp->rxq->dev) {   in xdp_master_redirect()
    6426  struct net_device *dev = xdp->rxq->dev;   in BPF_CALL_5()
    7078  struct net_device *dev = ctx->rxq->dev;   in BPF_CALL_5()
    7102  struct net_device *dev = ctx->rxq->dev;   in BPF_CALL_5()
    7126  struct net_device *dev = ctx->rxq->dev;   in BPF_CALL_5()
   10225  offsetof(struct xdp_buff, rxq));   in xdp_convert_ctx_access()
    [all …]
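
On the program side, xdp_convert_ctx_access() rewrites reads of ctx->ingress_ifindex and ctx->rx_queue_index into loads through xdp_buff->rxq (rxq->dev->ifindex and rxq->queue_index respectively). A small BPF sketch of what that exposes to an XDP program; the program and section names are hypothetical.

```c
// SPDX-License-Identifier: GPL-2.0
#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

SEC("xdp")
int rxq_fields(struct xdp_md *ctx)
{
	/* both loads are backed by xdp_buff->rxq in the kernel */
	__u32 ifindex = ctx->ingress_ifindex;	/* rxq->dev->ifindex */
	__u32 queue = ctx->rx_queue_index;	/* rxq->queue_index */

	bpf_printk("pkt on ifindex %u queue %u", ifindex, queue);
	return XDP_PASS;
}

char _license[] SEC("license") = "GPL";
```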
|
net-sysfs.c
    2101  int error = 0, txq = 0, rxq = 0, real_rx = 0, real_tx = 0;   in register_queue_kobjects() (local)
    2115  rxq = real_rx;   in register_queue_kobjects()
    2126  net_rx_queue_update_kobjects(dev, rxq, 0);   in register_queue_kobjects()
|
/net/bpf/
test_run.c
     113  struct xdp_rxq_info rxq;   (member)
     151  xdp_init_buff(new_ctx, TEST_XDP_FRAME_SIZE, &xdp->rxq);   in xdp_test_run_init_page()
     156  frm->mem_type = new_ctx->rxq->mem.type;   in xdp_test_run_init_page()
     198  xdp_rxq_info_reg(&xdp->rxq, orig_ctx->rxq->dev, 0, 0);   in xdp_test_run_setup()
     199  xdp->rxq.mem.type = MEM_TYPE_PAGE_POOL;   in xdp_test_run_setup()
     200  xdp->rxq.mem.id = pp->xdp_mem_id;   in xdp_test_run_setup()
     201  xdp->dev = orig_ctx->rxq->dev;   in xdp_test_run_setup()
     249  head->frame->mem_type = head->orig_ctx.rxq->mem.type;   in reset_ctx()
    1179  xdp->rxq = &rxqueue->xdp_rxq;   in xdp_convert_md_to_buff()
    1202  dev_put(xdp->rxq->dev);   in xdp_convert_buff_to_md()
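
xdp_test_run_setup() registers its own xdp_rxq_info and points every xdp_buff at it, which is the same sequence a driver performs once per RX ring. A hedged sketch of that setup; napi_id 0, the headroom value and the helper name are placeholders.

```c
#include <linux/bpf.h>		/* XDP_PACKET_HEADROOM */
#include <net/xdp.h>

static int setup_xdp_rxq_sketch(struct xdp_rxq_info *rxq,
				struct net_device *dev, u32 queue_index,
				struct xdp_buff *xdp, void *hard_start,
				u32 frame_sz)
{
	int err;

	/* register the per-ring rxq info once... */
	err = xdp_rxq_info_reg(rxq, dev, queue_index, 0 /* napi_id */);
	if (err)
		return err;

	/* ...then point each buffer at it; mem.type/mem.id would be set
	 * afterwards, as the fragments above show */
	xdp_init_buff(xdp, frame_sz, rxq);
	xdp_prepare_buff(xdp, hard_start, XDP_PACKET_HEADROOM, 0, true);
	return 0;
}
```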
|
/net/xdp/
xsk.c
     325  if (xs->dev != xdp->rxq->dev || xs->queue_id != xdp->rxq->queue_index)   in xsk_rcv_check()
     368  if (xdp->rxq->mem.type == MEM_TYPE_XSK_BUFF_POOL) {   in xsk_rcv()
    1312  struct netdev_rx_queue *rxq;   in xsk_bind() (local)
    1314  rxq = __netif_get_rx_queue(dev, qid);   in xsk_bind()
    1315  if (rxq->napi)   in xsk_bind()
    1316  __sk_mark_napi_id_once(sk, rxq->napi->napi_id);   in xsk_bind()
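
xsk_rcv_check() only accepts a frame on the AF_XDP socket bound to the exact (device, queue) pair it arrived on, both taken from xdp->rxq, while xsk_bind() latches the queue's NAPI id for busy polling. A sketch of the receive-side check (the helper name is hypothetical):

```c
#include <net/xdp.h>
#include <net/xdp_sock.h>

static bool xsk_matches_rxq_sketch(const struct xdp_sock *xs,
				   const struct xdp_buff *xdp)
{
	/* socket was bound to (dev, queue_id); the frame carries the
	 * (dev, queue_index) it was received on in its rxq info */
	return xs->dev == xdp->rxq->dev &&
	       xs->queue_id == xdp->rxq->queue_index;
}
```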
|
xsk_buff_pool.c
     121  void xp_set_rxq_info(struct xsk_buff_pool *pool, struct xdp_rxq_info *rxq)   in xp_set_rxq_info() (argument)
     126  pool->heads[i].xdp.rxq = rxq;   in xp_set_rxq_info()
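
xp_set_rxq_info() stamps every pre-allocated xdp_buff owned by an XSK pool with the ring's rxq info in one pass, so a zero-copy driver calls it once at ring setup rather than writing xdp->rxq per packet. A hypothetical call site:

```c
#include <net/xdp.h>
#include <net/xsk_buff_pool.h>

/* Hypothetical ring-setup hook of a zero-copy capable driver; drivers
 * normally reach this through the xsk_pool_set_rxq_info() wrapper. */
static void example_ring_enable_zc(struct xsk_buff_pool *pool,
				   struct xdp_rxq_info *rxq)
{
	xp_set_rxq_info(pool, rxq);
}
```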
|
/net/xfrm/
xfrm_state_bpf.c
      68  struct net *net = dev_net(xdp->rxq->dev);   in bpf_xdp_get_xfrm_state()
|
/net/netfilter/
nf_flow_table_bpf.c
      94  tuplehash = bpf_xdp_flow_tuple_lookup(xdp->rxq->dev, &tuple, proto);   in bpf_xdp_flow_lookup()
|
nf_conntrack_bpf.c
     299  nfct = __bpf_nf_ct_alloc_entry(dev_net(ctx->rxq->dev), bpf_tuple, tuple__sz,   in bpf_xdp_ct_alloc()
     334  caller_net = dev_net(ctx->rxq->dev);   in bpf_xdp_ct_lookup()
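
The xfrm, flowtable and conntrack kfuncs above share one idiom: the struct net they operate in is never passed by the BPF program, it is derived from the device the frame arrived on. A minimal sketch of that derivation (the helper name is hypothetical):

```c
#include <linux/netdevice.h>
#include <net/xdp.h>

static struct net *xdp_caller_net_sketch(const struct xdp_buff *ctx)
{
	/* the receiving device hangs off ctx->rxq, its netns off the device */
	return dev_net(ctx->rxq->dev);
}
```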
|