Lines Matching refs:xskb (references to the xskb buffer pointer in net/xdp/xsk_buff_pool.c; the leading number on each hit is the source line in that file)

 60  struct xdp_buff_xsk *xskb;   in xp_create_and_assign_umem()  local
 98  xskb = &pool->heads[i];   in xp_create_and_assign_umem()
 99  xskb->pool = pool;   in xp_create_and_assign_umem()
100  xskb->xdp.frame_sz = umem->chunk_size - umem->headroom;   in xp_create_and_assign_umem()
101  INIT_LIST_HEAD(&xskb->free_list_node);   in xp_create_and_assign_umem()
103  pool->free_heads[i] = xskb;   in xp_create_and_assign_umem()
105  xp_init_xskb_addr(xskb, pool, i * pool->chunk_size);   in xp_create_and_assign_umem()
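
These first hits are the pool setup path: xp_create_and_assign_umem() walks pool->heads[] once and ties every xdp_buff_xsk back to its pool. Below is a sketch of the surrounding loop reconstructed from the listed lines; the loop bound and the pool->unaligned branch are my reading of the kernel and may differ between versions.

        for (i = 0; i < pool->free_heads_cnt; i++) {
                xskb = &pool->heads[i];
                xskb->pool = pool;
                xskb->xdp.frame_sz = umem->chunk_size - umem->headroom;
                INIT_LIST_HEAD(&xskb->free_list_node);
                if (pool->unaligned) {
                        /* unaligned mode: the chunk address is only known once a
                         * descriptor arrives on the fill queue, so park the head
                         * on the free_heads stack for later */
                        pool->free_heads[i] = xskb;
                } else {
                        /* aligned mode: chunk i always lives at i * chunk_size */
                        xp_init_xskb_addr(xskb, pool, i * pool->chunk_size);
                }
        }
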
390  struct xdp_buff_xsk *xskb = &pool->heads[i];   in xp_init_dma_info()  local
392  xp_init_xskb_dma(xskb, pool, dma_map->dma_pages, xskb->orig_addr);   in xp_init_dma_info()
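
Hit 392 fills in each buffer's DMA address from its fixed chunk address (xskb->orig_addr) once the umem pages have been DMA-mapped. Roughly, as I read the surrounding function (the !pool->unaligned guard and the heads_cnt bound are assumptions; in unaligned mode the DMA address is recomputed at allocation time instead):

        /* aligned pools: every chunk's DMA address can be computed up front */
        if (!pool->unaligned) {
                for (i = 0; i < pool->heads_cnt; i++) {
                        struct xdp_buff_xsk *xskb = &pool->heads[i];

                        xp_init_xskb_dma(xskb, pool, dma_map->dma_pages, xskb->orig_addr);
                }
        }
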
480  struct xdp_buff_xsk *xskb;   in __xp_alloc()  local
504  xskb = pool->free_heads[--pool->free_heads_cnt];   in __xp_alloc()
505  xp_init_xskb_addr(xskb, pool, addr);   in __xp_alloc()
507  xp_init_xskb_dma(xskb, pool, pool->dma_pages, addr);   in __xp_alloc()
509  xskb = &pool->heads[xp_aligned_extract_idx(pool, addr)];   in __xp_alloc()
513  return xskb;   in __xp_alloc()
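
__xp_alloc() is the slow allocation path that takes a fresh address from the fill queue. Lines 504-509 show the two pool modes: unaligned pops a spare head off free_heads and binds it to the address (and to its DMA mapping), while aligned simply indexes pool->heads by chunk. A condensed sketch follows; the xskq_* fill-queue helpers and the dma_pages_cnt test are my reading of the kernel, and descriptor validation is omitted.

        /* take one address from the fill queue (validation omitted here) */
        if (!xskq_cons_peek_addr_unchecked(pool->fq, &addr))
                return NULL;

        if (pool->unaligned) {
                /* bind a spare head to this arbitrary chunk address */
                xskb = pool->free_heads[--pool->free_heads_cnt];
                xp_init_xskb_addr(xskb, pool, addr);
                if (pool->dma_pages_cnt)
                        xp_init_xskb_dma(xskb, pool, pool->dma_pages, addr);
        } else {
                /* aligned: the chunk index picks the head directly */
                xskb = &pool->heads[xp_aligned_extract_idx(pool, addr)];
        }

        xskq_cons_release(pool->fq);
        return xskb;
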
518  struct xdp_buff_xsk *xskb;   in xp_alloc()  local
521  xskb = __xp_alloc(pool);   in xp_alloc()
522  if (!xskb)   in xp_alloc()
526  xskb = list_first_entry(&pool->free_list, struct xdp_buff_xsk,   in xp_alloc()
528  list_del_init(&xskb->free_list_node);   in xp_alloc()
531  xskb->xdp.data = xskb->xdp.data_hard_start + XDP_PACKET_HEADROOM;   in xp_alloc()
532  xskb->xdp.data_meta = xskb->xdp.data;   in xp_alloc()
535  dma_sync_single_range_for_device(pool->dev, xskb->dma, 0,   in xp_alloc()
539  return &xskb->xdp;   in xp_alloc()
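
xp_alloc() is the single-buffer allocator built on top of that: reuse a buffer from pool->free_list when one is available, otherwise fall back to __xp_alloc(), then reset the packet pointers and sync the frame for the device. A condensed sketch assembled from the listed lines; the free_list_cnt bookkeeping and the dma_need_sync guard around the sync are assumptions from my reading of the kernel.

struct xdp_buff *xp_alloc(struct xsk_buff_pool *pool)
{
        struct xdp_buff_xsk *xskb;

        if (!pool->free_list_cnt) {
                /* nothing to recycle: go to the fill queue via __xp_alloc() */
                xskb = __xp_alloc(pool);
                if (!xskb)
                        return NULL;
        } else {
                /* recycle the most recently freed buffer */
                pool->free_list_cnt--;
                xskb = list_first_entry(&pool->free_list, struct xdp_buff_xsk,
                                        free_list_node);
                list_del_init(&xskb->free_list_node);
        }

        /* fresh packet: data starts after the standard XDP headroom */
        xskb->xdp.data = xskb->xdp.data_hard_start + XDP_PACKET_HEADROOM;
        xskb->xdp.data_meta = xskb->xdp.data;

        if (pool->dma_need_sync)
                dma_sync_single_range_for_device(pool->dev, xskb->dma, 0,
                                                 pool->frame_len,
                                                 DMA_BIDIRECTIONAL);
        return &xskb->xdp;
}

Drivers normally reach this through the xsk_buff_alloc() wrapper in include/net/xdp_sock_drv.h, if I remember the driver-facing API correctly.
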
555  struct xdp_buff_xsk *xskb;   in xp_alloc_new_from_fq()  local
570  xskb = pool->free_heads[--pool->free_heads_cnt];   in xp_alloc_new_from_fq()
571  xp_init_xskb_addr(xskb, pool, addr);   in xp_alloc_new_from_fq()
573  xp_init_xskb_dma(xskb, pool, pool->dma_pages, addr);   in xp_alloc_new_from_fq()
575  xskb = &pool->heads[xp_aligned_extract_idx(pool, addr)];   in xp_alloc_new_from_fq()
578  *xdp = &xskb->xdp;   in xp_alloc_new_from_fq()
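
xp_alloc_new_from_fq() is the batched counterpart (used, as far as I know, by xp_alloc_batch()): the same aligned/unaligned binding as __xp_alloc() at lines 570-575, but the results are written into a caller-supplied array of xdp_buff pointers. The loop below is only a sketch; the fill-queue read helper and the skipping of invalid descriptors are abridged and reflect my reading of the kernel.

        while (nb_entries--) {
                u64 addr;

                /* read the next fill-queue address; invalid descriptors
                 * are skipped (abridged) */
                __xskq_cons_read_addr_unchecked(pool->fq, cached_cons++, &addr);

                if (pool->unaligned) {
                        xskb = pool->free_heads[--pool->free_heads_cnt];
                        xp_init_xskb_addr(xskb, pool, addr);
                        if (pool->dma_pages_cnt)
                                xp_init_xskb_dma(xskb, pool, pool->dma_pages, addr);
                } else {
                        xskb = &pool->heads[xp_aligned_extract_idx(pool, addr)];
                }

                *xdp = &xskb->xdp;      /* fill the caller's xdp_buff array */
                xdp++;
        }
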
588  struct xdp_buff_xsk *xskb;   in xp_alloc_reused()  local
595  xskb = list_first_entry(&pool->free_list, struct xdp_buff_xsk, free_list_node);   in xp_alloc_reused()
596  list_del_init(&xskb->free_list_node);   in xp_alloc_reused()
598  *xdp = &xskb->xdp;   in xp_alloc_reused()
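
xp_alloc_reused() is the cheap half of the batch path: it only drains buffers that were previously returned via xp_free(), so no fill-queue access and no re-initialization is needed. Sketch reconstructed from the listed lines; the min_t() clamp and the counter update are assumptions on my part.

        nb_entries = min_t(u32, nb_entries, pool->free_list_cnt);

        i = nb_entries;
        while (i--) {
                xskb = list_first_entry(&pool->free_list, struct xdp_buff_xsk,
                                        free_list_node);
                list_del_init(&xskb->free_list_node);

                *xdp = &xskb->xdp;
                xdp++;
        }
        pool->free_list_cnt -= nb_entries;
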
645  void xp_free(struct xdp_buff_xsk *xskb)   in xp_free()  argument
647  if (!list_empty(&xskb->free_list_node))   in xp_free()
650  xskb->pool->free_list_cnt++;   in xp_free()
651  list_add(&xskb->free_list_node, &xskb->pool->free_list);   in xp_free()
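
xp_free() hands a buffer back to its owning pool; note that it reaches the pool through xskb->pool, so callers only need the buffer itself. The list_empty() test at line 647 makes a double free harmless, because list_del_init() (lines 528 and 596) leaves the node pointing at itself. The listed lines are essentially the whole function; only the early-return body is implied rather than shown.

void xp_free(struct xdp_buff_xsk *xskb)
{
        if (!list_empty(&xskb->free_list_node))
                return;         /* already on the free list: ignore the double free */

        xskb->pool->free_list_cnt++;
        list_add(&xskb->free_list_node, &xskb->pool->free_list);
}
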
671  void xp_dma_sync_for_cpu_slow(struct xdp_buff_xsk *xskb)   in xp_dma_sync_for_cpu_slow()  argument
673  dma_sync_single_range_for_cpu(xskb->pool->dev, xskb->dma, 0,   in xp_dma_sync_for_cpu_slow()
674  xskb->pool->frame_len, DMA_BIDIRECTIONAL);   in xp_dma_sync_for_cpu_slow()
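
The final hits are the out-of-line DMA sync used before the CPU reads a frame the device has just written; the whole mapped frame (xskb->pool->frame_len) is synced. The "slow" suffix exists because an inline wrapper is expected to skip the call when the device's DMA is coherent; exactly where that dma_need_sync test lives has moved between kernel versions, so the wrapper below is an assumption on my part.

void xp_dma_sync_for_cpu_slow(struct xdp_buff_xsk *xskb)
{
        /* lines 673-674: sync the whole frame back for CPU access */
        dma_sync_single_range_for_cpu(xskb->pool->dev, xskb->dma, 0,
                                      xskb->pool->frame_len, DMA_BIDIRECTIONAL);
}

/* assumed fast-path wrapper (sketch only): */
static inline void xp_dma_sync_for_cpu(struct xdp_buff_xsk *xskb)
{
        if (!xskb->pool->dma_need_sync)
                return;         /* coherent DMA: the sync can be skipped */

        xp_dma_sync_for_cpu_slow(xskb);
}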