Lines Matching refs:umem

72 	struct xsk_umem *umem;  member
92 int xsk_umem__fd(const struct xsk_umem *umem) in xsk_umem__fd() argument
94 return umem ? umem->fd : -EINVAL; in xsk_umem__fd()
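
The two xsk_umem__fd() lines above show it is a trivial accessor: it returns the AF_XDP socket descriptor backing the UMEM, or -EINVAL when passed a NULL umem. A minimal caller-side sketch (the header path is an assumption; it is the local "xsk.h" in the kernel tree and <xdp/xsk.h> when building against libxdp):

#include <stdio.h>
#include <string.h>
#include "xsk.h"        /* assumed header location; <xdp/xsk.h> with libxdp */

/* Report the socket fd backing a UMEM; a negative value (-EINVAL) means
 * the umem pointer was NULL, as the listing above shows. */
static void report_umem_fd(const struct xsk_umem *umem)
{
        int fd = xsk_umem__fd(umem);

        if (fd < 0)
                fprintf(stderr, "no umem: %s\n", strerror(-fd));
        else
                printf("umem is backed by AF_XDP socket fd %d\n", fd);
}
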
163 static int xsk_create_umem_rings(struct xsk_umem *umem, int fd, in xsk_create_umem_rings() argument
172 &umem->config.fill_size, in xsk_create_umem_rings()
173 sizeof(umem->config.fill_size)); in xsk_create_umem_rings()
178 &umem->config.comp_size, in xsk_create_umem_rings()
179 sizeof(umem->config.comp_size)); in xsk_create_umem_rings()
187 map = mmap(NULL, off.fr.desc + umem->config.fill_size * sizeof(__u64), in xsk_create_umem_rings()
193 fill->mask = umem->config.fill_size - 1; in xsk_create_umem_rings()
194 fill->size = umem->config.fill_size; in xsk_create_umem_rings()
199 fill->cached_cons = umem->config.fill_size; in xsk_create_umem_rings()
201 map = mmap(NULL, off.cr.desc + umem->config.comp_size * sizeof(__u64), in xsk_create_umem_rings()
209 comp->mask = umem->config.comp_size - 1; in xsk_create_umem_rings()
210 comp->size = umem->config.comp_size; in xsk_create_umem_rings()
219 munmap(map, off.fr.desc + umem->config.fill_size * sizeof(__u64)); in xsk_create_umem_rings()
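
The xsk_create_umem_rings() lines above expose the kernel interface behind UMEM ring setup: the fill and completion ring sizes are pushed to the socket with setsockopt(), the ring layout offsets (off.fr, off.cr) are queried from the kernel, and each ring is mmap()ed with a length of the descriptor offset plus size * sizeof(__u64). A minimal sketch of the fill-ring half, using only UAPI definitions from <linux/if_xdp.h>; error reporting is trimmed and the older, shorter xdp_mmap_offsets layout that the real helper also handles is ignored:

#include <stddef.h>
#include <sys/mman.h>
#include <sys/socket.h>
#include <linux/if_xdp.h>

/* Sketch: map an AF_XDP fill ring of fill_size descriptors onto socket fd,
 * mirroring what the listed xsk_create_umem_rings() does, minus error paths. */
static void *map_fill_ring(int fd, __u32 fill_size)
{
        struct xdp_mmap_offsets off;
        socklen_t optlen = sizeof(off);
        void *map;

        /* Tell the kernel how many descriptors the fill ring should hold. */
        if (setsockopt(fd, SOL_XDP, XDP_UMEM_FILL_RING,
                       &fill_size, sizeof(fill_size)))
                return NULL;

        /* Ask where the producer/consumer pointers and descriptors live. */
        if (getsockopt(fd, SOL_XDP, XDP_MMAP_OFFSETS, &off, &optlen))
                return NULL;

        /* Map the whole ring: header area (up to off.fr.desc) plus the
         * descriptor array, exactly the length used in the listing. */
        map = mmap(NULL, off.fr.desc + fill_size * sizeof(__u64),
                   PROT_READ | PROT_WRITE, MAP_SHARED | MAP_POPULATE,
                   fd, XDP_UMEM_PGOFF_FILL_RING);
        return map == MAP_FAILED ? NULL : map;
}

The completion ring follows the same pattern with XDP_UMEM_COMPLETION_RING, off.cr and XDP_UMEM_PGOFF_COMPLETION_RING.
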
229 struct xsk_umem *umem; in xsk_umem__create() local
237 umem = calloc(1, sizeof(*umem)); in xsk_umem__create()
238 if (!umem) in xsk_umem__create()
241 umem->fd = socket(AF_XDP, SOCK_RAW | SOCK_CLOEXEC, 0); in xsk_umem__create()
242 if (umem->fd < 0) { in xsk_umem__create()
247 umem->umem_area = umem_area; in xsk_umem__create()
248 INIT_LIST_HEAD(&umem->ctx_list); in xsk_umem__create()
249 xsk_set_umem_config(&umem->config, usr_config); in xsk_umem__create()
254 mr.chunk_size = umem->config.frame_size; in xsk_umem__create()
255 mr.headroom = umem->config.frame_headroom; in xsk_umem__create()
256 mr.flags = umem->config.flags; in xsk_umem__create()
257 mr.tx_metadata_len = umem->config.tx_metadata_len; in xsk_umem__create()
259 err = setsockopt(umem->fd, SOL_XDP, XDP_UMEM_REG, &mr, sizeof(mr)); in xsk_umem__create()
265 err = xsk_create_umem_rings(umem, umem->fd, fill, comp); in xsk_umem__create()
269 umem->fill_save = fill; in xsk_umem__create()
270 umem->comp_save = comp; in xsk_umem__create()
271 *umem_ptr = umem; in xsk_umem__create()
275 close(umem->fd); in xsk_umem__create()
277 free(umem); in xsk_umem__create()
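
Taken together, the xsk_umem__create() references above show the sequence the helper performs: allocate the handle, open an AF_XDP socket, register the user memory with XDP_UMEM_REG (frame size, headroom, flags, tx_metadata_len), create the fill/completion rings on that socket, and save the ring pointers for the first xsk_socket__create() call. A minimal caller-side sketch, assuming the default configuration (NULL config) and a page-aligned buffer; the header location is an assumption as above:

#include <stdlib.h>
#include <unistd.h>
#include "xsk.h"        /* assumed header; <xdp/xsk.h> with libxdp */

#define NUM_FRAMES      4096
#define FRAME_SIZE      XSK_UMEM__DEFAULT_FRAME_SIZE

/* Sketch: create a UMEM over a freshly allocated, page-aligned buffer.
 * On success *fill and *comp are initialized by xsk_umem__create(). */
static struct xsk_umem *create_umem(struct xsk_ring_prod *fill,
                                    struct xsk_ring_cons *comp)
{
        size_t size = (size_t)NUM_FRAMES * FRAME_SIZE;
        struct xsk_umem *umem;
        void *buffer;

        /* The UMEM area must be page aligned. */
        if (posix_memalign(&buffer, sysconf(_SC_PAGESIZE), size))
                return NULL;

        /* NULL config selects the library defaults for fill/comp ring
         * sizes, frame size and headroom. */
        if (xsk_umem__create(&umem, buffer, size, fill, comp, NULL)) {
                free(buffer);
                return NULL;
        }
        return umem;
}
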
458 static struct xsk_ctx *xsk_get_ctx(struct xsk_umem *umem, int ifindex, in xsk_get_ctx() argument
463 if (list_empty(&umem->ctx_list)) in xsk_get_ctx()
466 list_for_each_entry(ctx, &umem->ctx_list, list) { in xsk_get_ctx()
478 struct xsk_umem *umem = ctx->umem; in xsk_put_ctx() local
488 err = xsk_get_mmap_offsets(umem->fd, &off); in xsk_put_ctx()
492 munmap(ctx->fill->ring - off.fr.desc, off.fr.desc + umem->config.fill_size * in xsk_put_ctx()
494 munmap(ctx->comp->ring - off.cr.desc, off.cr.desc + umem->config.comp_size * in xsk_put_ctx()
503 struct xsk_umem *umem, int ifindex, in xsk_create_ctx() argument
515 if (!umem->fill_save) { in xsk_create_ctx()
516 err = xsk_create_umem_rings(umem, xsk->fd, fill, comp); in xsk_create_ctx()
521 } else if (umem->fill_save != fill || umem->comp_save != comp) { in xsk_create_ctx()
523 memcpy(fill, umem->fill_save, sizeof(*fill)); in xsk_create_ctx()
524 memcpy(comp, umem->comp_save, sizeof(*comp)); in xsk_create_ctx()
529 ctx->umem = umem; in xsk_create_ctx()
534 list_add(&ctx->list, &umem->ctx_list); in xsk_create_ctx()
540 __u32 queue_id, struct xsk_umem *umem, in xsk_socket__create_shared() argument
555 if (!umem || !xsk_ptr || !(rx || tx)) in xsk_socket__create_shared()
558 unmap = umem->fill_save != fill; in xsk_socket__create_shared()
568 if (umem->refcount++ > 0) { in xsk_socket__create_shared()
575 xsk->fd = umem->fd; in xsk_socket__create_shared()
576 rx_setup_done = umem->rx_ring_setup_done; in xsk_socket__create_shared()
577 tx_setup_done = umem->tx_ring_setup_done; in xsk_socket__create_shared()
580 ctx = xsk_get_ctx(umem, ifindex, queue_id); in xsk_socket__create_shared()
587 ctx = xsk_create_ctx(xsk, umem, ifindex, queue_id, fill, comp); in xsk_socket__create_shared()
603 if (xsk->fd == umem->fd) in xsk_socket__create_shared()
604 umem->rx_ring_setup_done = true; in xsk_socket__create_shared()
614 if (xsk->fd == umem->fd) in xsk_socket__create_shared()
615 umem->tx_ring_setup_done = true; in xsk_socket__create_shared()
672 if (umem->refcount > 1) { in xsk_socket__create_shared()
674 sxdp.sxdp_shared_umem_fd = umem->fd; in xsk_socket__create_shared()
686 umem->fill_save = NULL; in xsk_socket__create_shared()
687 umem->comp_save = NULL; in xsk_socket__create_shared()
701 if (--umem->refcount) in xsk_socket__create_shared()
709 __u32 queue_id, struct xsk_umem *umem, in xsk_socket__create() argument
713 if (!umem) in xsk_socket__create()
716 return xsk_socket__create_shared(xsk_ptr, ifindex, queue_id, umem, in xsk_socket__create()
717 rx, tx, umem->fill_save, in xsk_socket__create()
718 umem->comp_save, usr_config); in xsk_socket__create()
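
The xsk_socket__create() lines above show it is a thin wrapper around xsk_socket__create_shared(): the first socket on a UMEM reuses the fill/completion rings saved by xsk_umem__create() (fill_save/comp_save), and once those are consumed they are cleared, so every additional socket sharing the UMEM must bring its own fill/completion ring pair and is bound with sxdp_shared_umem_fd. A hedged sketch of both cases; note that this listing is the ifindex-based variant of the API, while the public libbpf/libxdp xsk_socket__create() takes an interface name instead:

#include <net/if.h>
#include "xsk.h"        /* assumed header; <xdp/xsk.h> with libxdp */

/* Sketch: first socket on the UMEM (reuses the rings created with the UMEM),
 * plus a second socket sharing the same UMEM on another queue, which needs
 * its own fill/completion rings.  Signatures follow the ifindex-based file
 * in this listing; "eth0" is just an example interface. */
static int create_sockets(struct xsk_umem *umem,
                          struct xsk_ring_cons *rx0, struct xsk_ring_prod *tx0,
                          struct xsk_ring_cons *rx1, struct xsk_ring_prod *tx1,
                          struct xsk_ring_prod *fill1, struct xsk_ring_cons *comp1,
                          struct xsk_socket **xsk0, struct xsk_socket **xsk1)
{
        int ifindex = if_nametoindex("eth0");
        int err;

        if (!ifindex)
                return -1;

        /* Queue 0: the convenience wrapper forwards the UMEM's saved rings. */
        err = xsk_socket__create(xsk0, ifindex, 0, umem, rx0, tx0, NULL);
        if (err)
                return err;

        /* Queue 1: shared UMEM, so supply a fresh fill/comp ring pair. */
        return xsk_socket__create_shared(xsk1, ifindex, 1, umem, rx1, tx1,
                                         fill1, comp1, NULL);
}
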
721 int xsk_umem__delete(struct xsk_umem *umem) in xsk_umem__delete() argument
726 if (!umem) in xsk_umem__delete()
729 if (umem->refcount) in xsk_umem__delete()
732 err = xsk_get_mmap_offsets(umem->fd, &off); in xsk_umem__delete()
733 if (!err && umem->fill_save && umem->comp_save) { in xsk_umem__delete()
734 munmap(umem->fill_save->ring - off.fr.desc, in xsk_umem__delete()
735 off.fr.desc + umem->config.fill_size * sizeof(__u64)); in xsk_umem__delete()
736 munmap(umem->comp_save->ring - off.cr.desc, in xsk_umem__delete()
737 off.cr.desc + umem->config.comp_size * sizeof(__u64)); in xsk_umem__delete()
740 close(umem->fd); in xsk_umem__delete()
741 free(umem); in xsk_umem__delete()
750 struct xsk_umem *umem; in xsk_socket__delete() local
758 umem = ctx->umem; in xsk_socket__delete()
774 umem->refcount--; in xsk_socket__delete()
778 if (xsk->fd != umem->fd) in xsk_socket__delete()
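
The delete paths above encode the teardown ordering: xsk_umem__delete() bails out with an error while umem->refcount is still held, and xsk_socket__delete() is what drops that refcount and unmaps the per-context rings. Sockets therefore go away before their UMEM, and since none of the delete lines free the registered UMEM area, the caller releases that buffer last. A minimal sketch of the shutdown sequence under those assumptions:

#include <stdlib.h>
#include "xsk.h"        /* assumed header; <xdp/xsk.h> with libxdp */

/* Sketch: tear down in the reverse order of creation.  Each socket delete
 * decrements the UMEM refcount; the UMEM can only be deleted once no socket
 * holds it; the buffer registered as the UMEM area is freed by the caller. */
static void teardown(struct xsk_socket *xsk, struct xsk_umem *umem,
                     void *umem_area)
{
        if (xsk)
                xsk_socket__delete(xsk);

        /* Fails while any socket on this UMEM remains. */
        if (umem)
                xsk_umem__delete(umem);

        free(umem_area);
}
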