Lines matching references to bl in the io_uring buffer-list code (io_uring/kbuf.c)

43 struct io_buffer_list *bl, unsigned int bgid) in io_buffer_add_list() argument
50 bl->bgid = bgid; in io_buffer_add_list()
51 atomic_set(&bl->refs, 1); in io_buffer_add_list()
52 return xa_err(xa_store(&ctx->io_bl_xa, bgid, bl, GFP_KERNEL)); in io_buffer_add_list()
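
The three lines above are the whole registration step: the group ID is recorded on the list, the reference count starts at 1 (the reference owned by the xarray), and the list is published in ctx->io_bl_xa keyed by bgid. A minimal userspace model of that keyed-publish step, with a fixed-size table standing in for the kernel xarray (buf_table, MAX_BGID and the struct layout are invented for illustration):

#include <errno.h>
#include <stdatomic.h>
#include <stddef.h>

#define MAX_BGID 64	/* the bound is an artifact of this model; the
			 * kernel's xarray grows on demand */

struct buffer_list {
	unsigned int bgid;
	atomic_int refs;
};

static struct buffer_list *buf_table[MAX_BGID];

/* Model of io_buffer_add_list(): publish bl under its group ID. */
static int buffer_add_list(struct buffer_list *bl, unsigned int bgid)
{
	if (bgid >= MAX_BGID)
		return -EINVAL;
	bl->bgid = bgid;
	atomic_store(&bl->refs, 1);	/* the table owns one reference */
	buf_table[bgid] = bl;
	return 0;
}
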
58 struct io_buffer_list *bl; in io_kbuf_recycle_legacy() local
64 bl = io_buffer_get_list(ctx, buf->bgid); in io_kbuf_recycle_legacy()
65 list_add(&buf->list, &bl->buf_list); in io_kbuf_recycle_legacy()
101 struct io_buffer_list *bl) in io_provided_buffer_select() argument
103 if (!list_empty(&bl->buf_list)) { in io_provided_buffer_select()
106 kbuf = list_first_entry(&bl->buf_list, struct io_buffer, list); in io_provided_buffer_select()
110 if (list_empty(&bl->buf_list)) in io_provided_buffer_select()
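
Legacy ("classic") provided buffers sit on a plain linked list: selection pops the first entry, and io_kbuf_recycle_legacy() pushes an unused buffer back onto the front, so a recycled buffer is the next one handed out. A sketch of that LIFO discipline with a singly linked list (struct kbuf, pop_buffer and recycle_buffer are illustrative names, not the kernel API):

#include <stddef.h>

struct kbuf {
	struct kbuf *next;
	void *addr;
	unsigned int len;
};

/* Model of io_provided_buffer_select(): take the first queued buffer. */
static struct kbuf *pop_buffer(struct kbuf **head)
{
	struct kbuf *buf = *head;

	if (buf)
		*head = buf->next;	/* the list may now be empty */
	return buf;
}

/* Model of io_kbuf_recycle_legacy(): put an unused buffer back in front. */
static void recycle_buffer(struct kbuf **head, struct kbuf *buf)
{
	buf->next = *head;
	*head = buf;
}
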
121 struct io_buffer_list *bl, in io_provided_buffers_select() argument
126 buf = io_provided_buffer_select(req, len, bl); in io_provided_buffers_select()
136 struct io_buffer_list *bl, in io_ring_buffer_select() argument
139 struct io_uring_buf_ring *br = bl->buf_ring; in io_ring_buffer_select()
140 __u16 tail, head = bl->head; in io_ring_buffer_select()
150 buf = io_ring_head_to_buf(br, head, bl->mask); in io_ring_buffer_select()
154 req->buf_list = bl; in io_ring_buffer_select()
168 io_kbuf_commit(req, bl, *len, 1); in io_ring_buffer_select()
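
Ring-mapped groups (IOBL_BUF_RING) avoid the list walk entirely: the kernel keeps a head counter, the application publishes a tail, and io_ring_head_to_buf() picks an entry by masking the head into the ring array. A compilable model of that indexing (names invented; note the kernel defers the head advance to io_kbuf_commit() rather than bumping it at selection time as done here):

#include <stdint.h>
#include <stddef.h>

struct ring_buf {
	uint64_t addr;
	uint32_t len;
	uint16_t bid;
};

struct buf_ring {
	struct ring_buf *bufs;	/* ring_entries slots, a power of two */
	uint16_t mask;		/* ring_entries - 1 */
	uint16_t head;		/* consumer side (kernel) */
	uint16_t tail;		/* producer side (application) */
};

/* Model of io_ring_buffer_select(): NULL when the app queued nothing. */
static struct ring_buf *ring_pick(struct buf_ring *br)
{
	if (br->tail == br->head)
		return NULL;			/* ring is empty */
	return &br->bufs[br->head++ & br->mask];
}
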
178 struct io_buffer_list *bl; in io_buffer_select() local
183 bl = io_buffer_get_list(ctx, req->buf_index); in io_buffer_select()
184 if (likely(bl)) { in io_buffer_select()
185 if (bl->flags & IOBL_BUF_RING) in io_buffer_select()
186 ret = io_ring_buffer_select(req, len, bl, issue_flags); in io_buffer_select()
188 ret = io_provided_buffer_select(req, len, bl); in io_buffer_select()
198 struct io_buffer_list *bl) in io_ring_buffers_peek() argument
200 struct io_uring_buf_ring *br = bl->buf_ring; in io_ring_buffers_peek()
207 head = bl->head; in io_ring_buffers_peek()
212 buf = io_ring_head_to_buf(br, head, bl->mask); in io_ring_buffers_peek()
224 if (bl->flags & IOBL_INC) { in io_ring_buffers_peek()
263 if (!(bl->flags & IOBL_INC)) in io_ring_buffers_peek()
276 buf = io_ring_head_to_buf(br, ++head, bl->mask); in io_ring_buffers_peek()
283 req->buf_list = bl; in io_ring_buffers_peek()
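
io_ring_buffers_peek() sizes a multi-buffer selection from the same two counters: the number of queued entries is tail - head in wrap-safe 16-bit arithmetic, and each successive buffer comes from io_ring_head_to_buf(br, ++head, mask). With IOBL_INC (incremental) rings an entry may be consumed piecemeal, so the head only moves once an entry is fully used; that refinement is omitted from the sketch below (peek_buffers is an invented name, reusing struct buf_ring / struct ring_buf from the previous sketch):

/* Model of the io_ring_buffers_peek() walk: visit up to max_nr queued
 * entries without committing the head; returns how many were seen. */
static int peek_buffers(struct buf_ring *br, struct ring_buf **out,
			int max_nr)
{
	uint16_t head = br->head;
	uint16_t avail = (uint16_t)(br->tail - head);	/* wrap-safe in u16 */
	int nr = 0;

	if (avail < max_nr)
		max_nr = avail;
	while (nr < max_nr)
		out[nr++] = &br->bufs[head++ & br->mask];
	return nr;
}
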
291 struct io_buffer_list *bl; in io_buffers_select() local
295 bl = io_buffer_get_list(ctx, req->buf_index); in io_buffers_select()
296 if (unlikely(!bl)) in io_buffers_select()
299 if (bl->flags & IOBL_BUF_RING) { in io_buffers_select()
300 ret = io_ring_buffers_peek(req, arg, bl); in io_buffers_select()
310 io_kbuf_commit(req, bl, arg->out_len, ret); in io_buffers_select()
313 ret = io_provided_buffers_select(req, &arg->out_len, bl, arg->iovs); in io_buffers_select()
323 struct io_buffer_list *bl; in io_buffers_peek() local
328 bl = io_buffer_get_list(ctx, req->buf_index); in io_buffers_peek()
329 if (unlikely(!bl)) in io_buffers_peek()
332 if (bl->flags & IOBL_BUF_RING) { in io_buffers_peek()
333 ret = io_ring_buffers_peek(req, arg, bl); in io_buffers_peek()
340 return io_provided_buffers_select(req, &arg->max_len, bl, arg->iovs); in io_buffers_peek()
344 struct io_buffer_list *bl, unsigned nbufs) in __io_remove_buffers() argument
352 if (bl->flags & IOBL_BUF_RING) { in __io_remove_buffers()
353 i = bl->buf_ring->tail - bl->head; in __io_remove_buffers()
354 if (bl->buf_nr_pages) { in __io_remove_buffers()
357 if (!(bl->flags & IOBL_MMAP)) { in __io_remove_buffers()
358 for (j = 0; j < bl->buf_nr_pages; j++) in __io_remove_buffers()
359 unpin_user_page(bl->buf_pages[j]); in __io_remove_buffers()
361 io_pages_unmap(bl->buf_ring, &bl->buf_pages, in __io_remove_buffers()
362 &bl->buf_nr_pages, bl->flags & IOBL_MMAP); in __io_remove_buffers()
363 bl->flags &= ~IOBL_MMAP; in __io_remove_buffers()
366 INIT_LIST_HEAD(&bl->buf_list); in __io_remove_buffers()
367 bl->flags &= ~IOBL_BUF_RING; in __io_remove_buffers()
374 while (!list_empty(&bl->buf_list)) { in __io_remove_buffers()
377 nxt = list_first_entry(&bl->buf_list, struct io_buffer, list); in __io_remove_buffers()
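
__io_remove_buffers() is the one teardown path for both modes: for a ring it computes the outstanding entries (tail - head), releases either the user pages it pinned or the pages it allocated (depending on IOBL_MMAP), and clears IOBL_BUF_RING; for a legacy group it simply pops and frees every remaining buffer. A reduced model of that two-way teardown, reusing struct buf_ring and struct kbuf from the sketches above (free_group and the flag bit values are illustrative):

#include <stdlib.h>

#define IOBL_BUF_RING	(1u << 0)	/* bit values are illustrative */
#define IOBL_MMAP	(1u << 1)

struct group {
	unsigned int flags;
	struct buf_ring *ring;		/* valid when IOBL_BUF_RING is set */
	struct kbuf *list_head;		/* valid otherwise */
};

/* Model of __io_remove_buffers(): tear down whichever mode is active. */
static void free_group(struct group *g)
{
	if (g->flags & IOBL_BUF_RING) {
		free(g->ring->bufs);	/* stands in for unpinning pages or
					 * io_pages_unmap() */
		g->flags &= ~(IOBL_BUF_RING | IOBL_MMAP);
		return;
	}
	while (g->list_head) {		/* legacy provided buffers */
		struct kbuf *buf = g->list_head;

		g->list_head = buf->next;
		free(buf);
	}
}
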
387 void io_put_bl(struct io_ring_ctx *ctx, struct io_buffer_list *bl) in io_put_bl() argument
389 if (atomic_dec_and_test(&bl->refs)) { in io_put_bl()
390 __io_remove_buffers(ctx, bl, -1U); in io_put_bl()
391 kfree_rcu(bl, rcu); in io_put_bl()
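
io_put_bl() pairs with the atomic_set(&bl->refs, 1) done at registration: whichever path drops the count to zero runs the teardown and frees the list via kfree_rcu(), so concurrent RCU readers (see the mmap lookup further down) never touch freed memory. A userspace model of that dec-and-test ownership, reusing struct buffer_list from the first sketch (put_group is invented; plain free() stands in for kfree_rcu()):

#include <stdatomic.h>
#include <stdlib.h>

/* Model of io_put_bl(): the last reference tears the group down. */
static void put_group(struct buffer_list *bl)
{
	/* atomic_fetch_sub() returns the old value, so old == 1 means
	 * this caller just dropped the count to zero. */
	if (atomic_fetch_sub(&bl->refs, 1) == 1)
		free(bl);	/* kernel: kfree_rcu(bl, rcu) */
}
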
397 struct io_buffer_list *bl; in io_destroy_buffers() local
402 xa_for_each(&ctx->io_bl_xa, index, bl) { in io_destroy_buffers()
403 xa_erase(&ctx->io_bl_xa, bl->bgid); in io_destroy_buffers()
404 io_put_bl(ctx, bl); in io_destroy_buffers()
444 struct io_buffer_list *bl; in io_remove_buffers() local
450 bl = io_buffer_get_list(ctx, p->bgid); in io_remove_buffers()
451 if (bl) { in io_remove_buffers()
454 if (!(bl->flags & IOBL_BUF_RING)) in io_remove_buffers()
455 ret = __io_remove_buffers(ctx, bl, p->nbufs); in io_remove_buffers()
548 struct io_buffer_list *bl) in io_add_buffers() argument
560 list_move_tail(&buf->list, &bl->buf_list); in io_add_buffers()
577 struct io_buffer_list *bl; in io_provide_buffers() local
582 bl = io_buffer_get_list(ctx, p->bgid); in io_provide_buffers()
583 if (unlikely(!bl)) { in io_provide_buffers()
584 bl = kzalloc(sizeof(*bl), GFP_KERNEL_ACCOUNT); in io_provide_buffers()
585 if (!bl) { in io_provide_buffers()
589 INIT_LIST_HEAD(&bl->buf_list); in io_provide_buffers()
590 ret = io_buffer_add_list(ctx, bl, p->bgid); in io_provide_buffers()
596 kfree_rcu(bl, rcu); in io_provide_buffers()
601 if (bl->flags & IOBL_BUF_RING) { in io_provide_buffers()
606 ret = io_add_buffers(ctx, p, bl); in io_provide_buffers()
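
io_provide_buffers() creates groups lazily: the first provide for an unknown bgid kzalloc()s a list, initializes it and publishes it; if publishing fails, the half-built list is released with kfree_rcu(), and a group already converted to a ring (IOBL_BUF_RING) rejects classic provides. A sketch of that get-or-create step on top of the buf_table model above (get_or_create_group is an invented name):

#include <stdlib.h>

/* Model of the lookup-or-allocate step in io_provide_buffers(). */
static struct buffer_list *get_or_create_group(unsigned int bgid)
{
	struct buffer_list *bl = bgid < MAX_BGID ? buf_table[bgid] : NULL;

	if (bl)
		return bl;			/* existing group */
	bl = calloc(1, sizeof(*bl));		/* kzalloc() in the kernel */
	if (!bl)
		return NULL;
	if (buffer_add_list(bl, bgid) < 0) {	/* publish under bgid */
		free(bl);			/* kernel: kfree_rcu() */
		return NULL;
	}
	return bl;
}
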
617 struct io_buffer_list *bl) in io_pin_pbuf_ring() argument
650 bl->buf_pages = pages; in io_pin_pbuf_ring()
651 bl->buf_nr_pages = nr_pages; in io_pin_pbuf_ring()
652 bl->buf_ring = br; in io_pin_pbuf_ring()
653 bl->flags |= IOBL_BUF_RING; in io_pin_pbuf_ring()
654 bl->flags &= ~IOBL_MMAP; in io_pin_pbuf_ring()
665 struct io_buffer_list *bl) in io_alloc_pbuf_ring() argument
671 bl->buf_ring = io_pages_map(&bl->buf_pages, &bl->buf_nr_pages, ring_size); in io_alloc_pbuf_ring()
672 if (IS_ERR(bl->buf_ring)) { in io_alloc_pbuf_ring()
673 bl->buf_ring = NULL; in io_alloc_pbuf_ring()
677 bl->flags |= (IOBL_BUF_RING | IOBL_MMAP); in io_alloc_pbuf_ring()
684 struct io_buffer_list *bl, *free_bl = NULL; in io_register_pbuf_ring() local
713 bl = io_buffer_get_list(ctx, reg.bgid); in io_register_pbuf_ring()
714 if (bl) { in io_register_pbuf_ring()
716 if (bl->flags & IOBL_BUF_RING || !list_empty(&bl->buf_list)) in io_register_pbuf_ring()
719 free_bl = bl = kzalloc(sizeof(*bl), GFP_KERNEL); in io_register_pbuf_ring()
720 if (!bl) in io_register_pbuf_ring()
725 ret = io_pin_pbuf_ring(&reg, bl); in io_register_pbuf_ring()
727 ret = io_alloc_pbuf_ring(ctx, &reg, bl); in io_register_pbuf_ring()
730 bl->nr_entries = reg.ring_entries; in io_register_pbuf_ring()
731 bl->mask = reg.ring_entries - 1; in io_register_pbuf_ring()
733 bl->flags |= IOBL_INC; in io_register_pbuf_ring()
735 io_buffer_add_list(ctx, bl, reg.bgid); in io_register_pbuf_ring()
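
Note bl->mask = reg.ring_entries - 1: the mask trick only works because ring_entries is required to be a power of two. From userspace this registration path is reached via IORING_REGISTER_PBUF_RING; with liburing the setup and the initial fill collapse into a few calls. A minimal sketch, assuming liburing 2.4 or newer (error handling and buffer cleanup trimmed, sizes arbitrary):

#include <liburing.h>
#include <stdlib.h>

#define ENTRIES	8	/* must be a power of two: mask = ENTRIES - 1 */
#define BGID	0
#define BUF_SZ	4096

int main(void)
{
	struct io_uring ring;
	struct io_uring_buf_ring *br;
	int i, err;

	if (io_uring_queue_init(8, &ring, 0) < 0)
		return 1;
	/* Allocates, maps and registers the ring (IORING_REGISTER_PBUF_RING). */
	br = io_uring_setup_buf_ring(&ring, ENTRIES, BGID, 0, &err);
	if (!br)
		return 1;
	for (i = 0; i < ENTRIES; i++)	/* hand ENTRIES buffers to the kernel */
		io_uring_buf_ring_add(br, malloc(BUF_SZ), BUF_SZ, i,
				      io_uring_buf_ring_mask(ENTRIES), i);
	io_uring_buf_ring_advance(br, ENTRIES);	/* publish the new tail */

	io_uring_free_buf_ring(&ring, br, ENTRIES, BGID);
	io_uring_queue_exit(&ring);
	return 0;
}
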
746 struct io_buffer_list *bl; in io_unregister_pbuf_ring() local
757 bl = io_buffer_get_list(ctx, reg.bgid); in io_unregister_pbuf_ring()
758 if (!bl) in io_unregister_pbuf_ring()
760 if (!(bl->flags & IOBL_BUF_RING)) in io_unregister_pbuf_ring()
763 xa_erase(&ctx->io_bl_xa, bl->bgid); in io_unregister_pbuf_ring()
764 io_put_bl(ctx, bl); in io_unregister_pbuf_ring()
771 struct io_buffer_list *bl; in io_register_pbuf_status() local
781 bl = io_buffer_get_list(ctx, buf_status.buf_group); in io_register_pbuf_status()
782 if (!bl) in io_register_pbuf_status()
784 if (!(bl->flags & IOBL_BUF_RING)) in io_register_pbuf_status()
787 buf_status.head = bl->head; in io_register_pbuf_status()
797 struct io_buffer_list *bl; in io_pbuf_get_bl() local
812 bl = xa_load(&ctx->io_bl_xa, bgid); in io_pbuf_get_bl()
815 if (bl && bl->flags & IOBL_MMAP) in io_pbuf_get_bl()
816 ret = atomic_inc_not_zero(&bl->refs); in io_pbuf_get_bl()
820 return bl; in io_pbuf_get_bl()
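
io_pbuf_get_bl() is the lockless lookup used by mmap: it loads the list from the xarray under RCU (the kfree_rcu() above provides the matching grace period), requires IOBL_MMAP (only kernel-allocated rings may be mapped), and takes a reference only via atomic_inc_not_zero(), so a list whose count already hit zero (teardown in flight) is never revived. A model of that guarded acquire on the buf_table sketch (try_get_group is invented; the RCU read section itself is elided):

#include <stdatomic.h>
#include <stddef.h>

/* Model of atomic_inc_not_zero() in io_pbuf_get_bl(): take a reference
 * only while the group is still live (refs > 0). */
static struct buffer_list *try_get_group(unsigned int bgid)
{
	struct buffer_list *bl = bgid < MAX_BGID ? buf_table[bgid] : NULL;
	int refs;

	if (!bl)
		return NULL;
	refs = atomic_load(&bl->refs);
	while (refs > 0) {		/* CAS loop == inc-not-zero */
		if (atomic_compare_exchange_weak(&bl->refs, &refs, refs + 1))
			return bl;	/* reference taken */
	}
	return NULL;			/* dying group: do not revive */
}
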
829 struct io_buffer_list *bl; in io_pbuf_mmap() local
833 bl = io_pbuf_get_bl(ctx, bgid); in io_pbuf_mmap()
834 if (IS_ERR(bl)) in io_pbuf_mmap()
835 return PTR_ERR(bl); in io_pbuf_mmap()
837 ret = io_uring_mmap_pages(ctx, vma, bl->buf_pages, bl->buf_nr_pages); in io_pbuf_mmap()
838 io_put_bl(ctx, bl); in io_pbuf_mmap()
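
io_pbuf_mmap() backs a userspace mmap() of such a kernel-allocated ring: the buffer group ID travels in the mmap offset, the ring pages are inserted into the VMA, and the temporary reference from io_pbuf_get_bl() is dropped again. A hedged sketch of the userspace side, assuming a kernel and headers with the IORING_OFF_PBUF_RING / IORING_OFF_PBUF_SHIFT offset encoding (map_pbuf_ring is an invented helper):

#include <linux/io_uring.h>
#include <sys/mman.h>
#include <stddef.h>
#include <stdint.h>

/* Map a kernel-allocated buffer ring (registered with IOU_PBUF_RING_MMAP):
 * the group ID rides in the mmap offset, which io_pbuf_mmap() decodes. */
static struct io_uring_buf_ring *map_pbuf_ring(int ring_fd, uint16_t bgid,
					       uint32_t entries)
{
	uint64_t off = IORING_OFF_PBUF_RING |
		       ((uint64_t)bgid << IORING_OFF_PBUF_SHIFT);
	size_t len = entries * sizeof(struct io_uring_buf);
	void *p = mmap(NULL, len, PROT_READ | PROT_WRITE, MAP_SHARED,
		       ring_fd, off);

	return p == MAP_FAILED ? NULL : p;
}
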