Lines matching refs:bl

35 static bool io_kbuf_inc_commit(struct io_buffer_list *bl, int len)  in io_kbuf_inc_commit()  argument
41 buf = io_ring_head_to_buf(bl->buf_ring, bl->head, bl->mask); in io_kbuf_inc_commit()
48 bl->head++; in io_kbuf_inc_commit()
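
The lookup on the lines above works because a registered buffer ring must have a power-of-two entry count, so the free-running 16-bit head can be wrapped onto a slot with a single AND against mask (ring_entries - 1). A minimal sketch of that indexing using the uapi types; the helper name here is illustrative, the kernel's own version is the io_ring_head_to_buf() seen throughout this listing:

    #include <linux/io_uring.h>  /* struct io_uring_buf_ring, struct io_uring_buf */

    /* Sketch: mask == ring_entries - 1, so (head & mask) maps the
     * monotonically increasing head onto a ring slot. */
    static inline struct io_uring_buf *
    head_to_buf(struct io_uring_buf_ring *br, __u16 head, __u16 mask)
    {
            return &br->bufs[head & mask];
    }
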
55 struct io_buffer_list *bl, int len, int nr) in io_kbuf_commit() argument
64 if (bl->flags & IOBL_INC) in io_kbuf_commit()
65 return io_kbuf_inc_commit(bl, len); in io_kbuf_commit()
66 bl->head += nr; in io_kbuf_commit()
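
The branch above is the whole split between the two ring flavors: rings registered with IOU_PBUF_RING_INC (IOBL_INC) are consumed byte-by-byte and may leave a partially used buffer at head, while classic rings retire nr whole buffers by bumping head. A reduced kernel-context sketch of the incremental case (not the exact function; min_t() is the kernel macro from linux/minmax.h):

    /* Sketch: consume len bytes starting at head. A partially drained
     * buffer keeps its slot, with addr/len advanced past the used part;
     * only fully drained buffers advance head. */
    static bool inc_commit(struct io_uring_buf_ring *br, __u16 *head,
                           __u16 mask, int len)
    {
            while (len) {
                    struct io_uring_buf *buf = &br->bufs[*head & mask];
                    __u32 this_len = min_t(int, len, buf->len);

                    buf->len -= this_len;
                    if (buf->len) {
                            buf->addr += this_len;
                            return false;   /* buffer only partially used */
                    }
                    (*head)++;              /* buffer fully used, retire it */
                    len -= this_len;
            }
            return true;
    }
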
79 struct io_buffer_list *bl, unsigned int bgid) in io_buffer_add_list() argument
86 bl->bgid = bgid; in io_buffer_add_list()
88 return xa_err(xa_store(&ctx->io_bl_xa, bgid, bl, GFP_KERNEL)); in io_buffer_add_list()
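
Buffer groups live in an xarray on the ring context, keyed by group id, which is why adding a list above is a single xa_store(). The matching lookup, the io_buffer_get_list() called throughout this listing, then reduces to an xa_load(); a sketch assuming only the ctx->io_bl_xa field visible above:

    #include <linux/xarray.h>

    /* Sketch: bgid -> buffer list; NULL when the group was never created. */
    static struct io_buffer_list *lookup_group(struct io_ring_ctx *ctx,
                                               unsigned int bgid)
    {
            return xa_load(&ctx->io_bl_xa, bgid);
    }
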
104 struct io_buffer_list *bl; in io_kbuf_recycle_legacy() local
110 bl = io_buffer_get_list(ctx, buf->bgid); in io_kbuf_recycle_legacy()
111 list_add(&buf->list, &bl->buf_list); in io_kbuf_recycle_legacy()
120 struct io_buffer_list *bl) in io_provided_buffer_select() argument
122 if (!list_empty(&bl->buf_list)) { in io_provided_buffer_select()
125 kbuf = list_first_entry(&bl->buf_list, struct io_buffer, list); in io_provided_buffer_select()
129 if (list_empty(&bl->buf_list)) in io_provided_buffer_select()
140 struct io_buffer_list *bl, in io_provided_buffers_select() argument
145 buf = io_provided_buffer_select(req, len, bl); in io_provided_buffers_select()
155 struct io_buffer_list *bl, in io_ring_buffer_select() argument
158 struct io_uring_buf_ring *br = bl->buf_ring; in io_ring_buffer_select()
159 __u16 tail, head = bl->head; in io_ring_buffer_select()
170 buf = io_ring_head_to_buf(br, head, bl->mask); in io_ring_buffer_select()
174 req->buf_list = bl; in io_ring_buffer_select()
189 io_kbuf_commit(req, bl, *len, 1); in io_ring_buffer_select()
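
Selection above is the consumer half of the shared-ring handshake: userspace store-releases tail after publishing buffers, and the kernel load-acquires it before touching any slot, which orders the slot contents ahead of the tail value that advertises them. A reduced sketch of the emptiness check (illustrative function name):

    /* Sketch: tail is written by userspace, head is private to the kernel.
     * The acquire pairs with the producer's store-release of tail. */
    static struct io_uring_buf *try_select(struct io_uring_buf_ring *br,
                                           __u16 head, __u16 mask)
    {
            __u16 tail = smp_load_acquire(&br->tail);

            if (tail == head)
                    return NULL;            /* ring empty */
            return &br->bufs[head & mask];
    }
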
199 struct io_buffer_list *bl; in io_buffer_select() local
204 bl = io_buffer_get_list(ctx, req->buf_index); in io_buffer_select()
205 if (likely(bl)) { in io_buffer_select()
206 if (bl->flags & IOBL_BUF_RING) in io_buffer_select()
207 ret = io_ring_buffer_select(req, len, bl, issue_flags); in io_buffer_select()
209 ret = io_provided_buffer_select(req, len, bl); in io_buffer_select()
219 struct io_buffer_list *bl) in io_ring_buffers_peek() argument
221 struct io_uring_buf_ring *br = bl->buf_ring; in io_ring_buffers_peek()
228 head = bl->head; in io_ring_buffers_peek()
233 buf = io_ring_head_to_buf(br, head, bl->mask); in io_ring_buffers_peek()
273 if (!(bl->flags & IOBL_INC)) in io_ring_buffers_peek()
286 buf = io_ring_head_to_buf(br, ++head, bl->mask); in io_ring_buffers_peek()
293 req->buf_list = bl; in io_ring_buffers_peek()
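
io_ring_buffers_peek() grabs several ring entries in one pass, for bundled operations that want an iovec per buffer, and deliberately does not move head; that happens later in io_kbuf_commit() once the operation knows how much it consumed. A much-reduced sketch of the walk (argument handling simplified; u64_to_user_ptr() is the kernel helper):

    /* Sketch: gather up to nr_iovs published buffers, head onward. */
    static int peek_bufs(struct io_uring_buf_ring *br, __u16 head, __u16 mask,
                         struct iovec *iovs, int nr_iovs)
    {
            __u16 tail = smp_load_acquire(&br->tail);
            int i;

            for (i = 0; i < nr_iovs && head != tail; i++, head++) {
                    struct io_uring_buf *buf = &br->bufs[head & mask];

                    iovs[i].iov_base = u64_to_user_ptr(buf->addr);
                    iovs[i].iov_len = buf->len;
            }
            return i;       /* head only advances at commit time */
    }
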
301 struct io_buffer_list *bl; in io_buffers_select() local
305 bl = io_buffer_get_list(ctx, req->buf_index); in io_buffers_select()
306 if (unlikely(!bl)) in io_buffers_select()
309 if (bl->flags & IOBL_BUF_RING) { in io_buffers_select()
310 ret = io_ring_buffers_peek(req, arg, bl); in io_buffers_select()
320 io_kbuf_commit(req, bl, arg->out_len, ret); in io_buffers_select()
323 ret = io_provided_buffers_select(req, &arg->out_len, bl, arg->iovs); in io_buffers_select()
333 struct io_buffer_list *bl; in io_buffers_peek() local
338 bl = io_buffer_get_list(ctx, req->buf_index); in io_buffers_peek()
339 if (unlikely(!bl)) in io_buffers_peek()
342 if (bl->flags & IOBL_BUF_RING) { in io_buffers_peek()
343 ret = io_ring_buffers_peek(req, arg, bl); in io_buffers_peek()
350 return io_provided_buffers_select(req, &arg->max_len, bl, arg->iovs); in io_buffers_peek()
355 struct io_buffer_list *bl = req->buf_list; in __io_put_kbuf_ring() local
358 if (bl) { in __io_put_kbuf_ring()
359 ret = io_kbuf_commit(req, bl, len, nr); in __io_put_kbuf_ring()
360 req->buf_index = bl->bgid; in __io_put_kbuf_ring()
383 struct io_buffer_list *bl, unsigned nbufs) in __io_remove_buffers() argument
391 if (bl->flags & IOBL_BUF_RING) { in __io_remove_buffers()
392 i = bl->buf_ring->tail - bl->head; in __io_remove_buffers()
393 io_free_region(ctx, &bl->region); in __io_remove_buffers()
395 INIT_LIST_HEAD(&bl->buf_list); in __io_remove_buffers()
396 bl->flags &= ~IOBL_BUF_RING; in __io_remove_buffers()
403 while (!list_empty(&bl->buf_list)) { in __io_remove_buffers()
406 nxt = list_first_entry(&bl->buf_list, struct io_buffer, list); in __io_remove_buffers()
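
One subtlety in the teardown above: the tail - head count of in-flight ring entries relies on unsigned 16-bit wraparound, so it stays correct even after both indices have wrapped past 65535. A runnable demonstration of just that arithmetic:

    #include <stdio.h>

    /* __u16 subtraction is modulo 2^16, so tail - head is the number of
     * unconsumed entries regardless of wraparound. */
    int main(void)
    {
            unsigned short head = 65530, tail = 4;  /* tail has wrapped */

            printf("in flight: %u\n", (unsigned short)(tail - head)); /* 10 */
            return 0;
    }
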
418 static void io_put_bl(struct io_ring_ctx *ctx, struct io_buffer_list *bl) in io_put_bl() argument
420 __io_remove_buffers(ctx, bl, -1U); in io_put_bl()
421 kfree(bl); in io_put_bl()
426 struct io_buffer_list *bl; in io_destroy_buffers() local
432 bl = xa_find(&ctx->io_bl_xa, &index, ULONG_MAX, XA_PRESENT); in io_destroy_buffers()
433 if (bl) in io_destroy_buffers()
434 xa_erase(&ctx->io_bl_xa, bl->bgid); in io_destroy_buffers()
436 if (!bl) in io_destroy_buffers()
438 io_put_bl(ctx, bl); in io_destroy_buffers()
442 static void io_destroy_bl(struct io_ring_ctx *ctx, struct io_buffer_list *bl) in io_destroy_bl() argument
445 WARN_ON_ONCE(xa_erase(&ctx->io_bl_xa, bl->bgid) != bl); in io_destroy_bl()
446 io_put_bl(ctx, bl); in io_destroy_bl()
472 struct io_buffer_list *bl; in io_remove_buffers() local
478 bl = io_buffer_get_list(ctx, p->bgid); in io_remove_buffers()
479 if (bl) { in io_remove_buffers()
482 if (!(bl->flags & IOBL_BUF_RING)) in io_remove_buffers()
483 ret = __io_remove_buffers(ctx, bl, p->nbufs); in io_remove_buffers()
531 struct io_buffer_list *bl) in io_add_buffers() argument
542 list_add_tail(&buf->list, &bl->buf_list); in io_add_buffers()
559 struct io_buffer_list *bl; in io_provide_buffers() local
564 bl = io_buffer_get_list(ctx, p->bgid); in io_provide_buffers()
565 if (unlikely(!bl)) { in io_provide_buffers()
566 bl = kzalloc(sizeof(*bl), GFP_KERNEL_ACCOUNT); in io_provide_buffers()
567 if (!bl) { in io_provide_buffers()
571 INIT_LIST_HEAD(&bl->buf_list); in io_provide_buffers()
572 ret = io_buffer_add_list(ctx, bl, p->bgid); in io_provide_buffers()
574 kfree(bl); in io_provide_buffers()
579 if (bl->flags & IOBL_BUF_RING) { in io_provide_buffers()
584 ret = io_add_buffers(ctx, p, bl); in io_provide_buffers()
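
io_provide_buffers() is the legacy, non-ring path: the io_buffer_list is allocated lazily on first use of a group, and new buffers are appended to its list. The userspace side is IORING_OP_PROVIDE_BUFFERS; a minimal liburing sketch, with group id, buffer count and size as illustrative values:

    #include <liburing.h>

    #define BGID    1
    #define NR_BUFS 8
    #define BUF_LEN 4096

    static char bufs[NR_BUFS][BUF_LEN];

    /* Sketch: hand NR_BUFS contiguous buffers to group BGID, ids 0..NR_BUFS-1. */
    static int provide(struct io_uring *ring)
    {
            struct io_uring_sqe *sqe = io_uring_get_sqe(ring);

            if (!sqe)
                    return -1;
            io_uring_prep_provide_buffers(sqe, bufs, BUF_LEN, NR_BUFS, BGID, 0);
            return io_uring_submit(ring);
    }
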
597 struct io_buffer_list *bl, *free_bl = NULL; in io_register_pbuf_ring() local
619 bl = io_buffer_get_list(ctx, reg.bgid); in io_register_pbuf_ring()
620 if (bl) { in io_register_pbuf_ring()
622 if (bl->flags & IOBL_BUF_RING || !list_empty(&bl->buf_list)) in io_register_pbuf_ring()
624 io_destroy_bl(ctx, bl); in io_register_pbuf_ring()
627 free_bl = bl = kzalloc(sizeof(*bl), GFP_KERNEL); in io_register_pbuf_ring()
628 if (!bl) in io_register_pbuf_ring()
640 ret = io_create_region_mmap_safe(ctx, &bl->region, &rd, mmap_offset); in io_register_pbuf_ring()
643 br = io_region_get_ptr(&bl->region); in io_register_pbuf_ring()
662 bl->nr_entries = reg.ring_entries; in io_register_pbuf_ring()
663 bl->mask = reg.ring_entries - 1; in io_register_pbuf_ring()
664 bl->flags |= IOBL_BUF_RING; in io_register_pbuf_ring()
665 bl->buf_ring = br; in io_register_pbuf_ring()
667 bl->flags |= IOBL_INC; in io_register_pbuf_ring()
668 io_buffer_add_list(ctx, bl, reg.bgid); in io_register_pbuf_ring()
671 io_free_region(ctx, &bl->region); in io_register_pbuf_ring()
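
Registration above is driven from userspace through IORING_REGISTER_PBUF_RING; liburing wraps the sizing, mapping and register call in io_uring_setup_buf_ring(). A sketch with illustrative entry count and group id:

    #include <liburing.h>

    #define ENTRIES 256
    #define BGID    0

    static char pbuf[4096];

    /* Sketch: register a ring for group BGID and publish one buffer. */
    static struct io_uring_buf_ring *setup_pbuf_ring(struct io_uring *ring)
    {
            int err;
            struct io_uring_buf_ring *br;

            br = io_uring_setup_buf_ring(ring, ENTRIES, BGID, 0, &err);
            if (!br)
                    return NULL;
            io_uring_buf_ring_add(br, pbuf, sizeof(pbuf), 0 /* bid */,
                                  io_uring_buf_ring_mask(ENTRIES), 0);
            io_uring_buf_ring_advance(br, 1);   /* store-release of tail */
            return br;
    }

Teardown from userspace goes through io_uring_free_buf_ring(), which pairs with the io_unregister_pbuf_ring() path below.
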
679 struct io_buffer_list *bl; in io_unregister_pbuf_ring() local
690 bl = io_buffer_get_list(ctx, reg.bgid); in io_unregister_pbuf_ring()
691 if (!bl) in io_unregister_pbuf_ring()
693 if (!(bl->flags & IOBL_BUF_RING)) in io_unregister_pbuf_ring()
697 xa_erase(&ctx->io_bl_xa, bl->bgid); in io_unregister_pbuf_ring()
699 io_put_bl(ctx, bl); in io_unregister_pbuf_ring()
706 struct io_buffer_list *bl; in io_register_pbuf_status() local
716 bl = io_buffer_get_list(ctx, buf_status.buf_group); in io_register_pbuf_status()
717 if (!bl) in io_register_pbuf_status()
719 if (!(bl->flags & IOBL_BUF_RING)) in io_register_pbuf_status()
722 buf_status.head = bl->head; in io_register_pbuf_status()
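
The status query above exists because tail alone only says how much userspace has published; the kernel's private head says how much was actually consumed. liburing exposes it as io_uring_buf_ring_head() (present in recent liburing releases; treating its availability as an assumption here):

    #include <liburing.h>
    #include <stdint.h>

    /* Sketch: free slots in a ring = entries - (published - consumed). */
    static int ring_space(struct io_uring *ring, int bgid,
                          uint16_t tail, unsigned int entries)
    {
            uint16_t head;

            if (io_uring_buf_ring_head(ring, bgid, &head))
                    return -1;
            return entries - (uint16_t)(tail - head);
    }
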
732 struct io_buffer_list *bl; in io_pbuf_get_region() local
736 bl = xa_load(&ctx->io_bl_xa, bgid); in io_pbuf_get_region()
737 if (!bl || !(bl->flags & IOBL_BUF_RING)) in io_pbuf_get_region()
739 return &bl->region; in io_pbuf_get_region()
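
io_pbuf_get_region() backs the mmap path: when a ring is registered with IOU_PBUF_RING_MMAP the kernel allocates the memory, and userspace maps it at a per-group offset built from the uapi macros. A sketch of that userspace side (the wrapper name is illustrative):

    #include <sys/mman.h>
    #include <linux/io_uring.h>

    /* Sketch: each buffer group has its own mmap offset; the kernel
     * resolves it back to the region via io_pbuf_get_region(). */
    static void *map_pbuf_ring(int ring_fd, unsigned int bgid, size_t size)
    {
            off_t off = IORING_OFF_PBUF_RING |
                        ((off_t)bgid << IORING_OFF_PBUF_SHIFT);

            return mmap(NULL, size, PROT_READ | PROT_WRITE,
                        MAP_SHARED | MAP_POPULATE, ring_fd, off);
    }
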