Lines matching refs:vring — references to the identifier vring in drivers/virtio/virtio_ring.c (the leading number is the source line, the trailing tag is the enclosing function or "member" for struct fields)
91 struct vring vring; member
125 } vring; member
502 struct vring_virtqueue *vring = to_vvq(vq); in virtqueue_add_desc_split() local
503 struct vring_desc_extra *extra = vring->split.desc_extra; in virtqueue_add_desc_split()
559 WARN_ON_ONCE(total_sg > vq->split.vring.num && !vq->indirect); in virtqueue_add_split()
570 desc = vq->split.vring.desc; in virtqueue_add_split()
624 vq->split.desc_extra[prev & (vq->split.vring.num - 1)].flags &= in virtqueue_add_split()
635 virtqueue_add_desc_split(_vq, vq->split.vring.desc, in virtqueue_add_split()
660 avail = vq->split.avail_idx_shadow & (vq->split.vring.num - 1); in virtqueue_add_split()
661 vq->split.vring.avail->ring[avail] = cpu_to_virtio16(_vq->vdev, head); in virtqueue_add_split()
667 vq->split.vring.avail->idx = cpu_to_virtio16(_vq->vdev, in virtqueue_add_split()
726 vring_avail_event(&vq->split.vring)), in virtqueue_kick_prepare_split()
729 needs_kick = !(vq->split.vring.used->flags & in virtqueue_kick_prepare_split()
749 while (vq->split.vring.desc[i].flags & nextflag) { in detach_buf_split()
790 vq->split.vring.used->idx); in more_used_split()
818 last_used = (vq->last_used_idx & (vq->split.vring.num - 1)); in virtqueue_get_buf_ctx_split()
820 vq->split.vring.used->ring[last_used].id); in virtqueue_get_buf_ctx_split()
822 vq->split.vring.used->ring[last_used].len); in virtqueue_get_buf_ctx_split()
824 if (unlikely(i >= vq->split.vring.num)) { in virtqueue_get_buf_ctx_split()
842 &vring_used_event(&vq->split.vring), in virtqueue_get_buf_ctx_split()
859 vring_used_event(&vq->split.vring) = 0x0; in virtqueue_disable_cb_split()
861 vq->split.vring.avail->flags = in virtqueue_disable_cb_split()
882 vq->split.vring.avail->flags = in virtqueue_enable_cb_prepare_split()
886 vring_used_event(&vq->split.vring) = cpu_to_virtio16(_vq->vdev, in virtqueue_enable_cb_prepare_split()
897 vq->split.vring.used->idx); in virtqueue_poll_split()
915 vq->split.vring.avail->flags = in virtqueue_enable_cb_delayed_split()
923 &vring_used_event(&vq->split.vring), in virtqueue_enable_cb_delayed_split()
926 if (unlikely((u16)(virtio16_to_cpu(_vq->vdev, vq->split.vring.used->idx) in virtqueue_enable_cb_delayed_split()
944 for (i = 0; i < vq->split.vring.num; i++) { in virtqueue_detach_unused_buf_split()
951 vq->split.vring.avail->idx = cpu_to_virtio16(_vq->vdev, in virtqueue_detach_unused_buf_split()
957 BUG_ON(vq->vq.num_free != vq->split.vring.num); in virtqueue_detach_unused_buf_split()
977 vring_split->vring.avail->flags = cpu_to_virtio16(vdev, in virtqueue_vring_init_split()
986 num = vq->split.vring.num; in virtqueue_reinit_split()
988 vq->split.vring.avail->flags = 0; in virtqueue_reinit_split()
989 vq->split.vring.avail->idx = 0; in virtqueue_reinit_split()
992 vq->split.vring.avail->ring[num] = 0; in virtqueue_reinit_split()
994 vq->split.vring.used->flags = 0; in virtqueue_reinit_split()
995 vq->split.vring.used->idx = 0; in virtqueue_reinit_split()
998 *(__virtio16 *)&(vq->split.vring.used->ring[num]) = 0; in virtqueue_reinit_split()
1018 u32 num = vring_split->vring.num; in vring_alloc_state_extra_split()
1044 vring_split->vring.desc, in vring_free_split()
1092 vring_init(&vring_split->vring, num, queue, vring_align); in vring_alloc_queue_split()
1159 virtqueue_init(vq, vring_split.vring.num); in virtqueue_resize_split()
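The split-ring references above (everything through virtqueue_resize_split()) index into the classic three-part layout from include/uapi/linux/virtio_ring.h. A minimal sketch of that layout, with the kernel's __virtio16/__virtio32/__virtio64 types simplified to plain fixed-width integers for illustration:

```c
#include <stdint.h>

struct vring_desc {            /* one descriptor-table entry */
	uint64_t addr;         /* guest-physical buffer address */
	uint32_t len;          /* buffer length in bytes */
	uint16_t flags;        /* VRING_DESC_F_NEXT / _WRITE / _INDIRECT */
	uint16_t next;         /* index of the next descriptor when NEXT is set */
};

struct vring_avail {           /* driver -> device ("available") ring */
	uint16_t flags;
	uint16_t idx;
	uint16_t ring[];       /* head indices of posted descriptor chains */
};

struct vring_used_elem {
	uint32_t id;           /* head index of a completed chain */
	uint32_t len;          /* bytes written into the buffers by the device */
};

struct vring_used {            /* device -> driver ("used") ring */
	uint16_t flags;
	uint16_t idx;
	struct vring_used_elem ring[];
};

struct vring {                 /* the vq->split.vring member seen above */
	unsigned int num;      /* ring size, a power of two */
	struct vring_desc *desc;
	struct vring_avail *avail;
	struct vring_used *used;
};

/*
 * With VIRTIO_RING_F_EVENT_IDX, one extra 16-bit word sits just past each
 * ring; the "ring[num]" stores in virtqueue_reinit_split() (source lines 992
 * and 998 above) reset exactly these words:
 */
#define vring_used_event(vr)  ((vr)->avail->ring[(vr)->num])
#define vring_avail_event(vr) (*(uint16_t *)&(vr)->used->ring[(vr)->num])
```

vring_used_event() is where the driver asks for an interrupt after a given used index (virtqueue_enable_cb_prepare_split(), source line 886), while vring_avail_event() is the device-side mirror consulted by virtqueue_kick_prepare_split() (source line 726). vring_init() (source lines 1092 and 2694) lays the three parts out contiguously: descriptor table first, then the available ring, then the used ring aligned up to vring_align.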
1270 BUG_ON(id == vq->packed.vring.num); in virtqueue_add_indirect_packed()
1294 vq->packed.vring.desc[head].addr = cpu_to_le64(addr); in virtqueue_add_indirect_packed()
1295 vq->packed.vring.desc[head].len = cpu_to_le32(total_sg * in virtqueue_add_indirect_packed()
1297 vq->packed.vring.desc[head].id = cpu_to_le16(id); in virtqueue_add_indirect_packed()
1313 vq->packed.vring.desc[head].flags = cpu_to_le16(VRING_DESC_F_INDIRECT | in virtqueue_add_indirect_packed()
1321 if (n >= vq->packed.vring.num) { in virtqueue_add_indirect_packed()
1401 WARN_ON_ONCE(total_sg > vq->packed.vring.num && !vq->indirect); in virtqueue_add_packed()
1403 desc = vq->packed.vring.desc; in virtqueue_add_packed()
1415 BUG_ON(id == vq->packed.vring.num); in virtqueue_add_packed()
1447 if ((unlikely(++i >= vq->packed.vring.num))) { in virtqueue_add_packed()
1478 vq->packed.vring.desc[head].flags = head_flags; in virtqueue_add_packed()
1499 if (i >= vq->packed.vring.num) in virtqueue_add_packed()
1532 snapshot.u32 = *(u32 *)vq->packed.vring.device; in virtqueue_kick_prepare_packed()
1548 event_idx -= vq->packed.vring.num; in virtqueue_kick_prepare_packed()
1608 flags = le16_to_cpu(vq->packed.vring.desc[idx].flags); in is_used_desc_packed()
1655 id = le16_to_cpu(vq->packed.vring.desc[last_used].id); in virtqueue_get_buf_ctx_packed()
1656 *len = le32_to_cpu(vq->packed.vring.desc[last_used].len); in virtqueue_get_buf_ctx_packed()
1658 if (unlikely(id >= vq->packed.vring.num)) { in virtqueue_get_buf_ctx_packed()
1672 if (unlikely(last_used >= vq->packed.vring.num)) { in virtqueue_get_buf_ctx_packed()
1673 last_used -= vq->packed.vring.num; in virtqueue_get_buf_ctx_packed()
1687 &vq->packed.vring.driver->off_wrap, in virtqueue_get_buf_ctx_packed()
1702 vq->packed.vring.driver->flags = in virtqueue_disable_cb_packed()
1719 vq->packed.vring.driver->off_wrap = in virtqueue_enable_cb_prepare_packed()
1732 vq->packed.vring.driver->flags = in virtqueue_enable_cb_prepare_packed()
1767 bufs = (vq->packed.vring.num - vq->vq.num_free) * 3 / 4; in virtqueue_enable_cb_delayed_packed()
1772 if (used_idx >= vq->packed.vring.num) { in virtqueue_enable_cb_delayed_packed()
1773 used_idx -= vq->packed.vring.num; in virtqueue_enable_cb_delayed_packed()
1777 vq->packed.vring.driver->off_wrap = cpu_to_le16(used_idx | in virtqueue_enable_cb_delayed_packed()
1791 vq->packed.vring.driver->flags = in virtqueue_enable_cb_delayed_packed()
1821 for (i = 0; i < vq->packed.vring.num; i++) { in virtqueue_detach_unused_buf_packed()
1831 BUG_ON(vq->vq.num_free != vq->packed.vring.num); in virtqueue_detach_unused_buf_packed()
1859 if (vring_packed->vring.desc) in vring_free_packed()
1861 vring_packed->vring.desc, in vring_free_packed()
1865 if (vring_packed->vring.driver) in vring_free_packed()
1867 vring_packed->vring.driver, in vring_free_packed()
1871 if (vring_packed->vring.device) in vring_free_packed()
1873 vring_packed->vring.device, in vring_free_packed()
1899 vring_packed->vring.desc = ring; in vring_alloc_queue_packed()
1912 vring_packed->vring.driver = driver; in vring_alloc_queue_packed()
1923 vring_packed->vring.device = device; in vring_alloc_queue_packed()
1926 vring_packed->vring.num = num; in vring_alloc_queue_packed()
1939 u32 num = vring_packed->vring.num; in vring_alloc_state_extra_packed()
1973 vring_packed->vring.driver->flags = in virtqueue_vring_init_packed()
1989 memset(vq->packed.vring.device, 0, vq->packed.event_size_in_bytes); in virtqueue_reinit_packed()
1990 memset(vq->packed.vring.driver, 0, vq->packed.event_size_in_bytes); in virtqueue_reinit_packed()
1993 memset(vq->packed.vring.desc, 0, vq->packed.ring_size_in_bytes); in virtqueue_reinit_packed()
1995 virtqueue_init(vq, vq->packed.vring.num); in virtqueue_reinit_packed()
2087 virtqueue_init(vq, vring_packed.vring.num); in virtqueue_resize_packed()
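The packed-ring references (vq->packed.vring.*) replace the separate available/used rings with a single descriptor ring plus two small event-suppression areas, which is why the hits above touch .desc, .driver and .device rather than .avail and .used. A sketch of those fields, again with simplified types; the aggregate at the bottom is named only for this sketch (in virtio_ring.c these pointers live in the driver-private packed-ring state):

```c
#include <stdint.h>

struct vring_packed_desc {        /* one slot of the single descriptor ring */
	uint64_t addr;            /* little-endian (__le64 etc.) in the real header */
	uint32_t len;
	uint16_t id;              /* buffer id, echoed back by the device */
	uint16_t flags;           /* AVAIL/USED wrap bits plus NEXT/WRITE/INDIRECT */
};

struct vring_packed_desc_event {  /* event-suppression area */
	uint16_t off_wrap;        /* descriptor offset plus wrap counter */
	uint16_t flags;           /* ENABLE / DISABLE / DESC notification mode */
};

struct packed_vring_sketch {      /* illustrative name for vq->packed.vring */
	unsigned int num;                         /* .num */
	struct vring_packed_desc *desc;           /* .desc: the descriptor ring */
	struct vring_packed_desc_event *driver;   /* .driver: written by the driver */
	struct vring_packed_desc_event *device;   /* .device: written by the device */
};
```

is_used_desc_packed() (source line 1608) decides ownership purely from the AVAIL/USED bits in desc[idx].flags, and the off_wrap writes at source lines 1719 and 1777 combine a descriptor index with the wrap counter in the top bit.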
2550 virtqueue_init(vq, vring_split->vring.num); in __vring_new_virtqueue()
2649 if ((vq->packed_ring ? vq->packed.vring.num : vq->split.vring.num) == num) in virtqueue_resize()
2694 vring_init(&vring_split.vring, num, pages, vring_align); in vring_new_virtqueue()
2709 vq->packed.vring.desc, in vring_free()
2715 vq->packed.vring.driver, in vring_free()
2721 vq->packed.vring.device, in vring_free()
2730 vq->split.vring.desc, in vring_free()
2794 return vq->packed_ring ? vq->packed.vring.num : vq->split.vring.num; in virtqueue_get_vring_size()
2894 ((char *)vq->split.vring.avail - (char *)vq->split.vring.desc); in virtqueue_get_avail_addr()
2908 ((char *)vq->split.vring.used - (char *)vq->split.vring.desc); in virtqueue_get_used_addr()
2913 const struct vring *virtqueue_get_vring(struct virtqueue *vq) in virtqueue_get_vring()
2915 return &to_vvq(vq)->split.vring; in virtqueue_get_vring()
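The last few hits are the public accessors, which is how drivers are meant to learn about ring geometry instead of reaching into vq->split/vq->packed directly. A hedged usage sketch; the helper function and its printout are hypothetical, only the two accessor calls are taken from the listing:

```c
#include <linux/printk.h>
#include <linux/virtio.h>
#include <linux/virtio_ring.h>

/* Hypothetical helper: dump the geometry of a virtqueue we were handed. */
static void example_dump_vq_geometry(struct virtqueue *vq)
{
	/* Layout-agnostic ring size (source line 2794 above). */
	unsigned int num = virtqueue_get_vring_size(vq);

	/* Split-ring view of the queue (source lines 2913-2915 above). */
	const struct vring *vr = virtqueue_get_vring(vq);

	pr_info("virtqueue: %u entries, descriptor table at %p\n",
		num, vr->desc);
}
```

Note that virtqueue_get_vring() returns &vq->split.vring unconditionally (source line 2915), so virtqueue_get_vring_size() is the accessor that works for both the split and packed layouts.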