Lines Matching refs:iov (references to the identifier iov in the Linux vringh implementation, drivers/vhost/vringh.c)

83 void vringh_kiov_advance(struct vringh_kiov *iov, size_t len)  in vringh_kiov_advance()  argument
85 while (len && iov->i < iov->used) { in vringh_kiov_advance()
86 size_t partlen = min(iov->iov[iov->i].iov_len, len); in vringh_kiov_advance()
88 iov->consumed += partlen; in vringh_kiov_advance()
89 iov->iov[iov->i].iov_len -= partlen; in vringh_kiov_advance()
90 iov->iov[iov->i].iov_base += partlen; in vringh_kiov_advance()
92 if (!iov->iov[iov->i].iov_len) { in vringh_kiov_advance()
94 iov->iov[iov->i].iov_len = iov->consumed; in vringh_kiov_advance()
95 iov->iov[iov->i].iov_base -= iov->consumed; in vringh_kiov_advance()
97 iov->consumed = 0; in vringh_kiov_advance()
98 iov->i++; in vringh_kiov_advance()
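
vringh_kiov_advance consumes len bytes from the front of the kiov: it shrinks the current element in place, tracking how much of it has been eaten in consumed, and once an element is drained it restores that element's original base and length before stepping to the next one. A minimal userspace sketch of the same pattern, using a simplified, hypothetical demo_kiov in place of struct vringh_kiov:

#include <stddef.h>
#include <sys/uio.h>

struct demo_kiov {
	struct iovec *iov;	/* element array */
	size_t i;		/* current element */
	size_t used;		/* number of valid elements */
	size_t consumed;	/* bytes already taken from element i */
};

static void demo_kiov_advance(struct demo_kiov *kiov, size_t len)
{
	while (len && kiov->i < kiov->used) {
		size_t partlen = kiov->iov[kiov->i].iov_len;

		if (partlen > len)
			partlen = len;

		/* Shrink the current element in place. */
		kiov->consumed += partlen;
		kiov->iov[kiov->i].iov_len -= partlen;
		kiov->iov[kiov->i].iov_base =
			(char *)kiov->iov[kiov->i].iov_base + partlen;

		if (!kiov->iov[kiov->i].iov_len) {
			/* Element drained: restore it, then move on. */
			kiov->iov[kiov->i].iov_len = kiov->consumed;
			kiov->iov[kiov->i].iov_base =
				(char *)kiov->iov[kiov->i].iov_base - kiov->consumed;
			kiov->consumed = 0;
			kiov->i++;
		}

		len -= partlen;
	}
}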
108 struct vringh_kiov *iov, in vringh_iov_xfer() argument
116 while (len && iov->i < iov->used) { in vringh_iov_xfer()
119 partlen = min(iov->iov[iov->i].iov_len, len); in vringh_iov_xfer()
120 err = xfer(vrh, iov->iov[iov->i].iov_base, ptr, partlen); in vringh_iov_xfer()
127 vringh_kiov_advance(iov, partlen); in vringh_iov_xfer()
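
vringh_iov_xfer applies a caller-supplied xfer() callback to one element at a time and then calls vringh_kiov_advance for the amount it copied. A compact sketch of the same loop for the read-out direction, building on the hypothetical demo_kiov and demo_kiov_advance from the previous sketch and using memcpy where the kernel helper would call xfer():

#include <string.h>

/* Copy up to len bytes out of the kiov into ptr; returns bytes copied. */
static size_t demo_kiov_copy_out(struct demo_kiov *kiov, void *ptr, size_t len)
{
	size_t done = 0;

	while (len && kiov->i < kiov->used) {
		size_t partlen = kiov->iov[kiov->i].iov_len;

		if (partlen > len)
			partlen = len;

		/* Stand-in for the xfer() callback. */
		memcpy((char *)ptr + done, kiov->iov[kiov->i].iov_base, partlen);

		demo_kiov_advance(kiov, partlen);
		done += partlen;
		len -= partlen;
	}
	return done;
}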
208 static int resize_iovec(struct vringh_kiov *iov, gfp_t gfp) in resize_iovec() argument
211 unsigned int flag, new_num = (iov->max_num & ~VRINGH_IOV_ALLOCATED) * 2; in resize_iovec()
216 flag = (iov->max_num & VRINGH_IOV_ALLOCATED); in resize_iovec()
218 new = krealloc_array(iov->iov, new_num, in resize_iovec()
223 memcpy(new, iov->iov, in resize_iovec()
224 iov->max_num * sizeof(struct iovec)); in resize_iovec()
230 iov->iov = new; in resize_iovec()
231 iov->max_num = (new_num | flag); in resize_iovec()
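
resize_iovec grows the element array by doubling (with a floor of 8 entries). The first time it grows it must switch from the caller-provided buffer to a heap allocation, which is what the VRINGH_IOV_ALLOCATED flag tracks; the kernel folds that flag into max_num, while the sketch below keeps it as a separate field for clarity. A userspace approximation with hypothetical names:

#include <stdlib.h>
#include <string.h>
#include <sys/uio.h>

struct demo_vec {
	struct iovec *iov;
	size_t used;
	size_t max_num;
	int heap_allocated;	/* plays the role of VRINGH_IOV_ALLOCATED */
};

static int demo_vec_grow(struct demo_vec *v)
{
	size_t new_num = v->max_num * 2;
	struct iovec *new;

	if (new_num < 8)
		new_num = 8;

	if (v->heap_allocated) {
		new = realloc(v->iov, new_num * sizeof(*new));
	} else {
		/* First growth: move off the caller-provided buffer. */
		new = malloc(new_num * sizeof(*new));
		if (new) {
			memcpy(new, v->iov, v->max_num * sizeof(*new));
			v->heap_allocated = 1;
		}
	}
	if (!new)
		return -1;

	v->iov = new;
	v->max_num = new_num;
	return 0;
}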
316 struct vringh_kiov *iov; in __vringh_iov() local
364 iov = wiov; in __vringh_iov()
366 iov = riov; in __vringh_iov()
375 if (!iov) { in __vringh_iov()
393 if (unlikely(iov->used == (iov->max_num & ~VRINGH_IOV_ALLOCATED))) { in __vringh_iov()
394 err = resize_iovec(iov, gfp); in __vringh_iov()
399 iov->iov[iov->used].iov_base = addr; in __vringh_iov()
400 iov->iov[iov->used].iov_len = len; in __vringh_iov()
401 iov->used++; in __vringh_iov()
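
__vringh_iov walks the descriptor chain, picks riov or wiov depending on whether the descriptor is device-readable or device-writable, and appends each (address, length) pair, growing the array once used reaches its capacity. The append step, building on the hypothetical demo_vec and demo_vec_grow above:

/* Append one (addr, len) span, growing the array first if it is full. */
static int demo_vec_push(struct demo_vec *v, void *addr, size_t len)
{
	if (v->used == v->max_num && demo_vec_grow(v) < 0)
		return -1;

	v->iov[v->used].iov_base = addr;
	v->iov[v->used].iov_len = len;
	v->used++;
	return 0;
}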
713 BUILD_BUG_ON(offsetof(struct vringh_kiov, iov) != in vringh_getdesc_user()
714 offsetof(struct vringh_iov, iov)); in vringh_getdesc_user()
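
The BUILD_BUG_ON above asserts that struct vringh_iov (userspace struct iovec elements) and struct vringh_kiov (kernel struct kvec elements) place their iov member at the same offset, which is what lets vringh_getdesc_user treat one as the other. The equivalent compile-time check in plain C11, with hypothetical stand-in types:

#include <assert.h>
#include <stddef.h>
#include <sys/uio.h>

struct demo_uvec { struct iovec *iov; size_t consumed; unsigned i, used, max_num; };
struct demo_kvec { struct iovec *iov; size_t consumed; unsigned i, used, max_num; };

/* Fails at compile time, not at run time, if the layouts ever diverge. */
static_assert(offsetof(struct demo_uvec, iov) == offsetof(struct demo_kvec, iov),
	      "demo_uvec and demo_kvec must stay layout-compatible");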
1099 struct bio_vec iov[], in iotlb_translate() argument
1129 bvec_set_page(&iov[ret], pfn_to_page(pfn), min(len - s, size), in iotlb_translate()
1150 struct bio_vec iov[16]; in copy_from_iotlb() local
1157 iov, ARRAY_SIZE(iov), VHOST_MAP_RO); in copy_from_iotlb()
1159 ret = ARRAY_SIZE(iov); in copy_from_iotlb()
1163 iov_iter_bvec(&iter, ITER_SOURCE, iov, ret, translated); in copy_from_iotlb()
1183 struct bio_vec iov[16]; in copy_to_iotlb() local
1190 iov, ARRAY_SIZE(iov), VHOST_MAP_WO); in copy_to_iotlb()
1192 ret = ARRAY_SIZE(iov); in copy_to_iotlb()
1196 iov_iter_bvec(&iter, ITER_DEST, iov, ret, translated); in copy_to_iotlb()
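
copy_from_iotlb and copy_to_iotlb both translate the guest range into at most 16 bio_vec entries via iotlb_translate, then wrap them in a bvec-backed iov_iter and copy through it; the two differ only in direction (ITER_SOURCE with VHOST_MAP_RO versus ITER_DEST with VHOST_MAP_WO). A userspace analog of the read direction that gathers from an already-translated span list into a flat buffer (the translation step itself is omitted, and the names are illustrative):

#include <stddef.h>
#include <string.h>
#include <sys/uio.h>

/* Gather up to len bytes from the translated spans into dst. */
static size_t demo_copy_from_spans(void *dst, size_t len,
				   const struct iovec *spans, size_t nspans)
{
	size_t done = 0;

	for (size_t n = 0; n < nspans && done < len; n++) {
		size_t part = spans[n].iov_len;

		if (part > len - done)
			part = len - done;
		memcpy((char *)dst + done, spans[n].iov_base, part);
		done += part;
	}
	return done;
}

The write direction is the mirror image: memcpy from the flat buffer into each span.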
1213 struct bio_vec iov; in getu16_iotlb() local
1219 &iov, 1, VHOST_MAP_RO); in getu16_iotlb()
1223 kaddr = kmap_atomic(iov.bv_page); in getu16_iotlb()
1224 from = kaddr + iov.bv_offset; in getu16_iotlb()
1234 struct bio_vec iov; in putu16_iotlb() local
1240 &iov, 1, VHOST_MAP_WO); in putu16_iotlb()
1244 kaddr = kmap_atomic(iov.bv_page); in putu16_iotlb()
1245 to = kaddr + iov.bv_offset; in putu16_iotlb()
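
getu16_iotlb and putu16_iotlb translate one guest address into a single bio_vec, map its page with kmap_atomic, and read or write a 16-bit field in the ring's byte order. A minimal userspace illustration of that final load/store step, assuming the translated location is already mapped at p + off and the ring uses little-endian (virtio 1.0) layout; le16toh/htole16 come from <endian.h> on glibc/BSD systems:

#include <endian.h>
#include <stdint.h>
#include <string.h>

/* Read a possibly unaligned little-endian u16 at p + off. */
static uint16_t demo_getu16(const void *p, size_t off)
{
	uint16_t raw;

	memcpy(&raw, (const char *)p + off, sizeof(raw));
	return le16toh(raw);
}

/* Write val back at p + off in little-endian order. */
static void demo_putu16(void *p, size_t off, uint16_t val)
{
	uint16_t raw = htole16(val);

	memcpy((char *)p + off, &raw, sizeof(raw));
}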