Lines Matching refs:iov
(Each hit shows the source line number, the matching source line, and the enclosing function; "local" and "argument" mark where iov is declared as a local variable or a function parameter.)
116 const struct iovec *iov = i->iov; \
120 iov, (I)) \
121 i->nr_segs -= iov - i->iov; \
122 i->iov = iov; \
177 const struct iovec *iov; in copy_page_to_iter_iovec() local
189 iov = i->iov; in copy_page_to_iter_iovec()
191 buf = iov->iov_base + skip; in copy_page_to_iter_iovec()
192 copy = min(bytes, iov->iov_len - skip); in copy_page_to_iter_iovec()
206 iov++; in copy_page_to_iter_iovec()
207 buf = iov->iov_base; in copy_page_to_iter_iovec()
208 copy = min(bytes, iov->iov_len); in copy_page_to_iter_iovec()
222 copy = min(bytes, iov->iov_len - skip); in copy_page_to_iter_iovec()
234 iov++; in copy_page_to_iter_iovec()
235 buf = iov->iov_base; in copy_page_to_iter_iovec()
236 copy = min(bytes, iov->iov_len); in copy_page_to_iter_iovec()
246 if (skip == iov->iov_len) { in copy_page_to_iter_iovec()
247 iov++; in copy_page_to_iter_iovec()
251 i->nr_segs -= iov - i->iov; in copy_page_to_iter_iovec()
252 i->iov = iov; in copy_page_to_iter_iovec()
261 const struct iovec *iov; in copy_page_from_iter_iovec() local
273 iov = i->iov; in copy_page_from_iter_iovec()
275 buf = iov->iov_base + skip; in copy_page_from_iter_iovec()
276 copy = min(bytes, iov->iov_len - skip); in copy_page_from_iter_iovec()
290 iov++; in copy_page_from_iter_iovec()
291 buf = iov->iov_base; in copy_page_from_iter_iovec()
292 copy = min(bytes, iov->iov_len); in copy_page_from_iter_iovec()
306 copy = min(bytes, iov->iov_len - skip); in copy_page_from_iter_iovec()
318 iov++; in copy_page_from_iter_iovec()
319 buf = iov->iov_base; in copy_page_from_iter_iovec()
320 copy = min(bytes, iov->iov_len); in copy_page_from_iter_iovec()
330 if (skip == iov->iov_len) { in copy_page_from_iter_iovec()
331 iov++; in copy_page_from_iter_iovec()
335 i->nr_segs -= iov - i->iov; in copy_page_from_iter_iovec()
336 i->iov = iov; in copy_page_from_iter_iovec()
453 for (p = i->iov, skip = i->iov_offset; count; p++, skip = 0) { in fault_in_iov_iter_readable()
492 for (p = i->iov, skip = i->iov_offset; count; p++, skip = 0) { in fault_in_iov_iter_writeable()
510 const struct iovec *iov, unsigned long nr_segs, in iov_iter_init() argument
518 .iov = iov, in iov_iter_init()
1047 const struct iovec *iov, *end; in iov_iter_iovec_advance() local
1054 for (iov = i->iov, end = iov + i->nr_segs; iov < end; iov++) { in iov_iter_iovec_advance()
1055 if (likely(size < iov->iov_len)) in iov_iter_iovec_advance()
1057 size -= iov->iov_len; in iov_iter_iovec_advance()
1060 i->nr_segs -= iov - i->iov; in iov_iter_iovec_advance()
1061 i->iov = iov; in iov_iter_iovec_advance()
1142 const struct iovec *iov = i->iov; in iov_iter_revert() local
1144 size_t n = (--iov)->iov_len; in iov_iter_revert()
1147 i->iov = iov; in iov_iter_revert()
1164 return min(i->count, i->iov->iov_len - i->iov_offset); in iov_iter_single_seg_count()
1279 size_t len = i->iov[k].iov_len - skip; in iov_iter_alignment_iovec()
1281 res |= (unsigned long)i->iov[k].iov_base + skip; in iov_iter_alignment_iovec()
1349 if (i->iov[k].iov_len) { in iov_iter_gap_alignment()
1350 unsigned long base = (unsigned long)i->iov[k].iov_base; in iov_iter_gap_alignment()
1353 v = base + i->iov[k].iov_len; in iov_iter_gap_alignment()
1354 if (size <= i->iov[k].iov_len) in iov_iter_gap_alignment()
1356 size -= i->iov[k].iov_len; in iov_iter_gap_alignment()
1482 unsigned long addr = (unsigned long)i->iov[k].iov_base + skip; in first_iovec_segment()
1483 size_t len = i->iov[k].iov_len - skip; in first_iovec_segment()
1774 for (p = i->iov; size; skip = 0, p++) { in iov_npages()
1852 return new->iov = kmemdup(new->iov, in dup_iter()
1858 static int copy_compat_iovec_from_user(struct iovec *iov, in copy_compat_iovec_from_user() argument
1880 iov[i].iov_base = compat_ptr(buf); in copy_compat_iovec_from_user()
1881 iov[i].iov_len = len; in copy_compat_iovec_from_user()
1890 static int copy_iovec_from_user(struct iovec *iov, in copy_iovec_from_user() argument
1895 if (copy_from_user(iov, uvec, nr_segs * sizeof(*uvec))) in copy_iovec_from_user()
1898 if ((ssize_t)iov[seg].iov_len < 0) in copy_iovec_from_user()
1909 struct iovec *iov = fast_iov; in iovec_from_user() local
1918 return iov; in iovec_from_user()
1922 iov = kmalloc_array(nr_segs, sizeof(struct iovec), GFP_KERNEL); in iovec_from_user()
1923 if (!iov) in iovec_from_user()
1928 ret = copy_compat_iovec_from_user(iov, uvec, nr_segs); in iovec_from_user()
1930 ret = copy_iovec_from_user(iov, uvec, nr_segs); in iovec_from_user()
1932 if (iov != fast_iov) in iovec_from_user()
1933 kfree(iov); in iovec_from_user()
1937 return iov; in iovec_from_user()
1946 struct iovec *iov; in __import_iovec() local
1948 iov = iovec_from_user(uvec, nr_segs, fast_segs, *iovp, compat); in __import_iovec()
1949 if (IS_ERR(iov)) { in __import_iovec()
1951 return PTR_ERR(iov); in __import_iovec()
1963 ssize_t len = (ssize_t)iov[seg].iov_len; in __import_iovec()
1965 if (!access_ok(iov[seg].iov_base, len)) { in __import_iovec()
1966 if (iov != *iovp) in __import_iovec()
1967 kfree(iov); in __import_iovec()
1974 iov[seg].iov_len = len; in __import_iovec()
1979 iov_iter_init(i, type, iov, nr_segs, total_len); in __import_iovec()
1980 if (iov == *iovp) in __import_iovec()
1983 *iovp = iov; in __import_iovec()
2019 struct iovec *iov, struct iov_iter *i) in import_single_range() argument
2026 iov->iov_base = buf; in import_single_range()
2027 iov->iov_len = len; in import_single_range()
2028 iov_iter_init(i, rw, iov, 1, len); in import_single_range()
2065 i->iov -= state->nr_segs - i->nr_segs; in iov_iter_restore()
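End of matches. For context, below is a minimal sketch (not taken from the listing; the function and variable names other than the kernel APIs are made up for illustration) of how a caller typically builds the single-segment ITER_IOVEC iterator that the functions referenced above, such as iov_iter_init() and import_single_range(), operate on, and then consumes it with copy_to_iter().

/*
 * Illustrative sketch only. example_read_to_user(), ubuf, kernel_data,
 * etc. are invented names; the APIs used are the ones referenced in the
 * listing above plus copy_to_iter()/iov_iter_count() from <linux/uio.h>.
 */
#include <linux/kernel.h>
#include <linux/uio.h>

static ssize_t example_read_to_user(void __user *ubuf, size_t len,
                                    const void *kernel_data, size_t data_len)
{
        struct iovec iov;       /* single-segment vector on the stack */
        struct iov_iter iter;
        int ret;

        /* Checks access_ok(ubuf, len) and calls iov_iter_init() internally. */
        ret = import_single_range(READ, ubuf, len, &iov, &iter);
        if (unlikely(ret))
                return ret;

        /* copy_to_iter() walks iter.iov / iter.nr_segs as seen above. */
        return copy_to_iter(kernel_data,
                            min(data_len, iov_iter_count(&iter)), &iter);
}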