Lines Matching refs: src_start

492 unsigned long src_start, in mfill_atomic_hugetlb() argument
519 src_addr = src_start; in mfill_atomic_hugetlb()
563 while (src_addr < src_start + len) { in mfill_atomic_hugetlb()
648 unsigned long src_start,
699 unsigned long src_start, in mfill_atomic() argument
718 VM_WARN_ON_ONCE(src_start + len <= src_start); in mfill_atomic()
721 src_addr = src_start; in mfill_atomic()
767 src_start, len, flags); in mfill_atomic()
775 while (src_addr < src_start + len) { in mfill_atomic()
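The references above show mfill_atomic()'s two guard rails on the source range: the wraparound check at line 718 and the page-walk loop bounded at line 775. A minimal userspace sketch of that pattern follows; PAGE_SIZE is hardcoded here for illustration, and the kernel's VM_WARN_ON_ONCE only warns rather than returning.

#include <stdbool.h>
#include <stdio.h>

#define PAGE_SIZE 4096UL /* illustrative; the kernel gets this from the arch */

/* Mirrors the sanity check at line 718: src_start + len must stay
 * strictly above src_start, so the range neither wraps nor is empty. */
static bool range_is_sane(unsigned long src_start, unsigned long len)
{
	return src_start + len > src_start;
}

int main(void)
{
	unsigned long src_start = 0x10000, len = 3 * PAGE_SIZE;

	if (!range_is_sane(src_start, len))
		return 1;

	/* Mirrors the loop at line 775: advance one page at a time
	 * until the whole source range has been visited. */
	for (unsigned long src_addr = src_start;
	     src_addr < src_start + len;
	     src_addr += PAGE_SIZE)
		printf("would fill page at %#lx\n", src_addr);

	return 0;
}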
862 unsigned long src_start, unsigned long len, in mfill_atomic_copy() argument
865 return mfill_atomic(ctx, dst_start, src_start, len, in mfill_atomic_copy()
1530 unsigned long src_start, in find_vmas_mm_locked() argument
1543 if (src_start >= vma->vm_start && src_start < vma->vm_end) in find_vmas_mm_locked()
1546 vma = vma_lookup(mm, src_start); in find_vmas_mm_locked()
1557 unsigned long src_start, in uffd_move_lock() argument
1573 if (src_start >= vma->vm_start && src_start < vma->vm_end) { in uffd_move_lock()
1590 *src_vmap = lock_vma_under_rcu(mm, src_start); in uffd_move_lock()
1598 err = find_vmas_mm_locked(mm, dst_start, src_start, dst_vmap, src_vmap); in uffd_move_lock()
1633 unsigned long src_start, in uffd_move_lock() argument
1640 err = find_vmas_mm_locked(mm, dst_start, src_start, dst_vmap, src_vmap); in uffd_move_lock()
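Both find_vmas_mm_locked() (line 1543) and uffd_move_lock() (line 1573) test whether src_start falls inside an already-found VMA before doing a fresh lookup. The test treats a VMA as the half-open interval [vm_start, vm_end). A hedged sketch, with struct vma_range standing in for the kernel's struct vm_area_struct:

#include <stdbool.h>
#include <stdio.h>

/* Stand-in for the two fields of struct vm_area_struct used here. */
struct vma_range {
	unsigned long vm_start; /* inclusive */
	unsigned long vm_end;   /* exclusive */
};

/* Mirrors the test at lines 1543/1573: an address belongs to a VMA
 * iff it falls in the half-open interval [vm_start, vm_end). */
static bool vma_contains(const struct vma_range *vma, unsigned long addr)
{
	return addr >= vma->vm_start && addr < vma->vm_end;
}

int main(void)
{
	struct vma_range vma = { .vm_start = 0x1000, .vm_end = 0x5000 };

	printf("%d\n", vma_contains(&vma, 0x1000)); /* 1: start is inside */
	printf("%d\n", vma_contains(&vma, 0x5000)); /* 0: end is one past */
	return 0;
}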
1731 unsigned long src_start, unsigned long len, __u64 mode) in move_pages() argument
1741 VM_WARN_ON_ONCE(src_start & ~PAGE_MASK); in move_pages()
1746 VM_WARN_ON_ONCE(src_start + len < src_start); in move_pages()
1749 err = uffd_move_lock(mm, dst_start, src_start, &dst_vma, &src_vma); in move_pages()
1766 if (src_start + len > src_vma->vm_end) in move_pages()
1778 for (src_addr = src_start, dst_addr = dst_start; in move_pages()
1779 src_addr < src_start + len;) { in move_pages()
1822 if (move_splits_huge_pmd(dst_addr, src_addr, src_start + len) || in move_pages()
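move_pages() sanitizes its source range up front: src_start must be page aligned (line 1741), src_start + len must not wrap (line 1746), and the whole range must fit inside the source VMA (line 1766). A compact userspace sketch of those three checks, with PAGE_SIZE and struct vma_range as illustrative stand-ins:

#include <stdbool.h>
#include <stdio.h>

#define PAGE_SIZE 4096UL            /* illustrative value */
#define PAGE_MASK (~(PAGE_SIZE - 1))

struct vma_range {
	unsigned long vm_start;
	unsigned long vm_end;
};

/* Mirrors the sanitization at lines 1741/1746 and the fit check at
 * line 1766: the range must be page aligned, must not wrap, and must
 * end within the source VMA. */
static bool move_range_ok(const struct vma_range *src_vma,
			  unsigned long src_start, unsigned long len)
{
	if (src_start & ~PAGE_MASK)            /* not page aligned */
		return false;
	if (src_start + len < src_start)       /* wraps around */
		return false;
	if (src_start + len > src_vma->vm_end) /* spills past the VMA */
		return false;
	return true;
}

int main(void)
{
	struct vma_range vma = { .vm_start = 0x10000, .vm_end = 0x20000 };

	printf("%d\n", move_range_ok(&vma, 0x10000, 4 * PAGE_SIZE)); /* 1 */
	printf("%d\n", move_range_ok(&vma, 0x10001, PAGE_SIZE));     /* 0 */
	return 0;
}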