Lines matching refs:vmi — identifier cross-reference output: each entry gives the source line number, the matching code, and the enclosing function, with an "argument" or "local" tag where the match is a declaration rather than a use.

147 static inline struct vm_area_struct *vma_prev_limit(struct vma_iterator *vmi, in vma_prev_limit() argument
150 return mas_prev(&vmi->mas, min); in vma_prev_limit()
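Joining the two fragments above (the unmatched lines 148-149 are just the signature's continuation and the opening brace), the whole helper plausibly reads as follows; the name min for the second parameter is inferred from the mas_prev() argument on line 150:

    static inline struct vm_area_struct *vma_prev_limit(struct vma_iterator *vmi,
                                                        unsigned long min)
    {
            /* Walk backwards through the maple tree, stopping at @min. */
            return mas_prev(&vmi->mas, min);
    }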
153 static inline int vma_iter_clear_gfp(struct vma_iterator *vmi, in vma_iter_clear_gfp() argument
156 vmi->mas.index = start; in vma_iter_clear_gfp()
157 vmi->mas.last = end - 1; in vma_iter_clear_gfp()
158 mas_store_gfp(&vmi->mas, NULL, gfp); in vma_iter_clear_gfp()
159 if (unlikely(mas_is_err(&vmi->mas))) in vma_iter_clear_gfp()
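Filling in what the match list omits (the rest of the parameter list and the return paths), a sketch of the full helper; the parameter order is confirmed by the call at line 2373, while the -ENOMEM/0 return values are an assumption consistent with how that caller tests the result:

    static inline int vma_iter_clear_gfp(struct vma_iterator *vmi,
                    unsigned long start, unsigned long end, gfp_t gfp)
    {
            /* Store NULL over [start, end) to wipe the range in one write. */
            vmi->mas.index = start;
            vmi->mas.last = end - 1;
            mas_store_gfp(&vmi->mas, NULL, gfp);
            if (unlikely(mas_is_err(&vmi->mas)))
                    return -ENOMEM;         /* assumed error value */

            return 0;
    }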
183 static int do_brk_flags(struct vma_iterator *vmi, struct vm_area_struct *brkvma,
194 struct vma_iterator vmi; in SYSCALL_DEFINE1() local
242 vma_iter_init(&vmi, mm, newbrk); in SYSCALL_DEFINE1()
243 brkvma = vma_find(&vmi, oldbrk); in SYSCALL_DEFINE1()
252 ret = do_vma_munmap(&vmi, brkvma, newbrk, oldbrk, &uf, true); in SYSCALL_DEFINE1()
270 vma_iter_init(&vmi, mm, oldbrk); in SYSCALL_DEFINE1()
271 next = vma_find(&vmi, newbrk + PAGE_SIZE + stack_guard_gap); in SYSCALL_DEFINE1()
275 brkvma = vma_prev_limit(&vmi, mm->start_brk); in SYSCALL_DEFINE1()
277 if (do_brk_flags(&vmi, brkvma, oldbrk, newbrk - oldbrk, 0) < 0) in SYSCALL_DEFINE1()
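Read together, the SYSCALL_DEFINE1() matches trace brk()'s two iterator paths: shrink the heap through do_vma_munmap(), grow it through do_brk_flags(). A condensed sketch, not verbatim kernel code (locking and most error handling elided; the vm_start_gap() guard test is an assumption suggested by stack_guard_gap appearing in the vma_find() bound at line 271):

    if (brk <= mm->brk) {                           /* shrinking the heap */
            vma_iter_init(&vmi, mm, newbrk);
            brkvma = vma_find(&vmi, oldbrk);
            ret = do_vma_munmap(&vmi, brkvma, newbrk, oldbrk, &uf, true);
    } else {                                        /* growing the heap */
            vma_iter_init(&vmi, mm, oldbrk);
            next = vma_find(&vmi, newbrk + PAGE_SIZE + stack_guard_gap);
            if (next && newbrk + PAGE_SIZE > vm_start_gap(next))
                    goto out;                       /* next mapping too close */
            brkvma = vma_prev_limit(&vmi, mm->start_brk);
            if (do_brk_flags(&vmi, brkvma, oldbrk, newbrk - oldbrk, 0) < 0)
                    goto out;
    }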
410 VMA_ITERATOR(vmi, mm, addr); in count_vma_pages_range()
414 for_each_vma_range(vmi, vma, end) { in count_vma_pages_range()
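Lines 410 and 414 show the canonical read-side pattern: declare an iterator on the stack with VMA_ITERATOR() and walk a range with for_each_vma_range(). A sketch of the whole function; the clamped page arithmetic in the body is an assumption inferred from the function's name, not visible in the match list:

    static unsigned long count_vma_pages_range(struct mm_struct *mm,
                    unsigned long addr, unsigned long end)
    {
            VMA_ITERATOR(vmi, mm, addr);
            struct vm_area_struct *vma;
            unsigned long nr_pages = 0;

            /* Visit every VMA overlapping [addr, end). */
            for_each_vma_range(vmi, vma, end) {
                    unsigned long vm_start = max(addr, vma->vm_start);
                    unsigned long vm_end = min(end, vma->vm_end);

                    nr_pages += PHYS_PFN(vm_end - vm_start);
            }

            return nr_pages;
    }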
437 VMA_ITERATOR(vmi, mm, 0); in vma_link()
440 if (vma_iter_prealloc(&vmi)) in vma_link()
448 vma_iter_store(&vmi, vma); in vma_link()
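vma_link() introduces the write-side pattern that repeats through the rest of the list: preallocate maple-tree nodes up front, so the later store cannot fail, then write the VMA. A minimal skeleton (the real function also takes the file-mapping lock between the two calls, which is omitted here):

    VMA_ITERATOR(vmi, mm, 0);

    if (vma_iter_prealloc(&vmi))
            return -ENOMEM;         /* allocation is the only failure point */

    vma_iter_store(&vmi, vma);      /* cannot fail after a successful prealloc */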
551 struct vma_iterator *vmi, struct mm_struct *mm) in vma_complete() argument
572 vma_iter_store(vmi, vp->insert); in vma_complete()
660 int vma_expand(struct vma_iterator *vmi, struct vm_area_struct *vma, in vma_expand() argument
683 if (vma_iter_prealloc(vmi)) in vma_expand()
688 if (vma_iter_addr(vmi) != start) in vma_expand()
689 vma_iter_set(vmi, start); in vma_expand()
696 vma_iter_store(vmi, vma); in vma_expand()
698 vma_complete(&vp, vmi, vma->vm_mm); in vma_expand()
715 int vma_shrink(struct vma_iterator *vmi, struct vm_area_struct *vma, in vma_shrink() argument
722 if (vma_iter_prealloc(vmi)) in vma_shrink()
730 vma_iter_clear(vmi, vma->vm_start, start); in vma_shrink()
733 vma_iter_clear(vmi, end, vma->vm_end); in vma_shrink()
738 vma_complete(&vp, vmi, vma->vm_mm); in vma_shrink()
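vma_expand() and vma_shrink() are mirror images of the same sequence: vma_iter_prealloc(), adjust the tree, vma_complete(). Expansion repositions the iterator if it is not already at start (688-689) and stores the grown VMA (696); shrinking instead clears whatever falls outside the new bounds with vma_iter_clear() (730, 733). A sketch of the shrink side, with the guarding if-tests an assumption:

    /* Shrinking to [start, end): wipe the trimmed ends from the tree. */
    if (vma->vm_start < start)
            vma_iter_clear(vmi, vma->vm_start, start);
    if (vma->vm_end > end)
            vma_iter_clear(vmi, end, vma->vm_end);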
886 struct vm_area_struct *vma_merge(struct vma_iterator *vmi, struct mm_struct *mm, in vma_merge() argument
935 vma_prev(vmi); in vma_merge()
994 if (vma_iter_prealloc(vmi)) in vma_merge()
1011 vma_iter_store(vmi, vma); in vma_merge()
1018 vma_iter_store(vmi, next); in vma_merge()
1022 vma_complete(&vp, vmi, mm); in vma_merge()
1023 vma_iter_free(vmi); in vma_merge()
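The merge path stores back whichever VMA survives, either the expanded vma (1011) or next (1018), then finishes with vma_complete() immediately followed by vma_iter_free() (1022-1023). One plausible reading: vma_iter_free() releases any preallocated nodes the store did not consume, and a failed preallocation at 994 simply abandons the merge:

    if (vma_iter_prealloc(vmi))
            return NULL;            /* assumed: caller falls back to no merge */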
2154 int __split_vma(struct vma_iterator *vmi, struct vm_area_struct *vma, in __split_vma() argument
2177 if (vma_iter_prealloc(vmi)) in __split_vma()
2214 vma_complete(&vp, vmi, vma->vm_mm); in __split_vma()
2218 vma_next(vmi); in __split_vma()
2225 vma_iter_free(vmi); in __split_vma()
2236 int split_vma(struct vma_iterator *vmi, struct vm_area_struct *vma, in split_vma() argument
2242 return __split_vma(vmi, vma, addr, new_below); in split_vma()
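split_vma() (2236-2242) is a thin wrapper over __split_vma(). A sketch of the complete wrapper; the sysctl_max_map_count guard is an assumption (splitting adds a mapping, so such a check is the usual reason the wrapper exists) and is not visible in the match list:

    int split_vma(struct vma_iterator *vmi, struct vm_area_struct *vma,
                  unsigned long addr, int new_below)
    {
            if (vma->vm_mm->map_count >= sysctl_max_map_count)
                    return -ENOMEM;

            return __split_vma(vmi, vma, addr, new_below);
    }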
2271 do_vmi_align_munmap(struct vma_iterator *vmi, struct vm_area_struct *vma, in do_vmi_align_munmap() argument
2302 error = __split_vma(vmi, vma, start, 0); in do_vmi_align_munmap()
2306 vma = vma_iter_load(vmi); in do_vmi_align_munmap()
2309 prev = vma_prev(vmi); in do_vmi_align_munmap()
2311 vma_iter_set(vmi, start); in do_vmi_align_munmap()
2317 for_each_vma_range(*vmi, next, end) { in do_vmi_align_munmap()
2320 error = __split_vma(vmi, next, end, 0); in do_vmi_align_munmap()
2335 next = vma_next(vmi); in do_vmi_align_munmap()
2359 vma_iter_set(vmi, start); in do_vmi_align_munmap()
2362 for_each_vma_range(*vmi, vma_mas, end) { in do_vmi_align_munmap()
2372 vma_iter_set(vmi, start); in do_vmi_align_munmap()
2373 if (vma_iter_clear_gfp(vmi, start, end, GFP_KERNEL)) in do_vmi_align_munmap()
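The do_vmi_align_munmap() matches outline a multi-pass unmap: split the VMA straddling start (2302), walk the range handling each VMA and splitting at end if needed (2317, 2320), then re-anchor and wipe the whole range with a single vma_iter_clear_gfp() store (2372-2373). The repeated vma_iter_set(vmi, start) calls (2311, 2359, 2372) reposition the iterator before each pass. A skeleton of the final pass:

    /* Every VMA in [start, end) has been dealt with; erase them in one write. */
    vma_iter_set(vmi, start);
    if (vma_iter_clear_gfp(vmi, start, end, GFP_KERNEL))
            return -ENOMEM;         /* assumed error path */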
2430 int do_vmi_munmap(struct vma_iterator *vmi, struct mm_struct *mm, in do_vmi_munmap() argument
2448 vma = vma_find(vmi, end); in do_vmi_munmap()
2452 return do_vmi_align_munmap(vmi, vma, mm, start, end, uf, downgrade); in do_vmi_munmap()
2464 VMA_ITERATOR(vmi, mm, start); in do_munmap()
2466 return do_vmi_munmap(&vmi, mm, start, len, uf, false); in do_munmap()
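do_munmap() survives only as a convenience wrapper: it builds an iterator positioned at start and delegates with downgrade=false. Reconstructed from 2464-2466; the signature is the standard one for this helper, but treat it as an assumption:

    int do_munmap(struct mm_struct *mm, unsigned long start, size_t len,
                  struct list_head *uf)
    {
            VMA_ITERATOR(vmi, mm, start);

            return do_vmi_munmap(&vmi, mm, start, len, uf, false);
    }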
2482 VMA_ITERATOR(vmi, mm, addr); in mmap_region()
2500 if (do_vmi_munmap(&vmi, mm, addr, len, uf, false)) in mmap_region()
2513 next = vma_next(&vmi); in mmap_region()
2514 prev = vma_prev(&vmi); in mmap_region()
2542 !vma_expand(&vmi, vma, merge_start, merge_end, vm_pgoff, next)) { in mmap_region()
2559 vma_iter_set(&vmi, addr); in mmap_region()
2586 vma_iter_set(&vmi, addr); in mmap_region()
2592 merge = vma_merge(&vmi, mm, prev, vma->vm_start, in mmap_region()
2638 if (vma_iter_prealloc(&vmi)) in mmap_region()
2644 vma_iter_store(&vmi, vma); in mmap_region()
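mmap_region() leans on the same primitives: clear anything in the way (2500), probe the neighbours with vma_next()/vma_prev() (2513-2514), try vma_expand() or vma_merge() first, and re-anchor with vma_iter_set(&vmi, addr) (2559, 2586) before the final prealloc-and-store (2638, 2644). The re-anchoring matters because the next/prev probes move the iterator as a side effect; a sketch of the shape:

    next = vma_next(&vmi);
    prev = vma_prev(&vmi);          /* probes leave the iterator repositioned */
    /* ... expand/merge attempts elided ... */
    vma_iter_set(&vmi, addr);       /* put it back before inserting */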
2726 VMA_ITERATOR(vmi, mm, start); in __vm_munmap()
2731 ret = do_vmi_munmap(&vmi, mm, start, len, &uf, downgrade); in __vm_munmap()
2797 VMA_ITERATOR(vmi, mm, vma->vm_end); in SYSCALL_DEFINE5()
2800 for_each_vma_range(vmi, next, start + size) { in SYSCALL_DEFINE5()
2856 int do_vma_munmap(struct vma_iterator *vmi, struct vm_area_struct *vma, in do_vma_munmap() argument
2864 ret = do_vmi_align_munmap(vmi, vma, mm, start, end, uf, downgrade); in do_vma_munmap()
2881 static int do_brk_flags(struct vma_iterator *vmi, struct vm_area_struct *vma, in do_brk_flags() argument
2909 if (vma_iter_prealloc(vmi)) in do_brk_flags()
2917 vma_iter_store(vmi, vma); in do_brk_flags()
2919 vma_complete(&vp, vmi, mm); in do_brk_flags()
2935 if (vma_iter_store_gfp(vmi, vma, GFP_KERNEL)) in do_brk_flags()
2964 VMA_ITERATOR(vmi, mm, addr); in vm_brk_flags()
2983 ret = do_vmi_munmap(&vmi, mm, addr, len, &uf, 0); in vm_brk_flags()
2987 vma = vma_prev(&vmi); in vm_brk_flags()
2988 ret = do_brk_flags(&vmi, vma, addr, len, flags); in vm_brk_flags()
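vm_brk_flags() mirrors the brk() syscall path: clear the target range (2983), step back to a VMA that might be extended in place (2987), and hand both to do_brk_flags() (2988). Inside do_brk_flags(), the two outcomes visible above are growing an existing anonymous VMA (prealloc at 2909, store at 2917, vma_complete() at 2919) or inserting a freshly allocated one via vma_iter_store_gfp() (2935). A condensed sketch of the caller side; the label name is a guess:

    ret = do_vmi_munmap(&vmi, mm, addr, len, &uf, 0);
    if (ret)
            goto out;               /* hypothetical label */

    vma = vma_prev(&vmi);           /* candidate VMA to grow in place */
    ret = do_brk_flags(&vmi, vma, addr, len, flags);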
3124 VMA_ITERATOR(vmi, mm, addr); in copy_vma()
3140 new_vma = vma_merge(&vmi, mm, prev, addr, addr + len, vma->vm_flags, in copy_vma()