Lines matching refs: mas

150 return mas_prev(&vmi->mas, min); in vma_prev_limit()
156 vmi->mas.index = start; in vma_iter_clear_gfp()
157 vmi->mas.last = end - 1; in vma_iter_clear_gfp()
158 mas_store_gfp(&vmi->mas, NULL, gfp); in vma_iter_clear_gfp()
159 if (unlikely(mas_is_err(&vmi->mas))) in vma_iter_clear_gfp()
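The two wrappers above drive the VMA iterator's embedded maple state directly: vma_prev_limit() steps backwards, and vma_iter_clear_gfp() erases a range by storing NULL over it and then checking the state for an allocation error. A minimal sketch of the same store-NULL pattern, assuming kernel context (<linux/maple_tree.h>); the function name is illustrative:

    /* Erase [start, end) from a maple tree, mirroring vma_iter_clear_gfp()
     * above. The tree stores inclusive ranges, hence end - 1. */
    static int mt_clear_range(struct maple_tree *mt, unsigned long start,
                              unsigned long end, gfp_t gfp)
    {
            MA_STATE(mas, mt, start, end - 1);

            mas_store_gfp(&mas, NULL, gfp); /* storing NULL erases the range */
            if (unlikely(mas_is_err(&mas))) /* allocation failure is latched in the state */
                    return -ENOMEM;
            return 0;
    }
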
308 MA_STATE(mas, mt, 0, 0); in validate_mm_mt()
311 mas_for_each(&mas, vma_mt, ULONG_MAX) { in validate_mm_mt()
312 if ((vma_mt->vm_start != mas.index) || in validate_mm_mt()
313 (vma_mt->vm_end - 1 != mas.last)) { in validate_mm_mt()
318 mas.index, mas.last); in validate_mm_mt()
322 mt_dump(mas.tree); in validate_mm_mt()
323 if (vma_mt->vm_end != mas.last + 1) { in validate_mm_mt()
326 mas.index, mas.last); in validate_mm_mt()
327 mt_dump(mas.tree); in validate_mm_mt()
329 VM_BUG_ON_MM(vma_mt->vm_end != mas.last + 1, mm); in validate_mm_mt()
330 if (vma_mt->vm_start != mas.index) { in validate_mm_mt()
333 mt_dump(mas.tree); in validate_mm_mt()
335 VM_BUG_ON_MM(vma_mt->vm_start != mas.index, mm); in validate_mm_mt()
345 MA_STATE(mas, &mm->mm_mt, 0, 0); in validate_mm()
349 mas_for_each(&mas, vma, ULONG_MAX) { in validate_mm()
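Both validators walk every VMA with mas_for_each() and cross-check the range the tree has recorded (mas.index, mas.last) against the VMA's own bounds, dumping the tree with mt_dump() on any mismatch. A condensed sketch of that check, with an illustrative name; VMAs are stored over the inclusive range [vm_start, vm_end - 1]:

    /* Walk the whole tree and verify each stored range matches its VMA. */
    static void check_vma_tree(struct mm_struct *mm)
    {
            struct vm_area_struct *vma_mt;
            MA_STATE(mas, &mm->mm_mt, 0, 0);

            mas_for_each(&mas, vma_mt, ULONG_MAX) {
                    if (vma_mt->vm_start != mas.index ||
                        vma_mt->vm_end - 1 != mas.last)
                            mt_dump(mas.tree); /* mismatch: dump for debugging */
            }
    }
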
1095 MA_STATE(mas, &vma->vm_mm->mm_mt, vma->vm_end, vma->vm_end); in find_mergeable_anon_vma()
1100 next = mas_walk(&mas); in find_mergeable_anon_vma()
1107 prev = mas_prev(&mas, 0); in find_mergeable_anon_vma()
1109 prev = mas_prev(&mas, 0); in find_mergeable_anon_vma()
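find_mergeable_anon_vma() locates both neighbours of a VMA from one state: mas_walk() at vma->vm_end lands on the next VMA (or NULL if a gap follows), the first mas_prev() steps back onto the VMA itself, and the second yields its predecessor. A sketch of just that positioning, under the same layout assumption; the helper name is illustrative:

    /* Return the VMA immediately before @vma, stepping a single maple
     * state the way the lines above do. */
    static struct vm_area_struct *vma_before(struct vm_area_struct *vma)
    {
            MA_STATE(mas, &vma->vm_mm->mm_mt, vma->vm_end, vma->vm_end);

            mas_walk(&mas);           /* next VMA, or NULL for a gap */
            mas_prev(&mas, 0);        /* back onto @vma itself */
            return mas_prev(&mas, 0); /* the predecessor, or NULL */
    }
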
1523 MA_STATE(mas, &current->mm->mm_mt, 0, 0); in unmapped_area()
1530 if (mas_empty_area(&mas, info->low_limit, info->high_limit - 1, in unmapped_area()
1534 gap = mas.index; in unmapped_area()
1553 MA_STATE(mas, &current->mm->mm_mt, 0, 0); in unmapped_area_topdown()
1559 if (mas_empty_area_rev(&mas, info->low_limit, info->high_limit - 1, in unmapped_area_topdown()
1563 gap = mas.last + 1 - info->length; in unmapped_area_topdown()
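The two gap searches are symmetric: mas_empty_area() finds the lowest free range of at least the requested length within [low_limit, high_limit - 1] and reports its start in mas.index, while mas_empty_area_rev() searches from the top and reports the end in mas.last (hence the mas.last + 1 - info->length arithmetic above). A bottom-up sketch, with an illustrative name:

    /* Find the lowest gap of at least @length within [low, high). */
    static unsigned long lowest_gap(struct mm_struct *mm, unsigned long low,
                                    unsigned long high, unsigned long length)
    {
            MA_STATE(mas, &mm->mm_mt, 0, 0);

            if (mas_empty_area(&mas, low, high - 1, length))
                    return -ENOMEM; /* no gap big enough; caller checks IS_ERR_VALUE */
            return mas.index;       /* start of the gap found */
    }
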
1810 MA_STATE(mas, &mm->mm_mt, addr, addr); in find_vma_prev()
1812 vma = mas_walk(&mas); in find_vma_prev()
1813 *pprev = mas_prev(&mas, 0); in find_vma_prev()
1815 vma = mas_next(&mas, ULONG_MAX); in find_vma_prev()
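Those four lines are nearly the whole of find_vma_prev(): one state answers both lookups. A reconstruction of how they plausibly fit together (the control flow between the listed lines is an assumption):

    /* Look up the VMA covering @addr and its predecessor in one walk. */
    static struct vm_area_struct *find_vma_prev_sketch(struct mm_struct *mm,
                    unsigned long addr, struct vm_area_struct **pprev)
    {
            struct vm_area_struct *vma;
            MA_STATE(mas, &mm->mm_mt, addr, addr);

            vma = mas_walk(&mas);       /* VMA covering addr, or NULL */
            *pprev = mas_prev(&mas, 0); /* predecessor; NULL if none */
            if (!vma)                   /* addr fell in a gap */
                    vma = mas_next(&mas, ULONG_MAX);
            return vma;
    }
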
1869 MA_STATE(mas, &mm->mm_mt, 0, 0); in expand_upwards()
1894 if (mas_preallocate(&mas, GFP_KERNEL)) in expand_upwards()
1899 mas_destroy(&mas); in expand_upwards()
1937 mas_set_range(&mas, vma->vm_start, address - 1); in expand_upwards()
1938 mas_store_prealloc(&mas, vma); in expand_upwards()
1948 mas_destroy(&mas); in expand_upwards()
1959 MA_STATE(mas, &mm->mm_mt, vma->vm_start, vma->vm_start); in expand_downwards()
1968 prev = mas_prev(&mas, 0); in expand_downwards()
1976 if (mas_preallocate(&mas, GFP_KERNEL)) in expand_downwards()
1981 mas_destroy(&mas); in expand_downwards()
2020 mas_set_range(&mas, address, vma->vm_end - 1); in expand_downwards()
2021 mas_store_prealloc(&mas, vma); in expand_downwards()
2031 mas_destroy(&mas); in expand_downwards()
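expand_upwards() and expand_downwards() share one write-side discipline: reserve tree nodes up front with mas_preallocate() so the eventual store cannot fail while locks are held, write the grown range with mas_set_range() plus mas_store_prealloc(), and call mas_destroy() to release nodes still reserved on paths that never stored. A sketch of that sequence, with locking and limit checks elided and an illustrative name (the two-argument mas_preallocate() matches this kernel version):

    static int grow_vma(struct vm_area_struct *vma, unsigned long start,
                        unsigned long end)
    {
            MA_STATE(mas, &vma->vm_mm->mm_mt, 0, 0);

            if (mas_preallocate(&mas, GFP_KERNEL)) /* reserve nodes early */
                    return -ENOMEM;
            /* ... limit checks and vma->vm_start/vm_end updates elided ... */
            mas_set_range(&mas, start, end - 1); /* inclusive last index */
            mas_store_prealloc(&mas, vma);       /* guaranteed not to fail */
            mas_destroy(&mas); /* frees leftovers; needed on paths that skip the store */
            return 0;
    }
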
2109 static inline void remove_mt(struct mm_struct *mm, struct ma_state *mas) in remove_mt() argument
2116 mas_for_each(mas, vma, ULONG_MAX) { in remove_mt()
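remove_mt() shows the state being handed down by pointer: the caller sets one up and the helper iterates with mas_for_each(mas, ...) directly, with no extra address-of. A bare sketch of that calling shape, with illustrative names:

    /* Iterate a caller-provided maple state. */
    static void for_each_remaining(struct ma_state *mas)
    {
            struct vm_area_struct *vma;

            mas_for_each(mas, vma, ULONG_MAX) {
                    /* ... tear down @vma ... */
            }
    }
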
3015 MA_STATE(mas, &mm->mm_mt, 0, 0); in exit_mmap()
3024 vma = mas_find(&mas, ULONG_MAX); in exit_mmap()
3060 } while ((vma = mas_find(&mas, ULONG_MAX)) != NULL); in exit_mmap()
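exit_mmap() drives its teardown loop with mas_find(), which positions the state on first use and resumes after the previous hit on later calls, so the same call works for both the priming read and the loop condition. A sketch of that loop shape, with the per-VMA work elided:

    static void walk_all_vmas(struct mm_struct *mm)
    {
            struct vm_area_struct *vma;
            MA_STATE(mas, &mm->mm_mt, 0, 0);

            vma = mas_find(&mas, ULONG_MAX); /* first VMA, or NULL */
            if (!vma)
                    return;
            do {
                    /* ... unmap and free @vma ... */
            } while ((vma = mas_find(&mas, ULONG_MAX)) != NULL);
    }
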
3490 MA_STATE(mas, &mm->mm_mt, 0, 0); in mm_take_all_locks()
3496 mas_for_each(&mas, vma, ULONG_MAX) { in mm_take_all_locks()
3504 mas_set(&mas, 0); in mm_take_all_locks()
3505 mas_for_each(&mas, vma, ULONG_MAX) { in mm_take_all_locks()
3513 mas_set(&mas, 0); in mm_take_all_locks()
3514 mas_for_each(&mas, vma, ULONG_MAX) { in mm_take_all_locks()
3573 MA_STATE(mas, &mm->mm_mt, 0, 0); in mm_drop_all_locks()
3578 mas_for_each(&mas, vma, ULONG_MAX) { in mm_drop_all_locks()
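mm_take_all_locks() makes three passes over the same VMAs (one per lock class) and mm_drop_all_locks() one more; after a full mas_for_each() pass the state sits at the end of the tree, so each extra pass is preceded by mas_set(&mas, 0) to rewind to the start. A two-pass sketch of that rewind pattern:

    static void two_passes(struct mm_struct *mm)
    {
            struct vm_area_struct *vma;
            MA_STATE(mas, &mm->mm_mt, 0, 0);

            mas_for_each(&mas, vma, ULONG_MAX) {
                    /* first pass, e.g. lock one class of object */
            }
            mas_set(&mas, 0); /* rewind the state to index 0 */
            mas_for_each(&mas, vma, ULONG_MAX) {
                    /* second pass over the same VMAs */
            }
    }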