Lines matching refs:mm (identifier cross-reference for mm, apparently over the userland VMA test harness, tools/testing/vma/vma.c). Each entry gives the source line number, the matching line, and the enclosing function; the trailing "argument"/"local" notes how mm is bound at that site.

64 static struct vm_area_struct *alloc_vma(struct mm_struct *mm,  in alloc_vma()  argument
70 struct vm_area_struct *ret = vm_area_alloc(mm); in alloc_vma()
85 static int attach_vma(struct mm_struct *mm, struct vm_area_struct *vma) in attach_vma() argument
89 res = vma_link(mm, vma); in attach_vma()
102 static struct vm_area_struct *alloc_and_link_vma(struct mm_struct *mm, in alloc_and_link_vma() argument
108 struct vm_area_struct *vma = alloc_vma(mm, start, end, pgoff, vm_flags); in alloc_and_link_vma()
113 if (attach_vma(mm, vma)) { in alloc_and_link_vma()
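
Taken together, the three helpers above form the harness's standard setup path: alloc_vma() wraps vm_area_alloc() and fills in the range fields, attach_vma() links the VMA into mm's maple tree via vma_link(), and alloc_and_link_vma() composes the two. A minimal sketch of that composition, reconstructed from the matched lines (error handling outside the matches is an assumption):

    static struct vm_area_struct *alloc_and_link_vma(struct mm_struct *mm,
                                                     unsigned long start, unsigned long end,
                                                     pgoff_t pgoff, vm_flags_t vm_flags)
    {
            /* alloc_vma(): vm_area_alloc(mm) plus vm_start/vm_end/vm_pgoff/flags setup. */
            struct vm_area_struct *vma = alloc_vma(mm, start, end, pgoff, vm_flags);

            if (vma == NULL)
                    return NULL;

            /* attach_vma(): vma_link(mm, vma); returns nonzero on failure. */
            if (attach_vma(mm, vma)) {
                    vm_area_free(vma);
                    return NULL;
            }

            return vma;
    }
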
211 static struct vm_area_struct *try_merge_new_vma(struct mm_struct *mm, in try_merge_new_vma() argument
232 return alloc_and_link_vma(mm, start, end, pgoff, vm_flags); in try_merge_new_vma()
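
try_merge_new_vma() is the merge-or-allocate wrapper the later tests lean on: it describes the new range in the vma_merge_struct, attempts a merge with adjacent VMAs, and only falls back to alloc_and_link_vma() (the matched line 232) when no merge is possible, reporting which path was taken. A sketch of the likely shape; vmg_set_range() and merge_new() are harness helpers not shown in the matches, so treat their exact signatures as assumptions:

    static struct vm_area_struct *try_merge_new_vma(struct mm_struct *mm,
                                                    struct vma_merge_struct *vmg,
                                                    unsigned long start, unsigned long end,
                                                    pgoff_t pgoff, vm_flags_t vm_flags,
                                                    bool *was_merged)
    {
            struct vm_area_struct *merged;

            vmg_set_range(vmg, start, end, pgoff, vm_flags);

            merged = merge_new(vmg);        /* Try to coalesce with neighbours. */
            if (merged) {
                    *was_merged = true;
                    return merged;
            }

            *was_merged = false;
            /* No merge possible: allocate and link a fresh VMA instead. */
            return alloc_and_link_vma(mm, start, end, pgoff, vm_flags);
    }
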
249 static int cleanup_mm(struct mm_struct *mm, struct vma_iterator *vmi) in cleanup_mm() argument
263 mtree_destroy(&mm->mm_mt); in cleanup_mm()
264 mm->map_count = 0; in cleanup_mm()
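
cleanup_mm() is the common teardown: walk the whole address space, free every VMA the test left behind, destroy the maple tree backing mm (line 263), and reset map_count (line 264). It returns the number of VMAs it found, which is why tests assert on its return value. Roughly, with the walk and the free helper inferred rather than matched:

    static int cleanup_mm(struct mm_struct *mm, struct vma_iterator *vmi)
    {
            struct vm_area_struct *vma;
            int count = 0;

            vma_iter_set(vmi, 0);           /* Rewind to the start of the space. */
            for_each_vma(*vmi, vma) {
                    vm_area_free(vma);
                    count++;
            }

            mtree_destroy(&mm->mm_mt);      /* Drop the tree itself... */
            mm->map_count = 0;              /* ...and the bookkeeping. */

            return count;                   /* Callers ASSERT on the final layout. */
    }
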
305 struct mm_struct mm = {}; in test_simple_merge() local
306 struct vm_area_struct *vma_left = alloc_vma(&mm, 0, 0x1000, 0, vm_flags); in test_simple_merge()
307 struct vm_area_struct *vma_right = alloc_vma(&mm, 0x2000, 0x3000, 2, vm_flags); in test_simple_merge()
308 VMA_ITERATOR(vmi, &mm, 0x1000); in test_simple_merge()
310 .mm = &mm, in test_simple_merge()
318 ASSERT_FALSE(attach_vma(&mm, vma_left)); in test_simple_merge()
319 ASSERT_FALSE(attach_vma(&mm, vma_right)); in test_simple_merge()
330 mtree_destroy(&mm.mm_mt); in test_simple_merge()
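
The matches outline the simplest scenario: a stack-allocated mm_struct, VMAs at [0, 0x1000) and [0x2000, 0x3000), and a vma_merge_struct describing the one-page hole between them; merging the hole should produce a single VMA spanning [0, 0x3000). Filling in the unmatched middle of the test (vmg fields beyond .mm and the exact assertions are assumptions):

    vm_flags_t vm_flags = VM_READ | VM_WRITE | VM_MAYREAD | VM_MAYWRITE;
    struct mm_struct mm = {};
    struct vm_area_struct *vma_left = alloc_vma(&mm, 0, 0x1000, 0, vm_flags);
    struct vm_area_struct *vma_right = alloc_vma(&mm, 0x2000, 0x3000, 2, vm_flags);
    VMA_ITERATOR(vmi, &mm, 0x1000);
    struct vma_merge_struct vmg = {
            .mm = &mm,
            .vmi = &vmi,
            .start = 0x1000,        /* The hole between left and right. */
            .end = 0x2000,
            .pgoff = 1,
    };
    struct vm_area_struct *vma;

    ASSERT_FALSE(attach_vma(&mm, vma_left));
    ASSERT_FALSE(attach_vma(&mm, vma_right));

    vma = merge_new(&vmg);          /* Should fuse left + hole + right. */
    ASSERT_EQ(vma->vm_start, 0);
    ASSERT_EQ(vma->vm_end, 0x3000);

    mtree_destroy(&mm.mm_mt);       /* mm is on the stack; only the tree needs freeing. */
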
339 struct mm_struct mm = {}; in test_simple_modify() local
340 struct vm_area_struct *init_vma = alloc_vma(&mm, 0, 0x3000, 0, vm_flags); in test_simple_modify()
341 VMA_ITERATOR(vmi, &mm, 0x1000); in test_simple_modify()
343 ASSERT_FALSE(attach_vma(&mm, init_vma)); in test_simple_modify()
390 mtree_destroy(&mm.mm_mt); in test_simple_modify()
398 struct mm_struct mm = {}; in test_simple_expand() local
399 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x1000, 0, vm_flags); in test_simple_expand()
400 VMA_ITERATOR(vmi, &mm, 0); in test_simple_expand()
409 ASSERT_FALSE(attach_vma(&mm, vma)); in test_simple_expand()
418 mtree_destroy(&mm.mm_mt); in test_simple_expand()
426 struct mm_struct mm = {}; in test_simple_shrink() local
427 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x3000, 0, vm_flags); in test_simple_shrink()
428 VMA_ITERATOR(vmi, &mm, 0); in test_simple_shrink()
430 ASSERT_FALSE(attach_vma(&mm, vma)); in test_simple_shrink()
439 mtree_destroy(&mm.mm_mt); in test_simple_shrink()
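
test_simple_modify, test_simple_expand and test_simple_shrink all follow the same skeleton: attach a single VMA, apply one primitive (a flag-change split, vma_expand(), or vma_shrink()), assert the resulting ranges, then mtree_destroy(). The shrink case, for example, plausibly trims the [0, 0x3000) VMA from line 427 down to one page (the vma_shrink() call is inferred from the kernel API, not matched):

    struct mm_struct mm = {};
    struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x3000, 0, vm_flags);
    VMA_ITERATOR(vmi, &mm, 0);

    ASSERT_FALSE(attach_vma(&mm, vma));

    /* Trim [0, 0x3000) down to [0, 0x1000); the tail pages are unmapped. */
    ASSERT_FALSE(vma_shrink(&vmi, vma, 0, 0x1000, 0));

    ASSERT_EQ(vma->vm_start, 0);
    ASSERT_EQ(vma->vm_end, 0x1000);
    ASSERT_EQ(mm.map_count, 1);     /* Still one VMA, just smaller. */

    mtree_destroy(&mm.mm_mt);
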
447 struct mm_struct mm = {}; in test_merge_new() local
448 VMA_ITERATOR(vmi, &mm, 0); in test_merge_new()
450 .mm = &mm, in test_merge_new()
476 vma_a = alloc_and_link_vma(&mm, 0, 0x2000, 0, vm_flags); in test_merge_new()
482 vma_b = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, vm_flags); in test_merge_new()
487 vma_c = alloc_and_link_vma(&mm, 0xb000, 0xc000, 0xb, vm_flags); in test_merge_new()
498 vma_d = try_merge_new_vma(&mm, &vmg, 0x7000, 0x9000, 7, vm_flags, &merged); in test_merge_new()
503 ASSERT_EQ(mm.map_count, 4); in test_merge_new()
513 vma = try_merge_new_vma(&mm, &vmg, 0x2000, 0x3000, 2, vm_flags, &merged); in test_merge_new()
522 ASSERT_EQ(mm.map_count, 3); in test_merge_new()
530 vma = try_merge_new_vma(&mm, &vmg, 0x4000, 0x5000, 4, vm_flags, &merged); in test_merge_new()
539 ASSERT_EQ(mm.map_count, 3); in test_merge_new()
549 vma = try_merge_new_vma(&mm, &vmg, 0x6000, 0x7000, 6, vm_flags, &merged); in test_merge_new()
558 ASSERT_EQ(mm.map_count, 3); in test_merge_new()
567 vma = try_merge_new_vma(&mm, &vmg, 0x5000, 0x6000, 5, vm_flags, &merged); in test_merge_new()
576 ASSERT_EQ(mm.map_count, 2); in test_merge_new()
585 vma = try_merge_new_vma(&mm, &vmg, 0xa000, 0xb000, 0xa, vm_flags, &merged); in test_merge_new()
594 ASSERT_EQ(mm.map_count, 2); in test_merge_new()
602 vma = try_merge_new_vma(&mm, &vmg, 0x9000, 0xa000, 0x9, vm_flags, &merged); in test_merge_new()
611 ASSERT_EQ(mm.map_count, 1); in test_merge_new()
636 mtree_destroy(&mm.mm_mt); in test_merge_new()
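
test_merge_new charts how map_count collapses as gaps are plugged: VMAs A [0, 0x2000), B [0x3000, 0x4000) and C [0xb000, 0xc000) plus the non-adjacent D [0x7000, 0x9000) give the initial count of 4 (line 503); each subsequent try_merge_new_vma() call fills a page-sized hole, and the assertions track the count stepping 4 -> 3 -> 3 -> 3 -> 2 -> 2 -> 1 until one VMA covers the whole range. One step in full, with the non-matched assertions assumed:

    /* Plug the A/B hole: [0x2000, 0x3000) should merge into both neighbours. */
    vma = try_merge_new_vma(&mm, &vmg, 0x2000, 0x3000, 2, vm_flags, &merged);
    ASSERT_NE(vma, NULL);
    ASSERT_TRUE(merged);            /* Must have merged, not fallen back to alloc. */
    ASSERT_EQ(vma->vm_start, 0);    /* A + hole + B is now a single VMA... */
    ASSERT_EQ(vma->vm_end, 0x4000);
    ASSERT_EQ(mm.map_count, 3);     /* ...so the count drops from 4 to 3 (line 522). */
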
643 struct mm_struct mm = {}; in test_vma_merge_special_flags() local
644 VMA_ITERATOR(vmi, &mm, 0); in test_vma_merge_special_flags()
646 .mm = &mm, in test_vma_merge_special_flags()
664 vma_left = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_vma_merge_special_flags()
694 vma = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, vm_flags); in test_vma_merge_special_flags()
708 cleanup_mm(&mm, &vmi); in test_vma_merge_special_flags()
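
test_vma_merge_special_flags verifies that each VM_SPECIAL bit (VM_IO, VM_DONTEXPAND, VM_PFNMAP, VM_MIXEDMAP) vetoes a merge that would otherwise succeed against vma_left from line 664. The loop plausibly looks like this; __vm_flags is the writable alias for vm_flags, and the vmg field naming is an assumption:

    static const vm_flags_t special_flags[] = {
            VM_IO, VM_DONTEXPAND, VM_PFNMAP, VM_MIXEDMAP,   /* == VM_SPECIAL */
    };
    int i;

    for (i = 0; i < ARRAY_SIZE(special_flags); i++) {
            /* Mark the existing neighbour and the proposed range as special. */
            vma_left->__vm_flags = vm_flags | special_flags[i];
            vmg.vm_flags = vm_flags | special_flags[i];

            /* A special mapping must never be coalesced. */
            ASSERT_EQ(merge_new(&vmg), NULL);
    }
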
715 struct mm_struct mm = {}; in test_vma_merge_with_close() local
716 VMA_ITERATOR(vmi, &mm, 0); in test_vma_merge_with_close()
718 .mm = &mm, in test_vma_merge_with_close()
794 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_vma_merge_with_close()
795 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, vm_flags); in test_vma_merge_with_close()
805 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_vma_merge_with_close()
819 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_vma_merge_with_close()
820 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_vma_merge_with_close()
834 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_vma_merge_with_close()
847 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_vma_merge_with_close()
848 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, vm_flags); in test_vma_merge_with_close()
861 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_vma_merge_with_close()
875 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_vma_merge_with_close()
876 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_vma_merge_with_close()
877 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, vm_flags); in test_vma_merge_with_close()
887 ASSERT_EQ(cleanup_mm(&mm, &vmi), 3); in test_vma_merge_with_close()
901 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_vma_merge_with_close()
902 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_vma_merge_with_close()
903 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, vm_flags); in test_vma_merge_with_close()
916 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_vma_merge_with_close()
924 struct mm_struct mm = {}; in test_vma_merge_new_with_close() local
925 VMA_ITERATOR(vmi, &mm, 0); in test_vma_merge_new_with_close()
927 .mm = &mm, in test_vma_merge_new_with_close()
930 struct vm_area_struct *vma_prev = alloc_and_link_vma(&mm, 0, 0x2000, 0, vm_flags); in test_vma_merge_new_with_close()
931 struct vm_area_struct *vma_next = alloc_and_link_vma(&mm, 0x5000, 0x7000, 5, vm_flags); in test_vma_merge_new_with_close()
970 ASSERT_EQ(mm.map_count, 2); in test_vma_merge_new_with_close()
972 cleanup_mm(&mm, &vmi); in test_vma_merge_new_with_close()
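
Both close-handler tests pin down one rule: a merge may not remove a VMA whose vm_ops has a ->close() hook, because the removal would bypass the owner's cleanup notification. The harness plausibly wires this up with a no-op ops table; the scenario below mirrors the three-VMA case whose matches end at line 887 (field names like .middle follow recent trees and are assumptions):

    static void dummy_close(struct vm_area_struct *vma)
    {
            /* Deliberately empty: only its presence matters to the merge logic. */
    }

    static const struct vm_operations_struct vm_ops = {
            .close = dummy_close,
    };

    /* The middle VMA carries the handler, so merging it away must be refused
     * and all three VMAs must survive. */
    vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
    vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
    vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, vm_flags);
    vma->vm_ops = &vm_ops;

    vmg_set_range(&vmg, 0x3000, 0x5000, 3, vm_flags);
    vmg.prev = vma_prev;
    vmg.middle = vma;
    ASSERT_EQ(merge_existing(&vmg), NULL);  /* Refused: removal would skip ->close(). */
    ASSERT_EQ(cleanup_mm(&mm, &vmi), 3);    /* All three still present (line 887). */
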
979 struct mm_struct mm = {}; in test_merge_existing() local
980 VMA_ITERATOR(vmi, &mm, 0); in test_merge_existing()
983 .mm = &mm, in test_merge_existing()
1001 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, vm_flags); in test_merge_existing()
1003 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, vm_flags); in test_merge_existing()
1020 ASSERT_EQ(mm.map_count, 2); in test_merge_existing()
1023 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_merge_existing()
1035 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, vm_flags); in test_merge_existing()
1036 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, vm_flags); in test_merge_existing()
1048 ASSERT_EQ(mm.map_count, 1); in test_merge_existing()
1051 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1); in test_merge_existing()
1063 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_merge_existing()
1065 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, vm_flags); in test_merge_existing()
1082 ASSERT_EQ(mm.map_count, 2); in test_merge_existing()
1085 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_merge_existing()
1097 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_merge_existing()
1099 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, vm_flags); in test_merge_existing()
1111 ASSERT_EQ(mm.map_count, 1); in test_merge_existing()
1114 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1); in test_merge_existing()
1126 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_merge_existing()
1128 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, vm_flags); in test_merge_existing()
1129 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, vm_flags); in test_merge_existing()
1141 ASSERT_EQ(mm.map_count, 1); in test_merge_existing()
1144 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1); in test_merge_existing()
1161 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_merge_existing()
1162 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, vm_flags); in test_merge_existing()
1163 vma_next = alloc_and_link_vma(&mm, 0x8000, 0xa000, 8, vm_flags); in test_merge_existing()
1201 ASSERT_EQ(cleanup_mm(&mm, &vmi), 3); in test_merge_existing()
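
test_merge_existing re-describes part of an already-mapped VMA so it merges with a neighbour: partially into next (vma shrinks, next grows leftwards), partially into prev, or fully into both (the map_count == 1 assertions above). The first case, reconstructed with assumed vmg field names:

    /* vma [0x2000, 0x6000) and vma_next [0x6000, 0x9000), per lines 1001-1003. */
    vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, vm_flags);
    vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, vm_flags);

    /* Hand the tail [0x3000, 0x6000) of vma over to vma_next. */
    vmg_set_range(&vmg, 0x3000, 0x6000, 3, vm_flags);
    vmg.prev = vma;
    vmg.middle = vma;
    ASSERT_EQ(merge_existing(&vmg), vma_next);

    ASSERT_EQ(vma->vm_start, 0x2000);       /* vma keeps only its first page... */
    ASSERT_EQ(vma->vm_end, 0x3000);
    ASSERT_EQ(vma_next->vm_start, 0x3000);  /* ...and vma_next absorbed the rest. */
    ASSERT_EQ(vma_next->vm_end, 0x9000);
    ASSERT_EQ(mm.map_count, 2);             /* As asserted at line 1020. */
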
1209 struct mm_struct mm = {}; in test_anon_vma_non_mergeable() local
1210 VMA_ITERATOR(vmi, &mm, 0); in test_anon_vma_non_mergeable()
1213 .mm = &mm, in test_anon_vma_non_mergeable()
1232 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_anon_vma_non_mergeable()
1233 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, vm_flags); in test_anon_vma_non_mergeable()
1234 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, vm_flags); in test_anon_vma_non_mergeable()
1257 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_anon_vma_non_mergeable()
1270 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_anon_vma_non_mergeable()
1271 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, vm_flags); in test_anon_vma_non_mergeable()
1288 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_anon_vma_non_mergeable()
1296 struct mm_struct mm = {}; in test_dup_anon_vma() local
1297 VMA_ITERATOR(vmi, &mm, 0); in test_dup_anon_vma()
1299 .mm = &mm, in test_dup_anon_vma()
1316 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_dup_anon_vma()
1317 vma_next = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_dup_anon_vma()
1331 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
1342 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_dup_anon_vma()
1343 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_dup_anon_vma()
1344 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, vm_flags); in test_dup_anon_vma()
1364 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
1375 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_dup_anon_vma()
1376 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_dup_anon_vma()
1377 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, vm_flags); in test_dup_anon_vma()
1393 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
1404 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_dup_anon_vma()
1405 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, vm_flags); in test_dup_anon_vma()
1421 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
1432 vma = alloc_and_link_vma(&mm, 0, 0x5000, 0, vm_flags); in test_dup_anon_vma()
1433 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, vm_flags); in test_dup_anon_vma()
1449 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
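
The two anon_vma tests are complementary: test_anon_vma_non_mergeable shows that incompatible anon_vmas on prev and next block the three-way merge (both survive, hence the counts of 2), while test_dup_anon_vma checks the bookkeeping when a merge does remove a VMA owning an anon_vma: the survivor must take over a duplicated reference so rmap keeps working. In sketch form, using the harness's dummy anon_vma stub (the was_cloned field is harness-specific and assumed):

    /* Give the VMA that will be merged away its own anon_vma... */
    vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
    vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
    vma->anon_vma = &dummy_anon_vma;

    /* ...then fold it entirely into vma_prev. */
    vmg_set_range(&vmg, 0x3000, 0x5000, 3, vm_flags);
    vmg.prev = vma_prev;
    vmg.middle = vma;
    ASSERT_EQ(merge_existing(&vmg), vma_prev);

    /* The survivor inherited a duplicated anon_vma, not a dangling pointer. */
    ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
    ASSERT_TRUE(vma_prev->anon_vma->was_cloned);
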
1456 struct mm_struct mm = {}; in test_vmi_prealloc_fail() local
1457 VMA_ITERATOR(vmi, &mm, 0); in test_vmi_prealloc_fail()
1459 .mm = &mm, in test_vmi_prealloc_fail()
1471 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_vmi_prealloc_fail()
1472 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_vmi_prealloc_fail()
1491 cleanup_mm(&mm, &vmi); /* Resets fail_prealloc too. */ in test_vmi_prealloc_fail()
1499 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_vmi_prealloc_fail()
1500 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_vmi_prealloc_fail()
1515 cleanup_mm(&mm, &vmi); in test_vmi_prealloc_fail()
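
test_vmi_prealloc_fail injects a maple-tree preallocation failure (the fail_prealloc flag that cleanup_mm() resets, per the comment at line 1491) to prove an OOM mid-merge backs out cleanly instead of leaving the tree half-updated. The injection is plausibly a global checked by the harness's iterator-prealloc stub; the macro below is an assumption about how that stub is built:

    static bool fail_prealloc;

    /* Harness override: simulate allocation failure on demand. */
    #define vma_iter_prealloc(vmi, vma)                                     \
            (fail_prealloc ? -ENOMEM :                                      \
                             mas_preallocate(&(vmi)->mas, (vma), GFP_KERNEL))

    /* In the test body: arm the failure, then expect a clean refusal. */
    fail_prealloc = true;
    ASSERT_EQ(merge_existing(&vmg), NULL);
    ASSERT_EQ(vmg.state, VMA_MERGE_ERROR_NOMEM);
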
1522 struct mm_struct mm = {}; in test_merge_extend() local
1523 VMA_ITERATOR(vmi, &mm, 0x1000); in test_merge_extend()
1526 vma = alloc_and_link_vma(&mm, 0, 0x1000, 0, vm_flags); in test_merge_extend()
1527 alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, vm_flags); in test_merge_extend()
1543 ASSERT_EQ(mm.map_count, 1); in test_merge_extend()
1545 cleanup_mm(&mm, &vmi); in test_merge_extend()
1552 struct mm_struct mm = {}; in test_copy_vma() local
1554 VMA_ITERATOR(vmi, &mm, 0); in test_copy_vma()
1559 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_copy_vma()
1567 cleanup_mm(&mm, &vmi); in test_copy_vma()
1571 vma = alloc_and_link_vma(&mm, 0, 0x2000, 0, vm_flags); in test_copy_vma()
1572 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x8000, 6, vm_flags); in test_copy_vma()
1578 cleanup_mm(&mm, &vmi); in test_copy_vma()
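
test_merge_extend and test_copy_vma exercise the mremap()-side entry points: vma_merge_extend() grows a VMA in place and should coalesce it with a following mapping (the map_count == 1 at line 1543), while copy_vma() relocates a VMA and should merge with an adjacent target where possible. The extend case in full, with the signature taken from mm/vma.h:

    struct vm_area_struct *vma = alloc_and_link_vma(&mm, 0, 0x1000, 0, vm_flags);
    alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, vm_flags);

    /* Grow vma by 0x2000 so its new end (0x3000) abuts the second VMA;
     * the extension and that neighbour should fuse into one mapping. */
    vma = vma_merge_extend(&vmi, vma, 0x2000);
    ASSERT_NE(vma, NULL);
    ASSERT_EQ(vma->vm_start, 0);
    ASSERT_EQ(vma->vm_end, 0x4000);
    ASSERT_EQ(mm.map_count, 1);     /* Two VMAs became one. */
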
1585 struct mm_struct mm = {}; in test_expand_only_mode() local
1586 VMA_ITERATOR(vmi, &mm, 0); in test_expand_only_mode()
1588 VMG_STATE(vmg, &mm, &vmi, 0x5000, 0x9000, vm_flags, 5); in test_expand_only_mode()
1596 alloc_and_link_vma(&mm, 0, 0x2000, 0, vm_flags); in test_expand_only_mode()
1603 vma_prev = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_expand_only_mode()
1618 cleanup_mm(&mm, &vmi); in test_expand_only_mode()
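
test_expand_only_mode builds its descriptor with VMG_STATE() (line 1588) and sets the just-expand flag, so the merge may only grow vma_prev over the new range and must not go looking for a next VMA to fold in. Sketched with the flag name as an assumption (it has appeared as VMG_FLAG_JUST_EXPAND in recent trees):

    VMG_STATE(vmg, &mm, &vmi, 0x5000, 0x9000, vm_flags, 5);
    struct vm_area_struct *vma_prev, *vma;

    alloc_and_link_vma(&mm, 0, 0x2000, 0, vm_flags);        /* Unrelated mapping. */
    vma_prev = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);

    vmg.prev = vma_prev;
    vmg.merge_flags = VMG_FLAG_JUST_EXPAND; /* Expand prev only; no next lookup. */

    vma = vma_merge_new_range(&vmg);
    ASSERT_EQ(vma, vma_prev);               /* prev now spans [0x3000, 0x9000). */
    ASSERT_EQ(vma->vm_end, 0x9000);
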
1624 struct mm_struct mm = {}; in test_mmap_region_basic() local
1627 VMA_ITERATOR(vmi, &mm, 0); in test_mmap_region_basic()
1629 current->mm = &mm; in test_mmap_region_basic()
1655 ASSERT_EQ(mm.map_count, 2); in test_mmap_region_basic()
1669 cleanup_mm(&mm, &vmi); in test_mmap_region_basic()
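
test_mmap_region_basic is the only test that installs the stub mm as current->mm (line 1629), because the mmap_region()-level path resolves the address space through current rather than taking an mm argument. It then maps through the high-level helper and expects merging to leave exactly two VMAs (line 1655). A sketch of that shape; the __mmap_region() signature follows mm/vma.h, and the addresses are illustrative rather than the file's actual values:

    unsigned long addr;

    current->mm = &mm;      /* mmap_region() works on current->mm. */

    /* Two adjacent anonymous mappings should merge into one VMA... */
    addr = __mmap_region(NULL, 0x300000, 0x3000, vm_flags, 0x300, NULL);
    ASSERT_EQ(addr, 0x300000);
    addr = __mmap_region(NULL, 0x303000, 0x3000, vm_flags, 0x303, NULL);
    ASSERT_EQ(addr, 0x303000);

    /* ...while a distant third stays separate, giving a final count of two. */
    addr = __mmap_region(NULL, 0x30d000, 0x3000, vm_flags, 0x30d, NULL);
    ASSERT_EQ(addr, 0x30d000);

    ASSERT_EQ(mm.map_count, 2);
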