Lines matching references to mt
29 #define RCU_MT_BUG_ON(test, y) {if (y) { test->stop = true; } MT_BUG_ON(test->mt, y); }
32 struct maple_tree *mt; member
62 static noinline void check_new_node(struct maple_tree *mt) in check_new_node() argument
70 MA_STATE(mas, mt, 0, 0); in check_new_node()
73 mtree_lock(mt); in check_new_node()
78 MT_BUG_ON(mt, mas_alloc_req(&mas) != 3); in check_new_node()
80 MT_BUG_ON(mt, mas.node != MA_ERROR(-ENOMEM)); in check_new_node()
81 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
83 MT_BUG_ON(mt, mas_allocated(&mas) != 3); in check_new_node()
85 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
86 MT_BUG_ON(mt, mn == NULL); in check_new_node()
87 MT_BUG_ON(mt, mas.alloc == NULL); in check_new_node()
88 MT_BUG_ON(mt, mas.alloc->slot[0] == NULL); in check_new_node()
91 mtree_unlock(mt); in check_new_node()
95 mtree_lock(mt); in check_new_node()
99 MT_BUG_ON(mt, mas_alloc_req(&mas) != 1); in check_new_node()
102 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
105 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
106 MT_BUG_ON(mt, mn == NULL); in check_new_node()
107 MT_BUG_ON(mt, mn->slot[0] != NULL); in check_new_node()
108 MT_BUG_ON(mt, mn->slot[1] != NULL); in check_new_node()
109 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
119 MT_BUG_ON(mt, mas_allocated(&mas) != 3); in check_new_node()
121 MT_BUG_ON(mt, mas_alloc_req(&mas) != 0); in check_new_node()
123 MT_BUG_ON(mt, mas.alloc == NULL); in check_new_node()
124 MT_BUG_ON(mt, mas.alloc->slot[0] == NULL); in check_new_node()
125 MT_BUG_ON(mt, mas.alloc->slot[1] == NULL); in check_new_node()
127 MT_BUG_ON(mt, mas_allocated(&mas) != 3); in check_new_node()
133 MT_BUG_ON(mt, mas_alloc_req(&mas) != 1); in check_new_node()
136 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
137 MT_BUG_ON(mt, mas_allocated(&mas) != 1); in check_new_node()
140 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
141 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
142 MT_BUG_ON(mt, mn == NULL); in check_new_node()
143 MT_BUG_ON(mt, mn->slot[0] != NULL); in check_new_node()
144 MT_BUG_ON(mt, mn->slot[1] != NULL); in check_new_node()
145 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
147 MT_BUG_ON(mt, mas_allocated(&mas) != 1); in check_new_node()
148 MT_BUG_ON(mt, mas.alloc->node_count); in check_new_node()
151 MT_BUG_ON(mt, mas_alloc_req(&mas) != 2); in check_new_node()
153 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
154 MT_BUG_ON(mt, mas_allocated(&mas) != 3); in check_new_node()
155 MT_BUG_ON(mt, mas.alloc == NULL); in check_new_node()
156 MT_BUG_ON(mt, mas.alloc->slot[0] == NULL); in check_new_node()
157 MT_BUG_ON(mt, mas.alloc->slot[1] == NULL); in check_new_node()
160 MT_BUG_ON(mt, mas_allocated(&mas) != i); in check_new_node()
161 MT_BUG_ON(mt, !mn); in check_new_node()
162 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
168 MT_BUG_ON(mt, mas_alloc_req(&mas) != total); in check_new_node()
170 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
190 MT_BUG_ON(mt, mas.alloc->node_count != e); in check_new_node()
192 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
193 MT_BUG_ON(mt, mas_allocated(&mas) != i - 1); in check_new_node()
194 MT_BUG_ON(mt, !mn); in check_new_node()
202 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
205 MT_BUG_ON(mt, mas_allocated(&mas) != j - 1); in check_new_node()
206 MT_BUG_ON(mt, !mn); in check_new_node()
207 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
209 MT_BUG_ON(mt, mas_allocated(&mas) != j); in check_new_node()
211 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
212 MT_BUG_ON(mt, mas_allocated(&mas) != j - 1); in check_new_node()
215 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
219 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
221 MT_BUG_ON(mt, mas_allocated(&mas) != i - j); in check_new_node()
223 MT_BUG_ON(mt, mas_allocated(&mas) != i - j - 1); in check_new_node()
229 MT_BUG_ON(mt, mas_allocated(&mas) != i - j); in check_new_node()
231 MT_BUG_ON(mt, mas_allocated(&mas) != i); in check_new_node()
233 MT_BUG_ON(mt, mas_allocated(&mas) != i - j); in check_new_node()
235 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
237 MT_BUG_ON(mt, mas_allocated(&mas) != i - j - 1); in check_new_node()
239 MT_BUG_ON(mt, mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
248 MT_BUG_ON(mt, !mas.alloc); in check_new_node()
254 MT_BUG_ON(mt, !smn->slot[j]); in check_new_node()
260 MT_BUG_ON(mt, mas_allocated(&mas) != total); in check_new_node()
263 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
267 MT_BUG_ON(mt, mas_allocated(&mas) != i); /* check request filled */ in check_new_node()
270 MT_BUG_ON(mt, mn == NULL); in check_new_node()
271 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
274 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
278 MA_STATE(mas2, mt, 0, 0); in check_new_node()
281 MT_BUG_ON(mt, mas_allocated(&mas) != i); /* check request filled */ in check_new_node()
284 MT_BUG_ON(mt, mn == NULL); in check_new_node()
285 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
287 MT_BUG_ON(mt, mas_allocated(&mas2) != j); in check_new_node()
289 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
290 MT_BUG_ON(mt, mas_allocated(&mas2) != i); in check_new_node()
293 MT_BUG_ON(mt, mas_allocated(&mas2) != j); in check_new_node()
295 MT_BUG_ON(mt, mn == NULL); in check_new_node()
296 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
299 MT_BUG_ON(mt, mas_allocated(&mas2) != 0); in check_new_node()
303 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
305 MT_BUG_ON(mt, mas.node != MA_ERROR(-ENOMEM)); in check_new_node()
306 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
307 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 1); in check_new_node()
308 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS); in check_new_node()
311 MT_BUG_ON(mt, mn == NULL); in check_new_node()
312 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
313 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS); in check_new_node()
314 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS - 1); in check_new_node()
317 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 1); in check_new_node()
318 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS); in check_new_node()
322 MT_BUG_ON(mt, mas_alloc_req(&mas) != 1); in check_new_node()
323 MT_BUG_ON(mt, mas.node != MA_ERROR(-ENOMEM)); in check_new_node()
324 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
325 MT_BUG_ON(mt, mas_alloc_req(&mas)); in check_new_node()
326 MT_BUG_ON(mt, mas.alloc->node_count != 1); in check_new_node()
327 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 2); in check_new_node()
329 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
330 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 1); in check_new_node()
331 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS); in check_new_node()
333 MT_BUG_ON(mt, mas.alloc->node_count != 1); in check_new_node()
334 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 2); in check_new_node()
336 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
340 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
343 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
396 MT_BUG_ON(mt, mas_allocated(&mas) != 5); in check_new_node()
401 MT_BUG_ON(mt, mas_allocated(&mas) != 10); in check_new_node()
407 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS - 1); in check_new_node()
412 MT_BUG_ON(mt, mas_allocated(&mas) != 10 + MAPLE_ALLOC_SLOTS - 1); in check_new_node()
415 mtree_unlock(mt); in check_new_node()
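The check_new_node() hits above poke at internal allocation bookkeeping (mas_allocated(), mas_alloc_req(), mas_nomem()) that the test reaches by building the maple tree sources directly. A minimal sketch of the public pattern those internals sit behind; example_tree and example_store() are hypothetical names, not from the listed file, and later sketches in this listing assume the same includes and kernel context:

```c
#include <linux/maple_tree.h>

static DEFINE_MTREE(example_tree);

/*
 * Store entry over [first, last].  mas_store_gfp() allocates the nodes it
 * needs and retries on -ENOMEM via mas_nomem(), which is the machinery the
 * check_new_node() assertions above exercise directly.
 */
static int example_store(unsigned long first, unsigned long last, void *entry)
{
	MA_STATE(mas, &example_tree, first, last);
	int ret;

	mtree_lock(&example_tree);
	ret = mas_store_gfp(&mas, entry, GFP_KERNEL);
	mtree_unlock(&example_tree);

	return ret;
}
```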
421 static noinline void check_erase(struct maple_tree *mt, unsigned long index, in check_erase() argument
424 MT_BUG_ON(mt, mtree_test_erase(mt, index) != ptr); in check_erase()
427 #define erase_check_load(mt, i) check_load(mt, set[i], entry[i%2]) argument
428 #define erase_check_insert(mt, i) check_insert(mt, set[i], entry[i%2]) argument
429 #define erase_check_erase(mt, i) check_erase(mt, set[i], entry[i%2]) argument
431 static noinline void check_erase_testset(struct maple_tree *mt) in check_erase_testset() argument
449 void *entry[2] = { ptr, mt }; in check_erase_testset()
454 mt_set_in_rcu(mt); in check_erase_testset()
456 erase_check_insert(mt, i); in check_erase_testset()
458 erase_check_load(mt, i); in check_erase_testset()
461 erase_check_erase(mt, 1); in check_erase_testset()
462 erase_check_load(mt, 0); in check_erase_testset()
463 check_load(mt, set[1], NULL); in check_erase_testset()
465 erase_check_load(mt, i); in check_erase_testset()
468 erase_check_erase(mt, 2); in check_erase_testset()
469 erase_check_load(mt, 0); in check_erase_testset()
470 check_load(mt, set[1], NULL); in check_erase_testset()
471 check_load(mt, set[2], NULL); in check_erase_testset()
473 erase_check_insert(mt, 1); in check_erase_testset()
474 erase_check_insert(mt, 2); in check_erase_testset()
477 erase_check_load(mt, i); in check_erase_testset()
480 erase_check_load(mt, 3); in check_erase_testset()
481 erase_check_erase(mt, 1); in check_erase_testset()
482 erase_check_load(mt, 0); in check_erase_testset()
483 check_load(mt, set[1], NULL); in check_erase_testset()
485 erase_check_load(mt, i); in check_erase_testset()
491 root_node = mt->ma_root; in check_erase_testset()
492 erase_check_insert(mt, 1); in check_erase_testset()
494 erase_check_load(mt, 0); in check_erase_testset()
495 check_load(mt, 5016, NULL); in check_erase_testset()
496 erase_check_load(mt, 1); in check_erase_testset()
497 check_load(mt, 5013, NULL); in check_erase_testset()
498 erase_check_load(mt, 2); in check_erase_testset()
499 check_load(mt, 5018, NULL); in check_erase_testset()
500 erase_check_load(mt, 3); in check_erase_testset()
502 erase_check_erase(mt, 2); /* erase 5017 to check append */ in check_erase_testset()
503 erase_check_load(mt, 0); in check_erase_testset()
504 check_load(mt, 5016, NULL); in check_erase_testset()
505 erase_check_load(mt, 1); in check_erase_testset()
506 check_load(mt, 5013, NULL); in check_erase_testset()
507 check_load(mt, set[2], NULL); in check_erase_testset()
508 check_load(mt, 5018, NULL); in check_erase_testset()
510 erase_check_load(mt, 3); in check_erase_testset()
512 root_node = mt->ma_root; in check_erase_testset()
513 erase_check_insert(mt, 2); in check_erase_testset()
515 erase_check_load(mt, 0); in check_erase_testset()
516 check_load(mt, 5016, NULL); in check_erase_testset()
517 erase_check_load(mt, 1); in check_erase_testset()
518 check_load(mt, 5013, NULL); in check_erase_testset()
519 erase_check_load(mt, 2); in check_erase_testset()
520 check_load(mt, 5018, NULL); in check_erase_testset()
521 erase_check_load(mt, 3); in check_erase_testset()
524 erase_check_erase(mt, 2); /* erase 5017 to check append */ in check_erase_testset()
525 erase_check_load(mt, 0); in check_erase_testset()
526 check_load(mt, 5016, NULL); in check_erase_testset()
527 check_load(mt, set[2], NULL); in check_erase_testset()
528 erase_check_erase(mt, 0); /* erase 5015 to check append */ in check_erase_testset()
529 check_load(mt, set[0], NULL); in check_erase_testset()
530 check_load(mt, 5016, NULL); in check_erase_testset()
531 erase_check_insert(mt, 4); /* 1000 < Should not split. */ in check_erase_testset()
532 check_load(mt, set[0], NULL); in check_erase_testset()
533 check_load(mt, 5016, NULL); in check_erase_testset()
534 erase_check_load(mt, 1); in check_erase_testset()
535 check_load(mt, 5013, NULL); in check_erase_testset()
536 check_load(mt, set[2], NULL); in check_erase_testset()
537 check_load(mt, 5018, NULL); in check_erase_testset()
538 erase_check_load(mt, 4); in check_erase_testset()
539 check_load(mt, 999, NULL); in check_erase_testset()
540 check_load(mt, 1001, NULL); in check_erase_testset()
541 erase_check_load(mt, 4); in check_erase_testset()
542 if (mt_in_rcu(mt)) in check_erase_testset()
543 MT_BUG_ON(mt, root_node == mt->ma_root); in check_erase_testset()
545 MT_BUG_ON(mt, root_node != mt->ma_root); in check_erase_testset()
548 MT_BUG_ON(mt, !mte_is_leaf(mt->ma_root)); in check_erase_testset()
552 erase_check_insert(mt, 0); in check_erase_testset()
553 erase_check_insert(mt, 2); in check_erase_testset()
556 erase_check_insert(mt, i); in check_erase_testset()
558 erase_check_load(mt, j); in check_erase_testset()
561 erase_check_erase(mt, 14); /*6015 */ in check_erase_testset()
564 check_load(mt, set[i], NULL); in check_erase_testset()
566 erase_check_load(mt, i); in check_erase_testset()
568 erase_check_erase(mt, 16); /*7002 */ in check_erase_testset()
571 check_load(mt, set[i], NULL); in check_erase_testset()
573 erase_check_load(mt, i); in check_erase_testset()
578 erase_check_erase(mt, 13); /*6012 */ in check_erase_testset()
581 check_load(mt, set[i], NULL); in check_erase_testset()
583 erase_check_load(mt, i); in check_erase_testset()
586 erase_check_erase(mt, 15); /*7003 */ in check_erase_testset()
589 check_load(mt, set[i], NULL); in check_erase_testset()
591 erase_check_load(mt, i); in check_erase_testset()
595 erase_check_erase(mt, 17); /*7008 *should* cause coalesce. */ in check_erase_testset()
598 check_load(mt, set[i], NULL); in check_erase_testset()
600 erase_check_load(mt, i); in check_erase_testset()
603 erase_check_erase(mt, 18); /*7012 */ in check_erase_testset()
606 check_load(mt, set[i], NULL); in check_erase_testset()
608 erase_check_load(mt, i); in check_erase_testset()
612 erase_check_erase(mt, 19); /*7015 */ in check_erase_testset()
615 check_load(mt, set[i], NULL); in check_erase_testset()
617 erase_check_load(mt, i); in check_erase_testset()
620 erase_check_erase(mt, 20); /*8003 */ in check_erase_testset()
623 check_load(mt, set[i], NULL); in check_erase_testset()
625 erase_check_load(mt, i); in check_erase_testset()
628 erase_check_erase(mt, 21); /*8002 */ in check_erase_testset()
631 check_load(mt, set[i], NULL); in check_erase_testset()
633 erase_check_load(mt, i); in check_erase_testset()
637 erase_check_erase(mt, 22); /*8008 */ in check_erase_testset()
640 check_load(mt, set[i], NULL); in check_erase_testset()
642 erase_check_load(mt, i); in check_erase_testset()
645 erase_check_erase(mt, i); in check_erase_testset()
649 check_load(mt, set[i], NULL); in check_erase_testset()
651 erase_check_load(mt, i); in check_erase_testset()
657 erase_check_insert(mt, i); in check_erase_testset()
661 erase_check_erase(mt, i); in check_erase_testset()
664 erase_check_load(mt, j); in check_erase_testset()
666 check_load(mt, set[j], NULL); in check_erase_testset()
671 erase_check_erase(mt, i); in check_erase_testset()
674 erase_check_load(mt, j); in check_erase_testset()
676 check_load(mt, set[j], NULL); in check_erase_testset()
679 erase_check_insert(mt, 8); in check_erase_testset()
680 erase_check_insert(mt, 9); in check_erase_testset()
681 erase_check_erase(mt, 8); in check_erase_testset()
688 #define erase_check_store_range(mt, a, i, ptr) mtree_test_store_range(mt, \ argument
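The erase_check_*() wrappers above reduce to plain insert, erase, and load calls on the tree. A hedged sketch of that round trip (example_round_trip() and the index 5015 are illustrative, and xa_mk_value() comes from <linux/xarray.h>):

```c
static void example_round_trip(struct maple_tree *mt)
{
	void *old;

	/* insert, erase, then confirm the slot reads back as NULL */
	mtree_insert(mt, 5015, xa_mk_value(5015), GFP_KERNEL);
	old = mtree_erase(mt, 5015);            /* returns the erased entry */
	WARN_ON(old != xa_mk_value(5015));
	WARN_ON(mtree_load(mt, 5015) != NULL);
}
```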
935 static noinline void check_erase2_testset(struct maple_tree *mt, in check_erase2_testset() argument
944 MA_STATE(mas, mt, 0, 0); in check_erase2_testset()
951 MA_STATE(mas_start, mt, set[i+1], set[i+1]); in check_erase2_testset()
952 MA_STATE(mas_end, mt, set[i+2], set[i+2]); in check_erase2_testset()
979 erase_check_store_range(mt, set, i + 1, value); in check_erase2_testset()
1007 erase_check_store_range(mt, set, i + 1, value); in check_erase2_testset()
1012 check_erase(mt, set[i+1], xa_mk_value(set[i+1])); in check_erase2_testset()
1016 mt_validate(mt); in check_erase2_testset()
1018 MT_BUG_ON(mt, !mt_height(mt)); in check_erase2_testset()
1020 mt_dump(mt); in check_erase2_testset()
1028 mt_for_each(mt, foo, addr, ULONG_MAX) { in check_erase2_testset()
1041 MT_BUG_ON(mt, check != entry_count); in check_erase2_testset()
1054 MT_BUG_ON(mt, 1); in check_erase2_testset()
1069 mt_validate(mt); in check_erase2_testset()
1072 MT_BUG_ON(mt, check != entry_count); in check_erase2_testset()
1074 MT_BUG_ON(mt, mtree_load(mas.tree, 0) != NULL); in check_erase2_testset()
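check_erase2_testset() re-walks the whole tree after each replayed operation and compares the entry count. The mt_for_each() counting loop it relies on looks roughly like this (example_count() is an illustrative name):

```c
static unsigned long example_count(struct maple_tree *mt)
{
	unsigned long index = 0;
	unsigned long count = 0;
	void *entry;

	/* mt_find()/mt_find_after() behind this macro take the RCU read
	 * lock internally for each lookup, so a bare counting walk is safe. */
	mt_for_each(mt, entry, index, ULONG_MAX)
		count++;

	return count;
}
```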
1080 static noinline void check_erase2_sets(struct maple_tree *mt) in check_erase2_sets() argument
33852 MA_STATE(mas, mt, 0, 0); in check_erase2_sets()
33855 check_erase2_testset(mt, set, ARRAY_SIZE(set)); in check_erase2_sets()
33857 mtree_destroy(mt); in check_erase2_sets()
33859 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33860 check_erase2_testset(mt, set2, ARRAY_SIZE(set2)); in check_erase2_sets()
33862 MT_BUG_ON(mt, !!mt_find(mt, &start, 140735933906943UL)); in check_erase2_sets()
33863 mtree_destroy(mt); in check_erase2_sets()
33866 mt_init_flags(mt, 0); in check_erase2_sets()
33867 check_erase2_testset(mt, set3, ARRAY_SIZE(set3)); in check_erase2_sets()
33869 mtree_destroy(mt); in check_erase2_sets()
33871 mt_init_flags(mt, 0); in check_erase2_sets()
33872 check_erase2_testset(mt, set4, ARRAY_SIZE(set4)); in check_erase2_sets()
33880 mtree_destroy(mt); in check_erase2_sets()
33882 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33884 check_erase2_testset(mt, set5, ARRAY_SIZE(set5)); in check_erase2_sets()
33887 mtree_destroy(mt); in check_erase2_sets()
33889 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33890 check_erase2_testset(mt, set6, ARRAY_SIZE(set6)); in check_erase2_sets()
33892 mtree_destroy(mt); in check_erase2_sets()
33894 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33895 check_erase2_testset(mt, set7, ARRAY_SIZE(set7)); in check_erase2_sets()
33897 mtree_destroy(mt); in check_erase2_sets()
33899 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33900 check_erase2_testset(mt, set8, ARRAY_SIZE(set8)); in check_erase2_sets()
33902 mtree_destroy(mt); in check_erase2_sets()
33904 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33905 check_erase2_testset(mt, set9, ARRAY_SIZE(set9)); in check_erase2_sets()
33907 mtree_destroy(mt); in check_erase2_sets()
33909 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33910 check_erase2_testset(mt, set10, ARRAY_SIZE(set10)); in check_erase2_sets()
33912 mtree_destroy(mt); in check_erase2_sets()
33915 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33916 check_erase2_testset(mt, set11, ARRAY_SIZE(set11)); in check_erase2_sets()
33919 MT_BUG_ON(mt, mas.last != 140014592573439); in check_erase2_sets()
33920 mtree_destroy(mt); in check_erase2_sets()
33923 mas.tree = mt; in check_erase2_sets()
33926 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33927 check_erase2_testset(mt, set12, ARRAY_SIZE(set12)); in check_erase2_sets()
33935 mtree_destroy(mt); in check_erase2_sets()
33938 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33939 check_erase2_testset(mt, set13, ARRAY_SIZE(set13)); in check_erase2_sets()
33940 mtree_erase(mt, 140373516443648); in check_erase2_sets()
33944 mtree_destroy(mt); in check_erase2_sets()
33945 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33946 check_erase2_testset(mt, set14, ARRAY_SIZE(set14)); in check_erase2_sets()
33948 mtree_destroy(mt); in check_erase2_sets()
33950 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33951 check_erase2_testset(mt, set15, ARRAY_SIZE(set15)); in check_erase2_sets()
33953 mtree_destroy(mt); in check_erase2_sets()
33958 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33959 check_erase2_testset(mt, set16, ARRAY_SIZE(set16)); in check_erase2_sets()
33962 MT_BUG_ON(mt, mas.last != 139921865547775); in check_erase2_sets()
33964 mtree_destroy(mt); in check_erase2_sets()
33973 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33974 check_erase2_testset(mt, set17, ARRAY_SIZE(set17)); in check_erase2_sets()
33977 MT_BUG_ON(mt, mas.last != 139953197322239); in check_erase2_sets()
33980 mtree_destroy(mt); in check_erase2_sets()
33989 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33990 check_erase2_testset(mt, set18, ARRAY_SIZE(set18)); in check_erase2_sets()
33993 MT_BUG_ON(mt, mas.last != 140222968475647); in check_erase2_sets()
33996 mtree_destroy(mt); in check_erase2_sets()
34007 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34008 check_erase2_testset(mt, set19, ARRAY_SIZE(set19)); in check_erase2_sets()
34012 MT_BUG_ON(mt, entry != xa_mk_value(140656779083776)); in check_erase2_sets()
34014 MT_BUG_ON(mt, entry != xa_mk_value(140656766251008)); in check_erase2_sets()
34016 mtree_destroy(mt); in check_erase2_sets()
34023 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34024 check_erase2_testset(mt, set20, ARRAY_SIZE(set20)); in check_erase2_sets()
34026 check_load(mt, 94849009414144, NULL); in check_erase2_sets()
34028 mtree_destroy(mt); in check_erase2_sets()
34031 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34032 check_erase2_testset(mt, set21, ARRAY_SIZE(set21)); in check_erase2_sets()
34034 mt_validate(mt); in check_erase2_sets()
34036 mtree_destroy(mt); in check_erase2_sets()
34039 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34040 check_erase2_testset(mt, set22, ARRAY_SIZE(set22)); in check_erase2_sets()
34042 mt_validate(mt); in check_erase2_sets()
34043 ptr = mtree_load(mt, 140551363362816); in check_erase2_sets()
34044 MT_BUG_ON(mt, ptr == mtree_load(mt, 140551363420159)); in check_erase2_sets()
34046 mtree_destroy(mt); in check_erase2_sets()
34049 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34050 check_erase2_testset(mt, set23, ARRAY_SIZE(set23)); in check_erase2_sets()
34053 mt_validate(mt); in check_erase2_sets()
34054 mtree_destroy(mt); in check_erase2_sets()
34058 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34059 check_erase2_testset(mt, set24, ARRAY_SIZE(set24)); in check_erase2_sets()
34062 mt_validate(mt); in check_erase2_sets()
34063 mtree_destroy(mt); in check_erase2_sets()
34066 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34067 check_erase2_testset(mt, set25, ARRAY_SIZE(set25)); in check_erase2_sets()
34070 mt_validate(mt); in check_erase2_sets()
34071 mtree_destroy(mt); in check_erase2_sets()
34076 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34077 check_erase2_testset(mt, set26, ARRAY_SIZE(set26)); in check_erase2_sets()
34080 MT_BUG_ON(mt, mas.last != 140109040959487); in check_erase2_sets()
34082 mt_validate(mt); in check_erase2_sets()
34083 mtree_destroy(mt); in check_erase2_sets()
34088 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34089 check_erase2_testset(mt, set27, ARRAY_SIZE(set27)); in check_erase2_sets()
34091 MT_BUG_ON(mt, 0 != mtree_load(mt, 140415537422336)); in check_erase2_sets()
34093 mt_validate(mt); in check_erase2_sets()
34094 mtree_destroy(mt); in check_erase2_sets()
34098 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34099 check_erase2_testset(mt, set28, ARRAY_SIZE(set28)); in check_erase2_sets()
34104 MT_BUG_ON(mt, mas.index != 139918401601536); in check_erase2_sets()
34106 mt_validate(mt); in check_erase2_sets()
34107 mtree_destroy(mt); in check_erase2_sets()
34114 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34115 check_erase2_testset(mt, set29, ARRAY_SIZE(set29)); in check_erase2_sets()
34118 mt_validate(mt); in check_erase2_sets()
34119 mtree_destroy(mt); in check_erase2_sets()
34127 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34128 check_erase2_testset(mt, set30, ARRAY_SIZE(set30)); in check_erase2_sets()
34131 mt_validate(mt); in check_erase2_sets()
34132 mtree_destroy(mt); in check_erase2_sets()
34140 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34141 check_erase2_testset(mt, set31, ARRAY_SIZE(set31)); in check_erase2_sets()
34144 mt_validate(mt); in check_erase2_sets()
34145 mtree_destroy(mt); in check_erase2_sets()
34149 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34150 check_erase2_testset(mt, set32, ARRAY_SIZE(set32)); in check_erase2_sets()
34153 mt_validate(mt); in check_erase2_sets()
34154 mtree_destroy(mt); in check_erase2_sets()
34169 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34170 check_erase2_testset(mt, set33, ARRAY_SIZE(set33)); in check_erase2_sets()
34173 MT_BUG_ON(mt, mas.last != 140583003750399); in check_erase2_sets()
34175 mt_validate(mt); in check_erase2_sets()
34176 mtree_destroy(mt); in check_erase2_sets()
34184 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34185 check_erase2_testset(mt, set34, ARRAY_SIZE(set34)); in check_erase2_sets()
34188 mt_validate(mt); in check_erase2_sets()
34189 mtree_destroy(mt); in check_erase2_sets()
34194 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34195 check_erase2_testset(mt, set35, ARRAY_SIZE(set35)); in check_erase2_sets()
34198 mt_validate(mt); in check_erase2_sets()
34199 mtree_destroy(mt); in check_erase2_sets()
34204 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34205 check_erase2_testset(mt, set36, ARRAY_SIZE(set36)); in check_erase2_sets()
34208 mt_validate(mt); in check_erase2_sets()
34209 mtree_destroy(mt); in check_erase2_sets()
34212 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34213 check_erase2_testset(mt, set37, ARRAY_SIZE(set37)); in check_erase2_sets()
34215 MT_BUG_ON(mt, 0 != mtree_load(mt, 94637033459712)); in check_erase2_sets()
34216 mt_validate(mt); in check_erase2_sets()
34217 mtree_destroy(mt); in check_erase2_sets()
34220 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34221 check_erase2_testset(mt, set38, ARRAY_SIZE(set38)); in check_erase2_sets()
34223 MT_BUG_ON(mt, 0 != mtree_load(mt, 94637033459712)); in check_erase2_sets()
34224 mt_validate(mt); in check_erase2_sets()
34225 mtree_destroy(mt); in check_erase2_sets()
34228 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34229 check_erase2_testset(mt, set39, ARRAY_SIZE(set39)); in check_erase2_sets()
34231 mt_validate(mt); in check_erase2_sets()
34232 mtree_destroy(mt); in check_erase2_sets()
34235 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34236 check_erase2_testset(mt, set40, ARRAY_SIZE(set40)); in check_erase2_sets()
34238 mt_validate(mt); in check_erase2_sets()
34239 mtree_destroy(mt); in check_erase2_sets()
34242 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34243 check_erase2_testset(mt, set41, ARRAY_SIZE(set41)); in check_erase2_sets()
34245 mt_validate(mt); in check_erase2_sets()
34246 mtree_destroy(mt); in check_erase2_sets()
34251 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34252 check_erase2_testset(mt, set42, ARRAY_SIZE(set42)); in check_erase2_sets()
34255 MT_BUG_ON(mt, mas.last != 4041211903); in check_erase2_sets()
34257 mt_validate(mt); in check_erase2_sets()
34258 mtree_destroy(mt); in check_erase2_sets()
34263 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34264 check_erase2_testset(mt, set43, ARRAY_SIZE(set43)); in check_erase2_sets()
34267 mt_validate(mt); in check_erase2_sets()
34268 mtree_destroy(mt); in check_erase2_sets()
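Every set in check_erase2_sets() runs the same init, replay, validate, destroy cycle. A compressed sketch of that cycle (the replay step is left as a placeholder comment):

```c
static void example_replay_cycle(struct maple_tree *mt)
{
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); /* fresh allocation-range tree */
	/* ... replay one recorded store/erase set (check_erase2_testset()) ... */
	mt_validate(mt);                         /* debug-build consistency check */
	mtree_destroy(mt);                       /* free every node; tree is empty again */
}
```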
34307 MA_STATE(mas, test->mt, 0, 0); in rcu_reader_fwd()
34417 MA_STATE(mas, test->mt, 0, 0); in rcu_reader_rev()
34479 mt_dump(test->mt); in rcu_reader_rev()
34542 static void rcu_stress_rev(struct maple_tree *mt, struct rcu_test_struct2 *test, in rcu_stress_rev() argument
34562 mtree_store_range(mt, start, end, in rcu_stress_rev()
34572 mtree_store_range(mt, start, end, in rcu_stress_rev()
34583 mtree_store_range(mt, start, end, NULL, GFP_KERNEL); in rcu_stress_rev()
34590 mtree_store_range(mt, start, end, in rcu_stress_rev()
34601 static void rcu_stress_fwd(struct maple_tree *mt, struct rcu_test_struct2 *test, in rcu_stress_fwd() argument
34619 mtree_store_range(mt, start, end, in rcu_stress_fwd()
34629 mtree_store_range(mt, start, end, in rcu_stress_fwd()
34640 mtree_store_range(mt, start, end, NULL, GFP_KERNEL); in rcu_stress_fwd()
34647 mtree_store_range(mt, start, end, in rcu_stress_fwd()
34669 static void rcu_stress(struct maple_tree *mt, bool forward) in rcu_stress() argument
34679 test.mt = mt; in rcu_stress()
34691 mtree_store_range(mt, seed, r, in rcu_stress()
34723 mtree_store_range(mt, test.index[add], test.last[add], in rcu_stress()
34727 mt_set_in_rcu(mt); in rcu_stress()
34733 rcu_stress_fwd(mt, &test, count, test_reader); in rcu_stress()
34735 rcu_stress_rev(mt, &test, count, test_reader); in rcu_stress()
34741 mt_validate(mt); in rcu_stress()
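The rcu_stress_*() writers store and wipe ranges while reader threads walk the same tree concurrently; the reader-side contract is plain RCU once mt_set_in_rcu() has been called. A minimal sketch of that reader side (example_rcu_read() is illustrative):

```c
static void example_rcu_read(struct maple_tree *mt, unsigned long index)
{
	void *entry;

	rcu_read_lock();
	entry = mtree_load(mt, index);
	/* use 'entry' here: it is only guaranteed stable inside the RCU
	 * read-side critical section while writers run concurrently */
	(void)entry;
	rcu_read_unlock();
}
```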
34746 struct maple_tree *mt; /* the maple tree */ member
34825 entry = mtree_load(test->mt, test->index); in rcu_val()
34826 MT_BUG_ON(test->mt, eval_rcu_entry(test, entry, &update_2, in rcu_val()
34846 MA_STATE(mas, test->mt, test->range_start, test->range_start); in rcu_loop()
34867 MT_BUG_ON(test->mt, entry != expected); in rcu_loop()
34875 MT_BUG_ON(test->mt, eval_rcu_entry(test, entry, in rcu_loop()
34888 void run_check_rcu(struct maple_tree *mt, struct rcu_test_struct *vals) in run_check_rcu() argument
34895 mt_set_in_rcu(mt); in run_check_rcu()
34896 MT_BUG_ON(mt, !mt_in_rcu(mt)); in run_check_rcu()
34911 mtree_store_range(mt, vals->index, vals->last, vals->entry2, in run_check_rcu()
34917 MT_BUG_ON(mt, !vals->seen_entry2); in run_check_rcu()
34921 void run_check_rcu_slowread(struct maple_tree *mt, struct rcu_test_struct *vals) in run_check_rcu_slowread() argument
34929 mt_set_in_rcu(mt); in run_check_rcu_slowread()
34930 MT_BUG_ON(mt, !mt_in_rcu(mt)); in run_check_rcu_slowread()
34947 mtree_store(mt, index, in run_check_rcu_slowread()
34958 MT_BUG_ON(mt, !vals->seen_entry2); in run_check_rcu_slowread()
34959 MT_BUG_ON(mt, !vals->seen_entry3); in run_check_rcu_slowread()
34960 MT_BUG_ON(mt, !vals->seen_both); in run_check_rcu_slowread()
34962 static noinline void check_rcu_simulated(struct maple_tree *mt) in check_rcu_simulated() argument
34968 MA_STATE(mas_writer, mt, 0, 0); in check_rcu_simulated()
34969 MA_STATE(mas_reader, mt, target, target); in check_rcu_simulated()
34973 mt_set_in_rcu(mt); in check_rcu_simulated()
34985 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
34989 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35002 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35006 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35019 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35023 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35036 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35040 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35052 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35056 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35068 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35072 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35093 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35098 MT_BUG_ON(mt, mas_next(&mas_reader, ULONG_MAX) != xa_mk_value(val)); in check_rcu_simulated()
35112 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35117 MT_BUG_ON(mt, mas_prev(&mas_reader, 0) != xa_mk_value(val)); in check_rcu_simulated()
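check_rcu_simulated() interleaves a writer and a reader maple state by hand; the reader side simply re-walks from its ma_state under the RCU read lock each time. A minimal sketch (example_reader_walk() and target are illustrative):

```c
static void example_reader_walk(struct maple_tree *mt, unsigned long target)
{
	MA_STATE(mas_reader, mt, target, target);
	void *entry;

	rcu_read_lock();
	entry = mas_walk(&mas_reader);  /* walk down to 'target', return its entry */
	(void)entry;
	rcu_read_unlock();
}
```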
35123 static noinline void check_rcu_threaded(struct maple_tree *mt) in check_rcu_threaded() argument
35133 mtree_store_range(mt, i*10, i*10 + 5, in check_rcu_threaded()
35137 vals.mt = mt; in check_rcu_threaded()
35148 run_check_rcu(mt, &vals); in check_rcu_threaded()
35149 mtree_destroy(mt); in check_rcu_threaded()
35151 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35153 mtree_store_range(mt, i*10, i*10 + 5, in check_rcu_threaded()
35160 vals.mt = mt; in check_rcu_threaded()
35169 run_check_rcu(mt, &vals); in check_rcu_threaded()
35170 mtree_destroy(mt); in check_rcu_threaded()
35174 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35175 rcu_stress(mt, true); in check_rcu_threaded()
35176 mtree_destroy(mt); in check_rcu_threaded()
35179 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35180 rcu_stress(mt, false); in check_rcu_threaded()
35181 mtree_destroy(mt); in check_rcu_threaded()
35184 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35186 mtree_store_range(mt, i*10, i*10 + 5, in check_rcu_threaded()
35193 vals.mt = mt; in check_rcu_threaded()
35214 run_check_rcu_slowread(mt, &vals); in check_rcu_threaded()
35264 static void check_dfs_preorder(struct maple_tree *mt) in check_dfs_preorder() argument
35268 MA_STATE(mas, mt, 0, 0); in check_dfs_preorder()
35275 check_seq(mt, max, false); in check_dfs_preorder()
35280 MT_BUG_ON(mt, count != e); in check_dfs_preorder()
35281 mtree_destroy(mt); in check_dfs_preorder()
35283 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_dfs_preorder()
35289 check_seq(mt, max, false); in check_dfs_preorder()
35295 MT_BUG_ON(mt, count != e); in check_dfs_preorder()
35296 mtree_destroy(mt); in check_dfs_preorder()
35298 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_dfs_preorder()
35301 check_rev_seq(mt, max, false); in check_dfs_preorder()
35307 MT_BUG_ON(mt, count != e); in check_dfs_preorder()
35308 mtree_destroy(mt); in check_dfs_preorder()
35310 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_dfs_preorder()
35318 MT_BUG_ON(mt, mas_is_err(&mas)); in check_dfs_preorder()
35332 static noinline void check_prealloc(struct maple_tree *mt) in check_prealloc() argument
35339 MA_STATE(mas, mt, 10, 20); in check_prealloc()
35343 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_prealloc()
35345 MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0); in check_prealloc()
35348 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35349 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35352 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35354 MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0); in check_prealloc()
35357 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35358 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35359 MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0); in check_prealloc()
35362 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35365 MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0); in check_prealloc()
35368 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35369 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35371 MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1); in check_prealloc()
35373 MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0); in check_prealloc()
35376 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35378 MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0); in check_prealloc()
35381 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35382 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35384 MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1); in check_prealloc()
35385 MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0); in check_prealloc()
35388 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35391 MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0); in check_prealloc()
35394 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35395 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35397 MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1); in check_prealloc()
35399 MT_BUG_ON(mt, mas_allocated(&mas) != allocated); in check_prealloc()
35400 MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0); in check_prealloc()
35403 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35405 MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0); in check_prealloc()
35408 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35409 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35411 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35413 MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0); in check_prealloc()
35416 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35417 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35419 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35420 MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0); in check_prealloc()
35423 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35424 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35427 MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0); in check_prealloc()
35430 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35431 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35433 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35435 MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL & GFP_NOWAIT) == 0); in check_prealloc()
35438 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35442 MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0); in check_prealloc()
35445 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35446 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35448 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35450 MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL & GFP_NOWAIT) == 0); in check_prealloc()
35453 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
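check_prealloc() repeatedly confirms that mas_preallocate() reserves 1 + height * 3 nodes and that mas_destroy() hands them back. A sketch of the pattern it is measuring, using the two-argument mas_preallocate() form this listing shows (example_prealloc_store() is an illustrative name):

```c
static int example_prealloc_store(struct maple_tree *mt, void *entry)
{
	MA_STATE(mas, mt, 10, 20);
	int ret;

	mtree_lock(mt);
	ret = mas_preallocate(&mas, GFP_KERNEL); /* reserve worst-case nodes */
	if (ret)
		goto unlock;                     /* -ENOMEM: nothing was modified */

	mas_store_prealloc(&mas, entry);         /* cannot fail; consumes the prealloc */
unlock:
	mtree_unlock(mt);
	return ret;
}
```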
35458 static noinline void check_spanning_write(struct maple_tree *mt) in check_spanning_write() argument
35461 MA_STATE(mas, mt, 1200, 2380); in check_spanning_write()
35464 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35466 mtree_lock(mt); in check_spanning_write()
35469 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35470 mtree_unlock(mt); in check_spanning_write()
35471 mtree_destroy(mt); in check_spanning_write()
35474 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35476 mtree_lock(mt); in check_spanning_write()
35480 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35481 mtree_unlock(mt); in check_spanning_write()
35482 mt_validate(mt); in check_spanning_write()
35483 mtree_destroy(mt); in check_spanning_write()
35486 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35488 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35491 mtree_lock(mt); in check_spanning_write()
35494 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35495 mtree_unlock(mt); in check_spanning_write()
35496 mtree_destroy(mt); in check_spanning_write()
35499 mt_init_flags(mt, 0); in check_spanning_write()
35501 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35504 mtree_lock(mt); in check_spanning_write()
35507 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35508 mtree_unlock(mt); in check_spanning_write()
35509 mtree_destroy(mt); in check_spanning_write()
35512 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35514 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35517 mtree_lock(mt); in check_spanning_write()
35520 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35521 mtree_unlock(mt); in check_spanning_write()
35522 mtree_destroy(mt); in check_spanning_write()
35525 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35527 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35530 mtree_lock(mt); in check_spanning_write()
35533 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35534 mtree_unlock(mt); in check_spanning_write()
35535 mtree_destroy(mt); in check_spanning_write()
35538 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35540 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35543 mtree_lock(mt); in check_spanning_write()
35546 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35547 mtree_unlock(mt); in check_spanning_write()
35548 mtree_destroy(mt); in check_spanning_write()
35554 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35556 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35559 mtree_lock(mt); in check_spanning_write()
35562 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35563 mtree_unlock(mt); in check_spanning_write()
35564 mtree_destroy(mt); in check_spanning_write()
35570 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35572 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35574 mtree_lock(mt); in check_spanning_write()
35577 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35578 mtree_unlock(mt); in check_spanning_write()
35579 mtree_destroy(mt); in check_spanning_write()
35582 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35584 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35585 mtree_lock(mt); in check_spanning_write()
35596 MT_BUG_ON(mt, (mas_data_end(&mas)) != mt_slot_count(mas.node) - 1); in check_spanning_write()
35599 mtree_unlock(mt); in check_spanning_write()
35600 mtree_destroy(mt); in check_spanning_write()
35603 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35605 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35607 mtree_lock(mt); in check_spanning_write()
35610 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35611 mtree_unlock(mt); in check_spanning_write()
35612 mtree_destroy(mt); in check_spanning_write()
35614 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35616 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35618 mtree_lock(mt); in check_spanning_write()
35621 mtree_unlock(mt); in check_spanning_write()
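Each check_spanning_write() block stores NULL over a range such as 1200-2380 that crosses node boundaries, then walks back in to confirm the whole span was cleared. A hedged sketch of that store-and-verify step (example_span_clear() is illustrative):

```c
static void example_span_clear(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 1200, 2380);

	mtree_lock(mt);
	mas_store_gfp(&mas, NULL, GFP_KERNEL); /* one spanning write wipes 1200-2380 */
	mas_set(&mas, 1205);
	WARN_ON(mas_walk(&mas) != NULL);       /* anything inside the span reads NULL */
	mtree_unlock(mt);
}
```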
35626 static noinline void check_null_expand(struct maple_tree *mt) in check_null_expand() argument
35630 MA_STATE(mas, mt, 959, 959); in check_null_expand()
35633 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_null_expand()
35640 MT_BUG_ON(mt, mtree_load(mt, 963) != NULL); in check_null_expand()
35641 MT_BUG_ON(mt, data_end != mas_data_end(&mas)); in check_null_expand()
35649 MT_BUG_ON(mt, mtree_load(mt, 884) != NULL); in check_null_expand()
35650 MT_BUG_ON(mt, mtree_load(mt, 889) != NULL); in check_null_expand()
35652 MT_BUG_ON(mt, data_end != mas_data_end(&mas)); in check_null_expand()
35661 MT_BUG_ON(mt, mtree_load(mt, 899) != NULL); in check_null_expand()
35662 MT_BUG_ON(mt, mtree_load(mt, 900) != NULL); in check_null_expand()
35663 MT_BUG_ON(mt, mtree_load(mt, 905) != NULL); in check_null_expand()
35664 MT_BUG_ON(mt, mtree_load(mt, 906) != NULL); in check_null_expand()
35666 MT_BUG_ON(mt, data_end - 2 != mas_data_end(&mas)); in check_null_expand()
35675 MT_BUG_ON(mt, mtree_load(mt, 809) != NULL); in check_null_expand()
35676 MT_BUG_ON(mt, mtree_load(mt, 810) != NULL); in check_null_expand()
35677 MT_BUG_ON(mt, mtree_load(mt, 825) != NULL); in check_null_expand()
35678 MT_BUG_ON(mt, mtree_load(mt, 826) != NULL); in check_null_expand()
35680 MT_BUG_ON(mt, data_end - 4 != mas_data_end(&mas)); in check_null_expand()
35687 static noinline void check_nomem(struct maple_tree *mt) in check_nomem() argument
35689 MA_STATE(ms, mt, 1, 1); in check_nomem()
35691 MT_BUG_ON(mt, !mtree_empty(mt)); in check_nomem()
35696 MT_BUG_ON(mt, mtree_insert(mt, 1, &ms, GFP_ATOMIC) != -ENOMEM); in check_nomem()
35698 MT_BUG_ON(mt, mtree_insert(mt, 0, &ms, GFP_ATOMIC) != 0); in check_nomem()
35708 mtree_lock(mt); in check_nomem()
35710 MT_BUG_ON(mt, ms.node != MA_ERROR(-ENOMEM)); in check_nomem()
35712 MT_BUG_ON(mt, ms.node != MAS_START); in check_nomem()
35713 mtree_unlock(mt); in check_nomem()
35714 MT_BUG_ON(mt, mtree_insert(mt, 2, mt, GFP_KERNEL) != 0); in check_nomem()
35715 mtree_lock(mt); in check_nomem()
35718 mtree_unlock(mt); in check_nomem()
35719 mtree_destroy(mt); in check_nomem()
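check_nomem() forces allocation failures and verifies that an atomic-context insert fails cleanly with -ENOMEM while the tree stays usable. A minimal sketch of the caller-side handling (function name is illustrative):

```c
static int example_insert_atomic_then_sleepable(struct maple_tree *mt,
						unsigned long index, void *entry)
{
	int ret;

	ret = mtree_insert(mt, index, entry, GFP_ATOMIC);
	if (ret == -ENOMEM)	/* atomic reserves exhausted */
		ret = mtree_insert(mt, index, entry, GFP_KERNEL); /* retry where sleeping is allowed */

	return ret;
}
```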
35722 static noinline void check_locky(struct maple_tree *mt) in check_locky() argument
35724 MA_STATE(ms, mt, 2, 2); in check_locky()
35725 MA_STATE(reader, mt, 2, 2); in check_locky()
35728 mt_set_in_rcu(mt); in check_locky()
35734 mt_clear_in_rcu(mt); in check_locky()