1 #include "test/jemalloc_test.h"
2
/*
 * Saved copies of the real rtree node alloc/dalloc hooks, so the intercept
 * functions below can delegate to them for rtrees other than the one under
 * test.
 */
rtree_node_alloc_t *rtree_node_alloc_orig;
rtree_node_dalloc_t *rtree_node_dalloc_orig;

/* The rtree currently under test; NULL whenever no test rtree is live. */
rtree_t *test_rtree;
7
8 static rtree_elm_t *
rtree_node_alloc_intercept(tsdn_t * tsdn,rtree_t * rtree,size_t nelms)9 rtree_node_alloc_intercept(tsdn_t *tsdn, rtree_t *rtree, size_t nelms)
10 {
11 rtree_elm_t *node;
12
13 if (rtree != test_rtree)
14 return rtree_node_alloc_orig(tsdn, rtree, nelms);
15
16 malloc_mutex_unlock(tsdn, &rtree->init_lock);
17 node = (rtree_elm_t *)calloc(nelms, sizeof(rtree_elm_t));
18 assert_ptr_not_null(node, "Unexpected calloc() failure");
19 malloc_mutex_lock(tsdn, &rtree->init_lock);
20
21 return (node);
22 }
23
24 static void
rtree_node_dalloc_intercept(tsdn_t * tsdn,rtree_t * rtree,rtree_elm_t * node)25 rtree_node_dalloc_intercept(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node)
26 {
27 if (rtree != test_rtree) {
28 rtree_node_dalloc_orig(tsdn, rtree, node);
29 return;
30 }
31
32 free(node);
33 }
34
/*
 * Reading a never-written key from a freshly created rtree must return NULL,
 * for every supported key width (1 bit up to the full pointer width).
 */
TEST_BEGIN(test_rtree_read_empty)
{
	tsdn_t *tsdn;
	unsigned i;

	tsdn = tsdn_fetch();

	/* (sizeof(uintptr_t) << 3) == number of bits in a pointer. */
	for (i = 1; i <= (sizeof(uintptr_t) << 3); i++) {
		rtree_t rtree;
		rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER;
		test_rtree = &rtree;
		assert_false(rtree_new(&rtree, i),
		    "Unexpected rtree_new() failure");
		assert_ptr_null(rtree_read(tsdn, &rtree, &rtree_ctx, 0, false),
		    "rtree_read() should return NULL for empty tree");
		rtree_delete(tsdn, &rtree);
		test_rtree = NULL;
	}
}
TEST_END
55
/* Parameters for the concurrent stress test below. */
#define NTHREADS 8
#define MAX_NBITS 18
#define NITERS 1000
#define SEED 42

/* Shared state handed (by pointer) to every worker thread. */
typedef struct {
	unsigned nbits;		/* Key width of rtree for this round. */
	rtree_t rtree;		/* The rtree all threads operate on. */
	uint32_t seed;		/* Per-round PRNG seed for the workers. */
} thd_start_arg_t;
66
67 static void *
thd_start(void * varg)68 thd_start(void *varg)
69 {
70 thd_start_arg_t *arg = (thd_start_arg_t *)varg;
71 rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER;
72 sfmt_t *sfmt;
73 extent_t *extent;
74 tsdn_t *tsdn;
75 unsigned i;
76
77 sfmt = init_gen_rand(arg->seed);
78 extent = (extent_t *)malloc(sizeof(extent));
79 assert_ptr_not_null(extent, "Unexpected malloc() failure");
80 tsdn = tsdn_fetch();
81
82 for (i = 0; i < NITERS; i++) {
83 uintptr_t key = (uintptr_t)gen_rand64(sfmt);
84 if (i % 2 == 0) {
85 rtree_elm_t *elm;
86
87 elm = rtree_elm_acquire(tsdn, &arg->rtree, &rtree_ctx,
88 key, false, true);
89 assert_ptr_not_null(elm,
90 "Unexpected rtree_elm_acquire() failure");
91 rtree_elm_write_acquired(tsdn, &arg->rtree, elm,
92 extent);
93 rtree_elm_release(tsdn, &arg->rtree, elm);
94
95 elm = rtree_elm_acquire(tsdn, &arg->rtree, &rtree_ctx,
96 key, true, false);
97 assert_ptr_not_null(elm,
98 "Unexpected rtree_elm_acquire() failure");
99 rtree_elm_read_acquired(tsdn, &arg->rtree, elm);
100 rtree_elm_release(tsdn, &arg->rtree, elm);
101 } else
102 rtree_read(tsdn, &arg->rtree, &rtree_ctx, key, false);
103 }
104
105 free(extent);
106 fini_gen_rand(sfmt);
107 return (NULL);
108 }
109
/*
 * Hammer a shared rtree from NTHREADS threads for each key width in
 * [1, MAX_NBITS); relies on the assertions inside thd_start() plus the
 * absence of crashes/deadlocks rather than on checked return values here.
 */
TEST_BEGIN(test_rtree_concurrent)
{
	thd_start_arg_t arg;
	thd_t thds[NTHREADS];
	sfmt_t *sfmt;
	tsdn_t *tsdn;
	unsigned i, j;

	sfmt = init_gen_rand(SEED);
	tsdn = tsdn_fetch();
	for (i = 1; i < MAX_NBITS; i++) {
		arg.nbits = i;
		test_rtree = &arg.rtree;
		assert_false(rtree_new(&arg.rtree, arg.nbits),
		    "Unexpected rtree_new() failure");
		/* Fresh seed per round so rounds exercise different keys. */
		arg.seed = gen_rand32(sfmt);
		for (j = 0; j < NTHREADS; j++)
			thd_create(&thds[j], thd_start, (void *)&arg);
		for (j = 0; j < NTHREADS; j++)
			thd_join(thds[j], NULL);
		rtree_delete(tsdn, &arg.rtree);
		test_rtree = NULL;
	}
	fini_gen_rand(sfmt);
}
TEST_END

#undef NTHREADS
#undef MAX_NBITS
#undef NITERS
#undef SEED
141
/*
 * Write/read round-trips at the extreme keys (0 and ~0) must work for every
 * supported key width.
 */
TEST_BEGIN(test_rtree_extrema)
{
	unsigned i;
	extent_t extent_a, extent_b;
	tsdn_t *tsdn;

	tsdn = tsdn_fetch();

	for (i = 1; i <= (sizeof(uintptr_t) << 3); i++) {
		rtree_t rtree;
		rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER;
		test_rtree = &rtree;
		assert_false(rtree_new(&rtree, i),
		    "Unexpected rtree_new() failure");

		/* Minimum key. */
		assert_false(rtree_write(tsdn, &rtree, &rtree_ctx, 0,
		    &extent_a), "Unexpected rtree_write() failure, i=%u", i);
		assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx, 0, true),
		    &extent_a,
		    "rtree_read() should return previously set value, i=%u", i);

		/* Maximum key (all bits set). */
		assert_false(rtree_write(tsdn, &rtree, &rtree_ctx,
		    ~((uintptr_t)0), &extent_b),
		    "Unexpected rtree_write() failure, i=%u", i);
		assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx,
		    ~((uintptr_t)0), true), &extent_b,
		    "rtree_read() should return previously set value, i=%u", i);

		rtree_delete(tsdn, &rtree);
		test_rtree = NULL;
	}
}
TEST_END
175
/*
 * For an rtree with i significant key bits, the top (width - i) bits of a
 * key must be ignored: keys that differ only in insignificant bits map to
 * the same leaf, while the first key with a significant bit set maps
 * elsewhere.
 */
TEST_BEGIN(test_rtree_bits)
{
	tsdn_t *tsdn;
	unsigned i, j, k;

	tsdn = tsdn_fetch();

	for (i = 1; i < (sizeof(uintptr_t) << 3); i++) {
		/*
		 * 0, 1, and the largest key whose significant bits are all
		 * zero -- all three alias the same leaf for width i.
		 */
		uintptr_t keys[] = {0, 1,
		    (((uintptr_t)1) << (sizeof(uintptr_t)*8-i)) - 1};
		extent_t extent;
		rtree_t rtree;
		rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER;

		test_rtree = &rtree;
		assert_false(rtree_new(&rtree, i),
		    "Unexpected rtree_new() failure");

		for (j = 0; j < sizeof(keys)/sizeof(uintptr_t); j++) {
			assert_false(rtree_write(tsdn, &rtree, &rtree_ctx,
			    keys[j], &extent),
			    "Unexpected rtree_write() failure");
			/* Every aliasing key must read back the same value. */
			for (k = 0; k < sizeof(keys)/sizeof(uintptr_t); k++) {
				assert_ptr_eq(rtree_read(tsdn, &rtree,
				    &rtree_ctx, keys[k], true), &extent,
				    "rtree_read() should return previously set "
				    "value and ignore insignificant key bits; "
				    "i=%u, j=%u, k=%u, set key=%#"FMTxPTR", "
				    "get key=%#"FMTxPTR, i, j, k, keys[j],
				    keys[k]);
			}
			/* A key with a significant bit set must not alias. */
			assert_ptr_null(rtree_read(tsdn, &rtree, &rtree_ctx,
			    (((uintptr_t)1) << (sizeof(uintptr_t)*8-i)), false),
			    "Only leftmost rtree leaf should be set; "
			    "i=%u, j=%u", i, j);
			rtree_clear(tsdn, &rtree, &rtree_ctx, keys[j]);
		}

		rtree_delete(tsdn, &rtree);
		test_rtree = NULL;
	}
}
TEST_END
219
TEST_BEGIN(test_rtree_random)220 TEST_BEGIN(test_rtree_random)
221 {
222 unsigned i;
223 sfmt_t *sfmt;
224 tsdn_t *tsdn;
225 #define NSET 16
226 #define SEED 42
227
228 sfmt = init_gen_rand(SEED);
229 tsdn = tsdn_fetch();
230 for (i = 1; i <= (sizeof(uintptr_t) << 3); i++) {
231 uintptr_t keys[NSET];
232 extent_t extent;
233 unsigned j;
234 rtree_t rtree;
235 rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER;
236 rtree_elm_t *elm;
237
238 test_rtree = &rtree;
239 assert_false(rtree_new(&rtree, i),
240 "Unexpected rtree_new() failure");
241
242 for (j = 0; j < NSET; j++) {
243 keys[j] = (uintptr_t)gen_rand64(sfmt);
244 elm = rtree_elm_acquire(tsdn, &rtree, &rtree_ctx,
245 keys[j], false, true);
246 assert_ptr_not_null(elm,
247 "Unexpected rtree_elm_acquire() failure");
248 rtree_elm_write_acquired(tsdn, &rtree, elm, &extent);
249 rtree_elm_release(tsdn, &rtree, elm);
250 assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx,
251 keys[j], true), &extent,
252 "rtree_read() should return previously set value");
253 }
254 for (j = 0; j < NSET; j++) {
255 assert_ptr_eq(rtree_read(tsdn, &rtree, &rtree_ctx,
256 keys[j], true), &extent,
257 "rtree_read() should return previously set value, "
258 "j=%u", j);
259 }
260
261 for (j = 0; j < NSET; j++) {
262 rtree_clear(tsdn, &rtree, &rtree_ctx, keys[j]);
263 assert_ptr_null(rtree_read(tsdn, &rtree, &rtree_ctx,
264 keys[j], true),
265 "rtree_read() should return previously set value");
266 }
267 for (j = 0; j < NSET; j++) {
268 assert_ptr_null(rtree_read(tsdn, &rtree, &rtree_ctx,
269 keys[j], true),
270 "rtree_read() should return previously set value");
271 }
272
273 rtree_delete(tsdn, &rtree);
274 test_rtree = NULL;
275 }
276 fini_gen_rand(sfmt);
277 #undef NSET
278 #undef SEED
279 }
280 TEST_END
281
int
main(void)
{
	/*
	 * Install the calloc()/free() intercepts before any test creates an
	 * rtree, saving the originals so non-test rtrees keep working.
	 */
	rtree_node_alloc_orig = rtree_node_alloc;
	rtree_node_alloc = rtree_node_alloc_intercept;
	rtree_node_dalloc_orig = rtree_node_dalloc;
	rtree_node_dalloc = rtree_node_dalloc_intercept;
	test_rtree = NULL;

	return (test(
	    test_rtree_read_empty,
	    test_rtree_concurrent,
	    test_rtree_extrema,
	    test_rtree_bits,
	    test_rtree_random));
}
298