#include "test/jemalloc_test.h"

#ifdef JEMALLOC_FILL
/*
 * Override jemalloc's config: disable junk filling so the large
 * allocations exercised below are not also paying for fill/validate work.
 */
const char *malloc_conf = "junk:false";
#endif
6
7 static unsigned
get_nsizes_impl(const char * cmd)8 get_nsizes_impl(const char *cmd)
9 {
10 unsigned ret;
11 size_t z;
12
13 z = sizeof(unsigned);
14 assert_d_eq(mallctl(cmd, (void *)&ret, &z, NULL, 0), 0,
15 "Unexpected mallctl(\"%s\", ...) failure", cmd);
16
17 return (ret);
18 }
19
/* Number of large size classes, per the "arenas.nlextents" mallctl. */
static unsigned
get_nlarge(void) {
	return get_nsizes_impl("arenas.nlextents");
}
25
26 static size_t
get_size_impl(const char * cmd,size_t ind)27 get_size_impl(const char *cmd, size_t ind)
28 {
29 size_t ret;
30 size_t z;
31 size_t mib[4];
32 size_t miblen = 4;
33
34 z = sizeof(size_t);
35 assert_d_eq(mallctlnametomib(cmd, mib, &miblen),
36 0, "Unexpected mallctlnametomib(\"%s\", ...) failure", cmd);
37 mib[2] = ind;
38 z = sizeof(size_t);
39 assert_d_eq(mallctlbymib(mib, miblen, (void *)&ret, &z, NULL, 0),
40 0, "Unexpected mallctlbymib([\"%s\", %zu], ...) failure", cmd, ind);
41
42 return (ret);
43 }
44
45 static size_t
get_large_size(size_t ind)46 get_large_size(size_t ind)
47 {
48 return (get_size_impl("arenas.lextent.0.size", ind));
49 }
50
51 /*
52 * On systems which can't merge extents, tests that call this function generate
53 * a lot of dirty memory very quickly. Purging between cycles mitigates
54 * potential OOM on e.g. 32-bit Windows.
55 */
56 static void
purge(void)57 purge(void)
58 {
59 assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
60 "Unexpected mallctl error");
61 }
62
TEST_BEGIN(test_overflow)63 TEST_BEGIN(test_overflow)
64 {
65 size_t largemax;
66
67 largemax = get_large_size(get_nlarge()-1);
68
69 assert_ptr_null(mallocx(largemax+1, 0),
70 "Expected OOM for mallocx(size=%#zx, 0)", largemax+1);
71
72 assert_ptr_null(mallocx(ZU(PTRDIFF_MAX)+1, 0),
73 "Expected OOM for mallocx(size=%#zx, 0)", ZU(PTRDIFF_MAX)+1);
74
75 assert_ptr_null(mallocx(SIZE_T_MAX, 0),
76 "Expected OOM for mallocx(size=%#zx, 0)", SIZE_T_MAX);
77
78 assert_ptr_null(mallocx(1, MALLOCX_ALIGN(ZU(PTRDIFF_MAX)+1)),
79 "Expected OOM for mallocx(size=1, MALLOCX_ALIGN(%#zx))",
80 ZU(PTRDIFF_MAX)+1);
81 }
82 TEST_END
83
TEST_BEGIN(test_oom)84 TEST_BEGIN(test_oom)
85 {
86 size_t largemax;
87 bool oom;
88 void *ptrs[3];
89 unsigned i;
90
91 /*
92 * It should be impossible to allocate three objects that each consume
93 * nearly half the virtual address space.
94 */
95 largemax = get_large_size(get_nlarge()-1);
96 oom = false;
97 for (i = 0; i < sizeof(ptrs) / sizeof(void *); i++) {
98 ptrs[i] = mallocx(largemax, 0);
99 if (ptrs[i] == NULL)
100 oom = true;
101 }
102 assert_true(oom,
103 "Expected OOM during series of calls to mallocx(size=%zu, 0)",
104 largemax);
105 for (i = 0; i < sizeof(ptrs) / sizeof(void *); i++) {
106 if (ptrs[i] != NULL)
107 dallocx(ptrs[i], 0);
108 }
109 purge();
110
111 #if LG_SIZEOF_PTR == 3
112 assert_ptr_null(mallocx(0x8000000000000000ULL,
113 MALLOCX_ALIGN(0x8000000000000000ULL)),
114 "Expected OOM for mallocx()");
115 assert_ptr_null(mallocx(0x8000000000000000ULL,
116 MALLOCX_ALIGN(0x80000000)),
117 "Expected OOM for mallocx()");
118 #else
119 assert_ptr_null(mallocx(0x80000000UL, MALLOCX_ALIGN(0x80000000UL)),
120 "Expected OOM for mallocx()");
121 #endif
122 }
123 TEST_END
124
TEST_BEGIN(test_basic)125 TEST_BEGIN(test_basic)
126 {
127 #define MAXSZ (((size_t)1) << 23)
128 size_t sz;
129
130 for (sz = 1; sz < MAXSZ; sz = nallocx(sz, 0) + 1) {
131 size_t nsz, rsz;
132 void *p;
133 nsz = nallocx(sz, 0);
134 assert_zu_ne(nsz, 0, "Unexpected nallocx() error");
135 p = mallocx(sz, 0);
136 assert_ptr_not_null(p,
137 "Unexpected mallocx(size=%zx, flags=0) error", sz);
138 rsz = sallocx(p, 0);
139 assert_zu_ge(rsz, sz, "Real size smaller than expected");
140 assert_zu_eq(nsz, rsz, "nallocx()/sallocx() size mismatch");
141 dallocx(p, 0);
142
143 p = mallocx(sz, 0);
144 assert_ptr_not_null(p,
145 "Unexpected mallocx(size=%zx, flags=0) error", sz);
146 dallocx(p, 0);
147
148 nsz = nallocx(sz, MALLOCX_ZERO);
149 assert_zu_ne(nsz, 0, "Unexpected nallocx() error");
150 p = mallocx(sz, MALLOCX_ZERO);
151 assert_ptr_not_null(p,
152 "Unexpected mallocx(size=%zx, flags=MALLOCX_ZERO) error",
153 nsz);
154 rsz = sallocx(p, 0);
155 assert_zu_eq(nsz, rsz, "nallocx()/sallocx() rsize mismatch");
156 dallocx(p, 0);
157 purge();
158 }
159 #undef MAXSZ
160 }
161 TEST_END
162
TEST_BEGIN(test_alignment_and_size)163 TEST_BEGIN(test_alignment_and_size)
164 {
165 #define MAXALIGN (((size_t)1) << 23)
166 #define NITER 4
167 size_t nsz, rsz, sz, alignment, total;
168 unsigned i;
169 void *ps[NITER];
170
171 for (i = 0; i < NITER; i++)
172 ps[i] = NULL;
173
174 for (alignment = 8;
175 alignment <= MAXALIGN;
176 alignment <<= 1) {
177 total = 0;
178 for (sz = 1;
179 sz < 3 * alignment && sz < (1U << 31);
180 sz += (alignment >> (LG_SIZEOF_PTR-1)) - 1) {
181 for (i = 0; i < NITER; i++) {
182 nsz = nallocx(sz, MALLOCX_ALIGN(alignment) |
183 MALLOCX_ZERO);
184 assert_zu_ne(nsz, 0,
185 "nallocx() error for alignment=%zu, "
186 "size=%zu (%#zx)", alignment, sz, sz);
187 ps[i] = mallocx(sz, MALLOCX_ALIGN(alignment) |
188 MALLOCX_ZERO);
189 assert_ptr_not_null(ps[i],
190 "mallocx() error for alignment=%zu, "
191 "size=%zu (%#zx)", alignment, sz, sz);
192 rsz = sallocx(ps[i], 0);
193 assert_zu_ge(rsz, sz,
194 "Real size smaller than expected for "
195 "alignment=%zu, size=%zu", alignment, sz);
196 assert_zu_eq(nsz, rsz,
197 "nallocx()/sallocx() size mismatch for "
198 "alignment=%zu, size=%zu", alignment, sz);
199 assert_ptr_null(
200 (void *)((uintptr_t)ps[i] & (alignment-1)),
201 "%p inadequately aligned for"
202 " alignment=%zu, size=%zu", ps[i],
203 alignment, sz);
204 total += rsz;
205 if (total >= (MAXALIGN << 1))
206 break;
207 }
208 for (i = 0; i < NITER; i++) {
209 if (ps[i] != NULL) {
210 dallocx(ps[i], 0);
211 ps[i] = NULL;
212 }
213 }
214 }
215 purge();
216 }
217 #undef MAXALIGN
218 #undef NITER
219 }
220 TEST_END
221
222 int
main(void)223 main(void)
224 {
225 return (test(
226 test_overflow,
227 test_oom,
228 test_basic,
229 test_alignment_and_size));
230 }
231