/*
 * Copyright (c) 2006-2021, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-10-14     tyx          the first version
 */
10 
#include <rtthread.h>
#include <stdlib.h>
#include "utest.h"
14 
/*
 * Local copy of the small-memory allocator's per-block header.
 * NOTE(review): layout must stay in sync with the kernel allocator's own
 * definition so the tests can walk the heap directly — verify on upgrade.
 */
struct rt_small_mem_item
{
    rt_ubase_t              pool_ptr;         /**< small memory object addr */
    rt_size_t               next;             /**< next free item */
    rt_size_t               prev;             /**< prev free item */
#ifdef RT_USING_MEMTRACE
#ifdef ARCH_CPU_64BIT
    rt_uint8_t              thread[8];       /**< thread name */
#else
    rt_uint8_t              thread[4];       /**< thread name */
#endif /* ARCH_CPU_64BIT */
#endif /* RT_USING_MEMTRACE */
};
28 
/*
 * Local copy of the small-memory heap control block.
 * NOTE(review): must match the kernel allocator's internal definition —
 * the tests cast the rt_smem_init() result to this type.
 */
struct rt_small_mem
{
    struct rt_memory            parent;                 /**< inherit from rt_memory */
    rt_uint8_t                 *heap_ptr;               /**< pointer to the heap */
    struct rt_small_mem_item   *heap_end;               /**< sentinel item marking the end of the heap */
    struct rt_small_mem_item   *lfree;                  /**< presumably the lowest free item — confirm against allocator */
    rt_size_t                   mem_size_aligned;       /**< aligned memory size */
};
37 
/*
 * Usable payload size of a small-mem item: the distance from this item to
 * the next item, minus the aligned item header. `next` is an offset from
 * heap_ptr, so this item's own offset is subtracted first.
 */
#define MEM_SIZE(_heap, _mem)      \
    (((struct rt_small_mem_item *)(_mem))->next - ((rt_ubase_t)(_mem) - \
    (rt_ubase_t)((_heap)->heap_ptr)) - RT_ALIGN(sizeof(struct rt_small_mem_item), RT_ALIGN_SIZE))

/* Size in bytes of the private heap buffer used by every test case below */
#define TEST_MEM_SIZE 1024
43 
max_block(struct rt_small_mem * heap)44 static rt_size_t max_block(struct rt_small_mem *heap)
45 {
46     struct rt_small_mem_item *mem;
47     rt_size_t max = 0, size;
48 
49     for (mem = (struct rt_small_mem_item *)heap->heap_ptr;
50          mem != heap->heap_end;
51          mem = (struct rt_small_mem_item *)&heap->heap_ptr[mem->next])
52     {
53         if (((rt_ubase_t)mem->pool_ptr & 0x1) == 0)
54         {
55             size = MEM_SIZE(heap, mem);
56             if (size > max)
57             {
58                 max = size;
59             }
60         }
61     }
62     return max;
63 }
64 
/*
 * Compare `size` bytes starting at `ptr` against the fill byte `v`.
 * Returns 0 when every byte matches, otherwise the (signed) difference
 * between the first mismatching byte and `v`, memcmp-style.
 *
 * Fix: the previous revision never advanced the pointer, so it compared
 * only the first byte `size` times and missed corruption anywhere else
 * in the block.
 */
static int _mem_cmp(void *ptr, rt_uint8_t v, rt_size_t size)
{
    rt_uint8_t *p = (rt_uint8_t *)ptr;

    while (size-- != 0)
    {
        if (*p != v)
            return *p - v;
        p++;
    }
    return 0;
}
74 
/* One tracked allocation in the functional test */
struct mem_test_context
{
    void *ptr;          /* block returned by the allocator */
    rt_size_t size;     /* requested size in bytes */
    rt_uint8_t magic;   /* byte pattern the block was filled with */
};
81 
/*
 * Functional test of the small-memory allocator on a private
 * TEST_MEM_SIZE heap: full-heap alloc/free, sequential and interleaved
 * free (block merging), and realloc grow/shrink/same-size. Every block is
 * filled with a distinct magic byte and verified before release; after
 * each scenario the largest free block must equal the initial total,
 * proving full merge-back.
 */
static void mem_functional_test(void)
{
    rt_size_t total_size;
    rt_uint8_t *buf;
    struct rt_small_mem *heap;
    rt_uint8_t magic = __LINE__;

    /* Prepare test memory */
    buf = rt_malloc(TEST_MEM_SIZE);
    uassert_not_null(buf);
    uassert_int_equal(RT_ALIGN((rt_ubase_t)buf, RT_ALIGN_SIZE), (rt_ubase_t)buf);
    rt_memset(buf, 0xAA, TEST_MEM_SIZE);
    /* small heap init */
    heap = (struct rt_small_mem *)rt_smem_init("mem_tc", buf, TEST_MEM_SIZE);
    /* get total size */
    total_size = max_block(heap);
    uassert_int_not_equal(total_size, 0);
    /*
     * Allocate all memory at a time and test whether
     * the memory allocation release function is effective
     */
    {
        struct mem_test_context ctx;
        ctx.magic = magic++;
        ctx.size = max_block(heap);
        ctx.ptr = rt_smem_alloc(&heap->parent, ctx.size);
        uassert_not_null(ctx.ptr);
        rt_memset(ctx.ptr, ctx.magic, ctx.size);
        uassert_int_equal(_mem_cmp(ctx.ptr, ctx.magic, ctx.size), 0);
        rt_smem_free(ctx.ptr);
        /* after the single free the whole heap must be available again */
        uassert_int_equal(max_block(heap), total_size);
    }
    /*
     * Apply for memory release sequentially and
     * test whether memory block merging is effective
     */
    {
        rt_size_t i, max_free = 0;
        struct mem_test_context ctx[3];
        /* alloc mem: split the remaining space evenly over the slots left */
        for (i = 0; i < sizeof(ctx) / sizeof(ctx[0]); i++)
        {
            ctx[i].magic = magic++;
            ctx[i].size = max_block(heap) / (sizeof(ctx) / sizeof(ctx[0]) - i);
            ctx[i].ptr = rt_smem_alloc(&heap->parent, ctx[i].size);
            uassert_not_null(ctx[i].ptr);
            rt_memset(ctx[i].ptr, ctx[i].magic, ctx[i].size);
        }
        /* All memory has been applied. The remaining memory should be 0 */
        uassert_int_equal(max_block(heap), 0);
        /* Verify that the memory data is correct */
        for (i = 0; i < sizeof(ctx) / sizeof(ctx[0]); i++)
        {
            uassert_int_equal(_mem_cmp(ctx[i].ptr, ctx[i].magic, ctx[i].size), 0);
        }
        /* Sequential memory release */
        for (i = 0; i < sizeof(ctx) / sizeof(ctx[0]); i++)
        {
            uassert_int_equal(_mem_cmp(ctx[i].ptr, ctx[i].magic, ctx[i].size), 0);
            rt_smem_free(ctx[i].ptr);
            max_free += ctx[i].size;
            /* adjacent frees must merge: largest free block keeps growing */
            uassert_true(max_block(heap) >= max_free);
        }
        /* Check whether the memory is fully merged */
        uassert_int_equal(max_block(heap), total_size);
    }
    /*
     * Apply for memory release at an interval to
     * test whether memory block merging is effective
     */
    {
        rt_size_t i, max_free = 0;
        struct mem_test_context ctx[3];
        /* alloc mem */
        for (i = 0; i < sizeof(ctx) / sizeof(ctx[0]); i++)
        {
            ctx[i].magic = magic++;
            ctx[i].size = max_block(heap) / (sizeof(ctx) / sizeof(ctx[0]) - i);
            ctx[i].ptr = rt_smem_alloc(&heap->parent, ctx[i].size);
            uassert_not_null(ctx[i].ptr);
            rt_memset(ctx[i].ptr, ctx[i].magic, ctx[i].size);
        }
        /* All memory has been applied. The remaining memory should be 0 */
        uassert_int_equal(max_block(heap), 0);
        /* Verify that the memory data is correct */
        for (i = 0; i < sizeof(ctx) / sizeof(ctx[0]); i++)
        {
            uassert_int_equal(_mem_cmp(ctx[i].ptr, ctx[i].magic, ctx[i].size), 0);
        }
        /* Release even address */
        for (i = 0; i < sizeof(ctx) / sizeof(ctx[0]); i++)
        {
            if (i % 2 == 0)
            {
                uassert_int_equal(_mem_cmp(ctx[i].ptr, ctx[i].magic, ctx[i].size), 0);
                rt_smem_free(ctx[i].ptr);
                uassert_true(max_block(heap) >= ctx[0].size);
            }
        }
        /* Release odd addresses and merge memory blocks */
        for (i = 0; i < sizeof(ctx) / sizeof(ctx[0]); i++)
        {
            if (i % 2 != 0)
            {
                uassert_int_equal(_mem_cmp(ctx[i].ptr, ctx[i].magic, ctx[i].size), 0);
                rt_smem_free(ctx[i].ptr);
                /* freeing the odd block joins its two even neighbours */
                max_free += ctx[i - 1].size + ctx[i + 1].size;
                uassert_true(max_block(heap) >= max_free);
            }
        }
        /* Check whether the memory is fully merged */
        uassert_int_equal(max_block(heap), total_size);
    }
    /* mem realloc test: small -> large */
    {
        /* Request a piece of memory for subsequent reallocation operations */
        struct mem_test_context ctx[3];
        ctx[0].magic = magic++;
        ctx[0].size = max_block(heap) / 3;
        ctx[0].ptr = rt_smem_alloc(&heap->parent, ctx[0].size);
        uassert_not_null(ctx[0].ptr);
        rt_memset(ctx[0].ptr, ctx[0].magic, ctx[0].size);
        /* Apply for a small piece of memory and split the continuous memory */
        ctx[1].magic = magic++;
        ctx[1].size = RT_ALIGN_SIZE;
        ctx[1].ptr = rt_smem_alloc(&heap->parent, ctx[1].size);
        uassert_not_null(ctx[1].ptr);
        rt_memset(ctx[1].ptr, ctx[1].magic, ctx[1].size);
        /* Check whether the maximum memory block is larger than the first piece of memory */
        uassert_true(max_block(heap) > ctx[0].size);
        /* Reallocate the first piece of memory; growth cannot happen in
         * place (ctx[1] sits right behind it), so the pointer must move
         * and the old contents must be copied over. */
        ctx[2].magic = magic++;
        ctx[2].size = max_block(heap);
        ctx[2].ptr = rt_smem_realloc(&heap->parent, ctx[0].ptr, ctx[2].size);
        uassert_not_null(ctx[2].ptr);
        uassert_int_not_equal(ctx[0].ptr, ctx[2].ptr);
        uassert_int_equal(_mem_cmp(ctx[2].ptr, ctx[0].magic, ctx[0].size), 0);
        rt_memset(ctx[2].ptr, ctx[2].magic, ctx[2].size);
        /* Free the second piece of memory */
        uassert_int_equal(_mem_cmp(ctx[1].ptr, ctx[1].magic, ctx[1].size), 0);
        rt_smem_free(ctx[1].ptr);
        /* Free reallocated memory */
        uassert_int_equal(_mem_cmp(ctx[2].ptr, ctx[2].magic, ctx[2].size), 0);
        rt_smem_free(ctx[2].ptr);
        /* Check memory integrity */
        uassert_int_equal(max_block(heap), total_size);
    }
    /* mem realloc test: large -> small (shrink must happen in place) */
    {
        rt_size_t max_free;
        struct mem_test_context ctx;
        /* alloc a piece of memory */
        ctx.magic = magic++;
        ctx.size = max_block(heap) / 2;
        ctx.ptr = rt_smem_alloc(&heap->parent, ctx.size);
        uassert_not_null(ctx.ptr);
        rt_memset(ctx.ptr, ctx.magic, ctx.size);
        uassert_int_equal(_mem_cmp(ctx.ptr, ctx.magic, ctx.size), 0);
        /* Get remaining memory */
        max_free = max_block(heap);
        /* Change memory size; shrinking must keep the same pointer */
        ctx.size = ctx.size / 2;
        uassert_int_equal((rt_ubase_t)rt_smem_realloc(&heap->parent, ctx.ptr, ctx.size), (rt_ubase_t)ctx.ptr);
        /* The released tail must enlarge the free space */
        uassert_true(max_block(heap) > max_free);
        /* Free memory */
        uassert_int_equal(_mem_cmp(ctx.ptr, ctx.magic, ctx.size), 0);
        rt_smem_free(ctx.ptr);
        /* Check memory integrity */
        uassert_int_equal(max_block(heap), total_size);
    }
    /* mem realloc test: equal size (must be a no-op) */
    {
        rt_size_t max_free;
        struct mem_test_context ctx;
        /* alloc a piece of memory */
        ctx.magic = magic++;
        ctx.size = max_block(heap) / 2;
        ctx.ptr = rt_smem_alloc(&heap->parent, ctx.size);
        uassert_not_null(ctx.ptr);
        rt_memset(ctx.ptr, ctx.magic, ctx.size);
        uassert_int_equal(_mem_cmp(ctx.ptr, ctx.magic, ctx.size), 0);
        /* Get remaining memory */
        max_free = max_block(heap);
        /* Do not change memory size */
        uassert_int_equal((rt_ubase_t)rt_smem_realloc(&heap->parent, ctx.ptr, ctx.size), (rt_ubase_t)ctx.ptr);
        /* Free space must be unchanged */
        uassert_true(max_block(heap) == max_free);
        /* Free memory */
        uassert_int_equal(_mem_cmp(ctx.ptr, ctx.magic, ctx.size), 0);
        rt_smem_free(ctx.ptr);
        /* Check memory integrity */
        uassert_int_equal(max_block(heap), total_size);
    }
    /* small heap deinit */
    rt_smem_detach(&heap->parent);
    /* release test resources */
    rt_free(buf);
}
281 
/* Header placed at the start of every allocation made by mem_alloc_test;
 * any bytes of the block beyond this header are filled with `magic`. */
struct mem_alloc_context
{
    rt_list_t node;      /* links the block into mem_alloc_head.list */
    rt_size_t size;      /* total size of this allocation in bytes */
    rt_uint8_t magic;    /* fill byte for the payload after the header */
};
288 
/* Bookkeeping for the random alloc/free stress loop */
struct mem_alloc_head
{
    rt_list_t list;        /* list of live allocations (mem_alloc_context nodes) */
    rt_size_t count;       /* number of live allocations on the list */
    rt_tick_t start;       /* tick of the last progress mark */
    rt_tick_t end;         /* tick at which the test stops */
    rt_tick_t interval;    /* period between progress marks */
};
297 
/* Tuning for mem_alloc_test: random block size range (in units of
 * sizeof(struct mem_alloc_context)) and run time in seconds. */
#define MEM_RANG_ALLOC_BLK_MIN  2
#define MEM_RANG_ALLOC_BLK_MAX  5
#define MEM_RANG_ALLOC_TEST_TIME 5
301 
/*
 * Randomized alloc/free stress test: for MEM_RANG_ALLOC_TEST_TIME seconds,
 * allocate random-sized blocks (60% of iterations) or free the oldest
 * block (40%), each block carrying a magic fill that is verified on
 * release. When the heap is exhausted, half of the outstanding blocks are
 * released and the loop continues. Afterwards the heap must merge back to
 * its original largest free block.
 */
static void mem_alloc_test(void)
{
    struct mem_alloc_head head;
    rt_uint8_t *buf;
    struct rt_small_mem *heap;
    rt_size_t total_size, size;
    struct mem_alloc_context *ctx;

    /* init */
    rt_list_init(&head.list);
    head.count = 0;
    head.start = rt_tick_get();
    head.end = rt_tick_get() + rt_tick_from_millisecond(MEM_RANG_ALLOC_TEST_TIME * 1000);
    head.interval = (head.end - head.start) / 20;
    buf = rt_malloc(TEST_MEM_SIZE);
    uassert_not_null(buf);
    uassert_int_equal(RT_ALIGN((rt_ubase_t)buf, RT_ALIGN_SIZE), (rt_ubase_t)buf);
    rt_memset(buf, 0xAA, TEST_MEM_SIZE);
    heap =  (struct rt_small_mem *)rt_smem_init("mem_tc", buf, TEST_MEM_SIZE);
    total_size = max_block(heap);
    uassert_int_not_equal(total_size, 0);
    /* test run — NOTE(review): loop condition compares the fixed distance
     * end-start, so head.start is reused as the progress-mark timestamp */
    while (head.end - head.start < RT_TICK_MAX / 2)
    {
        if (rt_tick_get() - head.start >= head.interval)
        {
            head.start = rt_tick_get();
            rt_kprintf("#");
        }
        /* 60% probability to perform alloc operation */
        if (rand() % 10 >= 4)
        {
            size = rand() % MEM_RANG_ALLOC_BLK_MAX + MEM_RANG_ALLOC_BLK_MIN;
            size *= sizeof(struct mem_alloc_context);
            ctx = rt_smem_alloc(&heap->parent, size);
            if (ctx == RT_NULL)
            {
                if (head.count == 0)
                {
                    break;
                }
                /* out of memory: free the oldest blocks until only half
                 * of the current count remains, then retry */
                size = head.count / 2;
                while (size != head.count)
                {
                    ctx = rt_list_first_entry(&head.list, struct mem_alloc_context, node);
                    rt_list_remove(&ctx->node);
                    if (ctx->size > sizeof(*ctx))
                    {
                        /* payload (bytes after the header) must still hold the magic fill */
                        if (_mem_cmp(&ctx[1], ctx->magic, ctx->size - sizeof(*ctx)) != 0)
                        {
                            uassert_true(0);
                        }
                    }
                    rt_memset(ctx, 0xAA, ctx->size);
                    rt_smem_free(ctx);
                    head.count --;
                }
                continue;
            }
            /* returned blocks must honour the platform alignment */
            if (RT_ALIGN((rt_ubase_t)ctx, RT_ALIGN_SIZE) != (rt_ubase_t)ctx)
            {
                uassert_int_equal(RT_ALIGN((rt_ubase_t)ctx, RT_ALIGN_SIZE), (rt_ubase_t)ctx);
            }
            rt_memset(ctx, 0, size);
            rt_list_init(&ctx->node);
            ctx->size = size;
            ctx->magic = rand() & 0xff;
            if (ctx->size > sizeof(*ctx))
            {
                rt_memset(&ctx[1], ctx->magic, ctx->size - sizeof(*ctx));
            }
            rt_list_insert_after(&head.list, &ctx->node);
            head.count += 1;
        }
        else
        {
            /* free path: release the oldest live block after verifying it */
            if (!rt_list_isempty(&head.list))
            {
                ctx = rt_list_first_entry(&head.list, struct mem_alloc_context, node);
                rt_list_remove(&ctx->node);
                if (ctx->size > sizeof(*ctx))
                {
                    if (_mem_cmp(&ctx[1], ctx->magic, ctx->size - sizeof(*ctx)) != 0)
                    {
                        uassert_true(0);
                    }
                }
                rt_memset(ctx, 0xAA, ctx->size);
                rt_smem_free(ctx);
                head.count --;
            }
        }
    }
    /* drain every remaining block, verifying each payload */
    while (!rt_list_isempty(&head.list))
    {
        ctx = rt_list_first_entry(&head.list, struct mem_alloc_context, node);
        rt_list_remove(&ctx->node);
        if (ctx->size > sizeof(*ctx))
        {
            if (_mem_cmp(&ctx[1], ctx->magic, ctx->size - sizeof(*ctx)) != 0)
            {
                uassert_true(0);
            }
        }
        rt_memset(ctx, 0xAA, ctx->size);
        rt_smem_free(ctx);
        head.count --;
    }
    uassert_int_equal(head.count, 0);
    /* everything freed: heap must have merged back to one block */
    uassert_int_equal(max_block(heap), total_size);
    /* small heap deinit */
    rt_smem_detach(&heap->parent);
    /* release test resources */
    rt_free(buf);
}
417 
/* Tuning for mem_realloc_test: random block size range (in units of
 * sizeof(struct mem_realloc_context)) and run time in seconds. */
#define MEM_RANG_REALLOC_BLK_MIN  0
#define MEM_RANG_REALLOC_BLK_MAX  5
#define MEM_RANG_REALLOC_TEST_TIME 5

/* Header stored at the start of every allocation made by the realloc
 * test; the rest of the block is filled with `magic`. */
struct mem_realloc_context
{
    rt_size_t size;      /* requested size of this allocation in bytes */
    rt_uint8_t magic;    /* fill byte for the payload after the header */
};

/* Bookkeeping for the realloc stress loop */
struct mem_realloc_head
{
    struct mem_realloc_context **ctx_tab;   /* table of live allocations (RT_NULL = free slot) */
    rt_size_t count;                        /* number of slots in ctx_tab */
    rt_tick_t start;                        /* tick of the last progress mark */
    rt_tick_t end;                          /* tick at which the test stops */
    rt_tick_t interval;                     /* period between progress marks */
};
436 
mem_realloc_test(void)437 static void mem_realloc_test(void)
438 {
439     struct mem_realloc_head head;
440     rt_uint8_t *buf;
441     struct rt_small_mem *heap;
442     rt_size_t total_size, size, idx;
443     struct mem_realloc_context *ctx;
444     int res;
445 
446     size = RT_ALIGN(sizeof(struct mem_realloc_context), RT_ALIGN_SIZE) + RT_ALIGN_SIZE;
447     size = TEST_MEM_SIZE / size;
448     /* init */
449     head.ctx_tab = RT_NULL;
450     head.count = size;
451     head.start = rt_tick_get();
452     head.end = rt_tick_get() + rt_tick_from_millisecond(MEM_RANG_ALLOC_TEST_TIME * 1000);
453     head.interval = (head.end - head.start) / 20;
454     buf = rt_malloc(TEST_MEM_SIZE);
455     uassert_not_null(buf);
456     uassert_int_equal(RT_ALIGN((rt_ubase_t)buf, RT_ALIGN_SIZE), (rt_ubase_t)buf);
457     rt_memset(buf, 0xAA, TEST_MEM_SIZE);
458     heap =  (struct rt_small_mem *)rt_smem_init("mem_tc", buf, TEST_MEM_SIZE);
459     total_size = max_block(heap);
460     uassert_int_not_equal(total_size, 0);
461     /* init ctx tab */
462     size = head.count * sizeof(struct mem_realloc_context *);
463     head.ctx_tab = rt_smem_alloc(&heap->parent, size);
464     uassert_not_null(head.ctx_tab);
465     rt_memset(head.ctx_tab, 0, size);
466     /* test run */
467     while (head.end - head.start < RT_TICK_MAX / 2)
468     {
469         if (rt_tick_get() - head.start >= head.interval)
470         {
471             head.start = rt_tick_get();
472             rt_kprintf("#");
473         }
474         size = rand() % MEM_RANG_ALLOC_BLK_MAX + MEM_RANG_ALLOC_BLK_MIN;
475         size *= sizeof(struct mem_realloc_context);
476         idx = rand() % head.count;
477         ctx = rt_smem_realloc(&heap->parent, head.ctx_tab[idx], size);
478         if (ctx == RT_NULL)
479         {
480             if (size == 0)
481             {
482                 if (head.ctx_tab[idx])
483                 {
484                     head.ctx_tab[idx] = RT_NULL;
485                 }
486             }
487             else
488             {
489                 for (idx = 0; idx < head.count; idx++)
490                 {
491                     ctx = head.ctx_tab[idx];
492                     if (rand() % 2 && ctx)
493                     {
494                         if (ctx->size > sizeof(*ctx))
495                         {
496                             res = _mem_cmp(&ctx[1], ctx->magic, ctx->size - sizeof(*ctx));
497                             if (res != 0)
498                             {
499                                 uassert_int_equal(res, 0);
500                             }
501                         }
502                         rt_memset(ctx, 0xAA, ctx->size);
503                         rt_smem_realloc(&heap->parent, ctx, 0);
504                         head.ctx_tab[idx] = RT_NULL;
505                     }
506                 }
507             }
508             continue;
509         }
510         /* check mem */
511         if (head.ctx_tab[idx] != RT_NULL)
512         {
513             res = 0;
514             if (ctx->size < size)
515             {
516                 if (ctx->size > sizeof(*ctx))
517                 {
518                     res = _mem_cmp(&ctx[1], ctx->magic, ctx->size - sizeof(*ctx));
519                 }
520             }
521             else
522             {
523                 if (size > sizeof(*ctx))
524                 {
525                     res = _mem_cmp(&ctx[1], ctx->magic, size - sizeof(*ctx));
526                 }
527             }
528             if (res != 0)
529             {
530                 uassert_int_equal(res, 0);
531             }
532         }
533         /* init mem */
534         ctx->magic = rand() & 0xff;
535         ctx->size = size;
536         if (ctx->size > sizeof(*ctx))
537         {
538             rt_memset(&ctx[1], ctx->magic, ctx->size - sizeof(*ctx));
539         }
540         head.ctx_tab[idx] = ctx;
541     }
542     /* free all mem */
543     for (idx = 0; idx < head.count; idx++)
544     {
545         ctx = head.ctx_tab[idx];
546         if (ctx == RT_NULL)
547         {
548             continue;
549         }
550         if (ctx->size > sizeof(*ctx))
551         {
552             res = _mem_cmp(&ctx[1], ctx->magic, ctx->size - sizeof(*ctx));
553             if (res != 0)
554             {
555                 uassert_int_equal(res, 0);
556             }
557         }
558         rt_memset(ctx, 0xAA, ctx->size);
559         rt_smem_realloc(&heap->parent, ctx, 0);
560         head.ctx_tab[idx] = RT_NULL;
561     }
562     uassert_int_not_equal(max_block(heap), total_size);
563     /* small heap deinit */
564     rt_smem_detach(&heap->parent);
565     /* release test resources */
566     rt_free(buf);
567 }
568 
/* utest setup hook: nothing to prepare — each case builds its own heap */
static rt_err_t utest_tc_init(void)
{
    return RT_EOK;
}
573 
/* utest teardown hook: nothing to release — each case frees its own heap */
static rt_err_t utest_tc_cleanup(void)
{
    return RT_EOK;
}
578 
/* Register the three memory test units with the utest framework */
static void testcase(void)
{
    UTEST_UNIT_RUN(mem_functional_test);
    UTEST_UNIT_RUN(mem_alloc_test);
    UTEST_UNIT_RUN(mem_realloc_test);
}
/* export with a 20-second per-case timeout */
UTEST_TC_EXPORT(testcase, "testcases.kernel.mem_tc", utest_tc_init, utest_tc_cleanup, 20);
586