Lines Matching refs:mem
1343 DLMALLOC_EXPORT void mspace_free(mspace msp, void* mem);
1354 DLMALLOC_EXPORT void* mspace_realloc(mspace msp, void* mem, size_t newsize);
1406 DLMALLOC_EXPORT size_t mspace_usable_size(const void* mem);
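
These three prototypes belong to the mspace interface, which gives each caller an independent heap. A minimal end-to-end sketch, assuming the allocator is built with MSPACES; the prototypes are declared inline here rather than pulled from a header:

    #include <stddef.h>

    /* Prototypes mirroring the declarations above (MSPACES builds only). */
    typedef void* mspace;
    extern mspace create_mspace(size_t capacity, int locked);
    extern size_t destroy_mspace(mspace msp);
    extern void*  mspace_malloc(mspace msp, size_t bytes);
    extern void*  mspace_realloc(mspace msp, void* mem, size_t newsize);
    extern void   mspace_free(mspace msp, void* mem);
    extern size_t mspace_usable_size(const void* mem);

    void demo_mspace(void) {
      mspace ms = create_mspace(0, 0);        /* capacity 0: grow on demand; 0: no locking */
      void* mem = mspace_malloc(ms, 100);
      if (mem != 0) {
        mem = mspace_realloc(ms, mem, 200);   /* resized within the same space */
        if (mem != 0 && mspace_usable_size(mem) >= 200)
          mspace_free(ms, mem);
      }
      destroy_mspace(ms);                     /* releases anything still allocated in ms */
    }
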
2228 #define mem2chunk(mem) ((mchunkptr)((char*)(mem) - TWO_SIZE_T_SIZES)) argument
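
mem2chunk is the inverse of chunk2mem: the payload pointer handed to the user sits TWO_SIZE_T_SIZES (the prev_foot and head words) past the start of the chunk, so converting back is a fixed subtraction. A standalone sketch of the round trip using plain byte arithmetic; the helpers here are simplified stand-ins, not dlmalloc's own macros:

    #include <assert.h>
    #include <stddef.h>

    #define TWO_SIZE_T_SIZES (2 * sizeof(size_t))   /* header words before the payload */

    /* Simplified stand-ins for chunk2mem()/mem2chunk() at line 2228. */
    static void* demo_chunk2mem(void* chunk) { return (char*)chunk + TWO_SIZE_T_SIZES; }
    static void* demo_mem2chunk(void* mem)   { return (char*)mem  - TWO_SIZE_T_SIZES; }

    int main(void) {
      char fake_chunk[64];                     /* pretend this is a chunk header + payload */
      void* mem = demo_chunk2mem(fake_chunk);  /* what malloc() would hand back */
      assert(demo_mem2chunk(mem) == (void*)fake_chunk);  /* free() recovers the chunk start */
      return 0;
    }
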
2822 static void do_check_malloced_chunk(mstate m, void* mem, size_t s);
3293 static void do_check_malloced_chunk(mstate m, void* mem, size_t s) { in do_check_malloced_chunk() argument
3294 if (mem != 0) { in do_check_malloced_chunk()
3295 mchunkptr p = mem2chunk(mem); in do_check_malloced_chunk()
3808 #define internal_free(m, mem) mspace_free(m,mem); argument
3813 #define internal_free(m, mem)\ argument
3814 if (m == gm) dlfree(mem); else mspace_free(m,mem);
3817 #define internal_free(m, mem) dlfree(mem) argument
4053 void* mem = mmap_alloc(m, nb); in sys_alloc() local
4054 if (mem != 0) in sys_alloc()
4055 return mem; in sys_alloc()
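
sys_alloc tries mmap_alloc first for requests at or above the mmap threshold, so very large allocations get their own mapping and are unmapped on free. A hedged sketch of tuning that threshold through the mallopt hook; the dl prefix assumes a USE_DL_PREFIX build:

    #include <stddef.h>

    #ifndef M_MMAP_THRESHOLD
    #define M_MMAP_THRESHOLD (-3)   /* parameter number used by dlmalloc's mallopt */
    #endif

    extern int   dlmallopt(int param_number, int value);
    extern void* dlmalloc(size_t bytes);
    extern void  dlfree(void* mem);

    void demo_mmap_threshold(void) {
      /* Requests of 1 MiB or more should be served by mmap_alloc() inside sys_alloc(). */
      dlmallopt(M_MMAP_THRESHOLD, 1024 * 1024);
      void* big = dlmalloc(4u * 1024 * 1024);  /* likely an individually mmapped chunk */
      dlfree(big);                             /* returned to the OS rather than to the bins */
    }
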
4579 void* mem; in dlmalloc() local
4596 mem = chunk2mem(p); in dlmalloc()
4597 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4623 mem = chunk2mem(p); in dlmalloc()
4624 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4628 else if (gm->treemap != 0 && (mem = tmalloc_small(gm, nb)) != 0) { in dlmalloc()
4629 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4638 if (gm->treemap != 0 && (mem = tmalloc_large(gm, nb)) != 0) { in dlmalloc()
4639 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4659 mem = chunk2mem(p); in dlmalloc()
4660 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4670 mem = chunk2mem(p); in dlmalloc()
4672 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4676 mem = sys_alloc(gm, nb); in dlmalloc()
4680 return mem; in dlmalloc()
4688 void dlfree(void* mem) { in dlfree() argument
4695 if (mem != 0) { in dlfree()
4696 mchunkptr p = mem2chunk(mem); in dlfree()
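
The dlmalloc hits above trace its allocation sources in order: the small bins (4596, 4623), the tree bins (4628, 4638), the designated victim and top chunk (4659, 4670), and finally sys_alloc (4676); dlfree is the matching release path. A minimal usage sketch, assuming a USE_DL_PREFIX build:

    #include <stddef.h>
    #include <string.h>

    extern void* dlmalloc(size_t bytes);
    extern void  dlfree(void* mem);

    void demo_malloc_free(void) {
      char* s = (char*)dlmalloc(32);   /* small request: served from a small bin if possible */
      if (s != 0) {
        strcpy(s, "hello");
        dlfree(s);                     /* dlfree(0) is also a no-op, per the check at 4695 */
      }
    }
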
4798 void* mem; in dlcalloc() local
4806 mem = dlmalloc(req); in dlcalloc()
4807 if (mem != 0 && calloc_must_clear(mem2chunk(mem))) in dlcalloc()
4808 memset(mem, 0, req); in dlcalloc()
4809 return mem; in dlcalloc()
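
dlcalloc only calls memset when calloc_must_clear() reports the chunk may hold stale data; freshly mmapped chunks are already zero-filled by the kernel (when MMAP_CLEARS holds) and skip the clear at 4808. A short usage sketch:

    #include <assert.h>
    #include <stddef.h>

    extern void* dlcalloc(size_t n_elements, size_t elem_size);
    extern void  dlfree(void* mem);

    void demo_calloc(void) {
      /* 1000 ints, zero-initialized whether the chunk came from an existing
         segment (explicit memset) or a fresh mapping (already zero). */
      int* v = (int*)dlcalloc(1000, sizeof(int));
      if (v != 0) {
        assert(v[0] == 0 && v[999] == 0);
        dlfree(v);
      }
    }
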
4897 void* mem = 0; in internal_memalign() local
4913 mem = internal_malloc(m, req); in internal_memalign()
4914 if (mem != 0) { in internal_memalign()
4915 mchunkptr p = mem2chunk(mem); in internal_memalign()
4918 if ((((size_t)(mem)) & (alignment - 1)) != 0) { /* misaligned */ in internal_memalign()
4927 char* br = (char*)mem2chunk((size_t)(((size_t)((char*)mem + alignment - in internal_memalign()
4960 mem = chunk2mem(p); in internal_memalign()
4962 assert(((size_t)mem & (alignment - 1)) == 0); in internal_memalign()
4967 return mem; in internal_memalign()
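
When the pointer returned by internal_malloc is misaligned (the test at 4918), internal_memalign rounds it up to the next alignment boundary with the classic (x + a - 1) & -a trick before splitting off the leading chunk (4927). A standalone sketch of that rounding, assuming the alignment is a power of two; align_up is a hypothetical helper, not part of dlmalloc:

    #include <assert.h>
    #include <stdint.h>

    /* Hypothetical helper mirroring the arithmetic on line 4927. */
    static inline uintptr_t align_up(uintptr_t addr, uintptr_t alignment) {
      return (addr + alignment - 1) & ~(alignment - 1);   /* same as & -alignment */
    }

    int main(void) {
      assert(align_up(0x1001, 0x40) == 0x1040);  /* misaligned: bumped to the next 64-byte boundary */
      assert(align_up(0x1040, 0x40) == 0x1040);  /* already aligned: unchanged */
      return 0;
    }
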
4986 void* mem; /* malloced aggregate space */ in ialloc() local
5032 mem = internal_malloc(m, size - CHUNK_OVERHEAD); in ialloc()
5035 if (mem == 0) in ialloc()
5039 p = mem2chunk(mem); in ialloc()
5045 memset((size_t*)mem, 0, remainder_size - SIZE_T_SIZE - array_size); in ialloc()
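
ialloc is the shared backend for independent_calloc and independent_comalloc: it allocates one aggregate chunk (5032), zeroes the element contents when acting for independent_calloc (5045), and slices the aggregate into independently freeable elements. A hedged usage sketch of the dl-prefixed front end:

    #include <stddef.h>

    extern void** dlindependent_calloc(size_t n_elements, size_t elem_size, void* chunks[]);
    extern void   dlfree(void* mem);

    void demo_independent_calloc(void) {
      /* 8 equally sized, zeroed elements carved out of one aggregate allocation. */
      void** elems = dlindependent_calloc(8, 64, 0);   /* 0: let ialloc provide the pointer array */
      if (elems != 0) {
        for (size_t i = 0; i < 8; ++i)
          dlfree(elems[i]);    /* each element is an independently freeable chunk */
        dlfree(elems);         /* the pointer array is itself a freeable chunk when chunks == 0 */
      }
    }
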
5109 void* mem = *a; in internal_bulk_free() local
5110 if (mem != 0) { in internal_bulk_free()
5111 mchunkptr p = mem2chunk(mem); in internal_bulk_free()
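
internal_bulk_free walks an array of pointers (5109-5111), freeing each non-null entry and clearing the slot; it backs the public bulk_free entry point. A hedged sketch, dl prefix assumed:

    #include <stddef.h>

    extern void*  dlmalloc(size_t bytes);
    extern size_t dlbulk_free(void* array[], size_t n_elements);

    void demo_bulk_free(void) {
      void* ptrs[4];
      for (size_t i = 0; i < 4; ++i)
        ptrs[i] = dlmalloc(128);
      /* Frees every non-null entry and nulls the freed slots; the return value
         (a count of entries it declined to free) is ignored here. */
      dlbulk_free(ptrs, 4);
    }
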
5192 void* mem = 0; in dlrealloc() local
5194 mem = dlmalloc(bytes); in dlrealloc()
5221 mem = chunk2mem(newp); in dlrealloc()
5224 mem = internal_malloc(m, bytes); in dlrealloc()
5225 if (mem != 0) { in dlrealloc()
5227 memcpy(mem, oldmem, (oc < bytes)? oc : bytes); in dlrealloc()
5233 return mem; in dlrealloc()
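
dlrealloc first tries to resize the existing chunk in place (the newp path ending at 5221); only when that fails does it allocate a fresh block (5224), copy min(old, new) bytes (5227), and free the old one. A minimal usage sketch:

    #include <stddef.h>
    #include <string.h>

    extern void* dlmalloc(size_t bytes);
    extern void* dlrealloc(void* oldmem, size_t bytes);
    extern void  dlfree(void* mem);

    void demo_realloc(void) {
      char* buf = (char*)dlmalloc(16);
      if (buf == 0) return;
      strcpy(buf, "grow me");
      char* bigger = (char*)dlrealloc(buf, 4096);  /* contents preserved up to the old size */
      if (bigger != 0)
        dlfree(bigger);
      else
        dlfree(buf);      /* on failure the original block is still valid */
    }
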
5237 void* mem = 0; in dlrealloc_in_place() local
5259 mem = oldmem; in dlrealloc_in_place()
5264 return mem; in dlrealloc_in_place()
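
dlrealloc_in_place never moves or copies: it returns oldmem (5259) when the chunk can be resized where it sits, and 0 otherwise, leaving the original block untouched. A hedged sketch:

    #include <stddef.h>

    extern void* dlmalloc(size_t bytes);
    extern void* dlrealloc_in_place(void* oldmem, size_t bytes);
    extern void  dlfree(void* mem);

    void demo_realloc_in_place(void) {
      void* p = dlmalloc(256);
      if (p == 0) return;
      if (dlrealloc_in_place(p, 64) != 0) {
        /* Shrunk in place: p is still valid with at least 64 usable bytes. */
      } else {
        /* Could not resize without moving: p is unchanged and keeps its old size. */
      }
      dlfree(p);
    }
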
5275 void* mem = 0; in dlposix_memalign() local
5277 mem = dlmalloc(bytes); in dlposix_memalign()
5286 mem = internal_memalign(gm, alignment, bytes); in dlposix_memalign()
5289 if (mem == 0) in dlposix_memalign()
5292 *pp = mem; in dlposix_memalign()
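
dlposix_memalign takes the fast path through plain dlmalloc when the requested alignment is no stricter than what malloc already guarantees (5277) and falls back to internal_memalign otherwise (5286); *pp is set only on success (5292). A minimal sketch of the POSIX-style interface:

    #include <stddef.h>

    extern int  dlposix_memalign(void** pp, size_t alignment, size_t bytes);
    extern void dlfree(void* mem);

    int demo_posix_memalign(void) {
      void* p = 0;
      /* alignment must be a power of two and a multiple of sizeof(void*) */
      int rc = dlposix_memalign(&p, 64, 1000);
      if (rc != 0)
        return rc;          /* EINVAL or ENOMEM; *pp untouched on failure */
      dlfree(p);
      return 0;
    }
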
5390 size_t dlmalloc_usable_size(void* mem) { in dlmalloc_usable_size() argument
5391 if (mem != 0) { in dlmalloc_usable_size()
5392 mchunkptr p = mem2chunk(mem); in dlmalloc_usable_size()
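
dlmalloc_usable_size reports the payload bytes of the chunk backing mem, which can exceed the requested size because requests are padded up to chunk granularity. A quick sketch:

    #include <assert.h>
    #include <stddef.h>

    extern void*  dlmalloc(size_t bytes);
    extern size_t dlmalloc_usable_size(void* mem);
    extern void   dlfree(void* mem);

    void demo_usable_size(void) {
      void* p = dlmalloc(5);
      if (p != 0) {
        assert(dlmalloc_usable_size(p) >= 5);   /* at least what was requested */
        dlfree(p);
      }
      assert(dlmalloc_usable_size(0) == 0);     /* null is handled, per the check at 5391 */
    }
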
5513 void* mem; in mspace_malloc() local
5530 mem = chunk2mem(p); in mspace_malloc()
5531 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5557 mem = chunk2mem(p); in mspace_malloc()
5558 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5562 else if (ms->treemap != 0 && (mem = tmalloc_small(ms, nb)) != 0) { in mspace_malloc()
5563 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5572 if (ms->treemap != 0 && (mem = tmalloc_large(ms, nb)) != 0) { in mspace_malloc()
5573 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5593 mem = chunk2mem(p); in mspace_malloc()
5594 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5604 mem = chunk2mem(p); in mspace_malloc()
5606 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5610 mem = sys_alloc(ms, nb); in mspace_malloc()
5614 return mem; in mspace_malloc()
5620 void mspace_free(mspace msp, void* mem) { in mspace_free() argument
5621 if (mem != 0) { in mspace_free()
5622 mchunkptr p = mem2chunk(mem); in mspace_free()
5722 void* mem; in mspace_calloc() local
5735 mem = internal_malloc(ms, req); in mspace_calloc()
5736 if (mem != 0 && calloc_must_clear(mem2chunk(mem))) in mspace_calloc()
5737 memset(mem, 0, req); in mspace_calloc()
5738 return mem; in mspace_calloc()
5742 void* mem = 0; in mspace_realloc() local
5744 mem = mspace_malloc(msp, bytes); in mspace_realloc()
5771 mem = chunk2mem(newp); in mspace_realloc()
5774 mem = mspace_malloc(m, bytes); in mspace_realloc()
5775 if (mem != 0) { in mspace_realloc()
5777 memcpy(mem, oldmem, (oc < bytes)? oc : bytes); in mspace_realloc()
5783 return mem; in mspace_realloc()
5787 void* mem = 0; in mspace_realloc_in_place() local
5810 mem = oldmem; in mspace_realloc_in_place()
5815 return mem; in mspace_realloc_in_place()
5966 size_t mspace_usable_size(const void* mem) { in mspace_usable_size() argument
5967 if (mem != 0) { in mspace_usable_size()
5968 mchunkptr p = mem2chunk(mem); in mspace_usable_size()
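
The mspace_malloc/free/calloc/realloc hits above mirror the global dl* paths almost line for line, just parameterized by the mstate behind msp. One more hedged sketch covering the two variants not shown earlier, again assuming an MSPACES build:

    #include <stddef.h>

    typedef void* mspace;
    extern mspace create_mspace(size_t capacity, int locked);
    extern size_t destroy_mspace(mspace msp);
    extern void*  mspace_calloc(mspace msp, size_t n_elements, size_t elem_size);
    extern void*  mspace_realloc_in_place(mspace msp, void* mem, size_t newsize);
    extern void   mspace_free(mspace msp, void* mem);

    void demo_mspace_variants(void) {
      mspace ms = create_mspace(0, 0);
      int* v = (int*)mspace_calloc(ms, 100, sizeof(int));   /* zeroed, same clear rule as dlcalloc */
      if (v != 0) {
        if (mspace_realloc_in_place(ms, v, 50 * sizeof(int)) != 0) {
          /* shrunk where it is; v is unchanged */
        }
        mspace_free(ms, v);
      }
      destroy_mspace(ms);
    }
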