Lines matching references to m (the mstate argument or local) in dlmalloc's malloc.c. Each entry gives the source line number, the matching line, and the enclosing function together with the kind of reference (argument or local).
2754 static msegmentptr segment_holding(mstate m, char* addr) { in segment_holding() argument
2755 msegmentptr sp = &m->seg; in segment_holding()
2765 static int has_segment_link(mstate m, msegmentptr ss) { in has_segment_link() argument
2766 msegmentptr sp = &m->seg; in has_segment_link()
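
Only the lines that mention m survive the filter, so both bodies are cut short above. For orientation, each amounts to a linear walk of the segment list rooted at m->seg, as in stock dlmalloc; a reduced sketch with stand-in types:

    #include <stddef.h>

    /* Stand-in for dlmalloc's struct malloc_segment. */
    typedef struct seg { char* base; size_t size; struct seg* next; } seg;

    /* Mirrors segment_holding(): find the segment containing addr, or NULL. */
    static seg* segment_holding_sketch(seg* sp, char* addr) {
      for (; sp != NULL; sp = sp->next)
        if (addr >= sp->base && addr < sp->base + sp->size)
          return sp;
      return NULL;
    }

    /* Mirrors has_segment_link(): does any segment record live inside ss? */
    static int has_segment_link_sketch(seg* sp, seg* ss) {
      for (; sp != NULL; sp = sp->next)
        if ((char*)sp >= ss->base && (char*)sp < ss->base + ss->size)
          return 1;
      return 0;
    }
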
2827 static void reset_on_error(mstate m);
2829 #define CORRUPTION_ERROR_ACTION(m) reset_on_error(m) argument
2830 #define USAGE_ERROR_ACTION(m, p) argument
2835 #define CORRUPTION_ERROR_ACTION(m) ABORT argument
2839 #define USAGE_ERROR_ACTION(m,p) ABORT argument
2864 static void do_check_any_chunk(mstate m, mchunkptr p);
2865 static void do_check_top_chunk(mstate m, mchunkptr p);
2866 static void do_check_mmapped_chunk(mstate m, mchunkptr p);
2867 static void do_check_inuse_chunk(mstate m, mchunkptr p);
2868 static void do_check_free_chunk(mstate m, mchunkptr p);
2869 static void do_check_malloced_chunk(mstate m, void* mem, size_t s);
2870 static void do_check_tree(mstate m, tchunkptr t);
2871 static void do_check_treebin(mstate m, bindex_t i);
2872 static void do_check_smallbin(mstate m, bindex_t i);
2873 static void do_check_malloc_state(mstate m);
2874 static int bin_find(mstate m, mchunkptr x);
2875 static size_t traverse_and_check(mstate m);
3272 static void do_check_any_chunk(mstate m, mchunkptr p) { in do_check_any_chunk() argument
3274 assert(ok_address(m, p)); in do_check_any_chunk()
3278 static void do_check_top_chunk(mstate m, mchunkptr p) { in do_check_top_chunk() argument
3279 msegmentptr sp = segment_holding(m, (char*)p); in do_check_top_chunk()
3283 assert(ok_address(m, p)); in do_check_top_chunk()
3284 assert(sz == m->topsize); in do_check_top_chunk()
3292 static void do_check_mmapped_chunk(mstate m, mchunkptr p) { in do_check_mmapped_chunk() argument
3296 assert(use_mmap(m)); in do_check_mmapped_chunk()
3298 assert(ok_address(m, p)); in do_check_mmapped_chunk()
3306 static void do_check_inuse_chunk(mstate m, mchunkptr p) { in do_check_inuse_chunk() argument
3307 do_check_any_chunk(m, p); in do_check_inuse_chunk()
3313 do_check_mmapped_chunk(m, p); in do_check_inuse_chunk()
3317 static void do_check_free_chunk(mstate m, mchunkptr p) { in do_check_free_chunk() argument
3320 do_check_any_chunk(m, p); in do_check_free_chunk()
3324 if (p != m->dv && p != m->top) { in do_check_free_chunk()
3330 assert (next == m->top || is_inuse(next)); in do_check_free_chunk()
3340 static void do_check_malloced_chunk(mstate m, void* mem, size_t s) { in do_check_malloced_chunk() argument
3344 do_check_inuse_chunk(m, p); in do_check_malloced_chunk()
3354 static void do_check_tree(mstate m, tchunkptr t) { in do_check_tree() argument
3367 do_check_any_chunk(m, ((mchunkptr)u)); in do_check_tree()
3388 do_check_tree(m, u->child[0]); in do_check_tree()
3393 do_check_tree(m, u->child[1]); in do_check_tree()
3405 static void do_check_treebin(mstate m, bindex_t i) { in do_check_treebin() argument
3406 tbinptr* tb = treebin_at(m, i); in do_check_treebin()
3408 int empty = (m->treemap & (1U << i)) == 0; in do_check_treebin()
3412 do_check_tree(m, t); in do_check_treebin()
3416 static void do_check_smallbin(mstate m, bindex_t i) { in do_check_smallbin() argument
3417 sbinptr b = smallbin_at(m, i); in do_check_smallbin()
3419 unsigned int empty = (m->smallmap & (1U << i)) == 0; in do_check_smallbin()
3427 do_check_free_chunk(m, p); in do_check_smallbin()
3434 do_check_inuse_chunk(m, q); in do_check_smallbin()
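
The do_check_* family above centers on dlmalloc's boundary-tag invariant: a free chunk's size is stored twice, once in its own head and once in the footer read through the successor's prev_foot, and the successor's pinuse bit must be clear. A self-contained model of the core of do_check_free_chunk(), using the real bit layout (PINUSE_BIT == 1, CINUSE_BIT == 2) but stand-in types:

    #include <assert.h>
    #include <stddef.h>

    typedef struct chunk { size_t prev_foot; size_t head; } chunk;
    #define PINUSE_BIT   ((size_t)1)
    #define CINUSE_BIT   ((size_t)2)
    #define CHUNKSIZE(p) ((p)->head & ~(PINUSE_BIT | CINUSE_BIT))

    /* A free chunk's size must appear in its head and again in the
       footer (the successor's prev_foot), and the successor must
       record pinuse == 0. */
    static void check_free_chunk_sketch(chunk* p) {
      size_t sz = CHUNKSIZE(p);
      chunk* next = (chunk*)((char*)p + sz);
      assert(!(p->head & CINUSE_BIT));    /* marked free          */
      assert(!(next->head & PINUSE_BIT)); /* successor agrees     */
      assert(next->prev_foot == sz);      /* footer matches head  */
    }
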
3440 static int bin_find(mstate m, mchunkptr x) { in bin_find() argument
3444 sbinptr b = smallbin_at(m, sidx); in bin_find()
3445 if (smallmap_is_marked(m, sidx)) { in bin_find()
3456 if (treemap_is_marked(m, tidx)) { in bin_find()
3457 tchunkptr t = *treebin_at(m, tidx); in bin_find()
3476 static size_t traverse_and_check(mstate m) { in traverse_and_check() argument
3478 if (is_initialized(m)) { in traverse_and_check()
3479 msegmentptr s = &m->seg; in traverse_and_check()
3480 sum += m->topsize + TOP_FOOT_SIZE; in traverse_and_check()
3486 q != m->top && q->head != FENCEPOST_HEAD) { in traverse_and_check()
3489 assert(!bin_find(m, q)); in traverse_and_check()
3490 do_check_inuse_chunk(m, q); in traverse_and_check()
3493 assert(q == m->dv || bin_find(m, q)); in traverse_and_check()
3495 do_check_free_chunk(m, q); in traverse_and_check()
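
traverse_and_check() and the two statistics routines below share one iteration idiom: within each segment, start at align_as_chunk(s->base) and step to the next physical chunk by adding chunksize(q), stopping at the top chunk or at a trailing fencepost. A reduced sketch of one segment's walk, reusing the same stand-in encoding as above:

    #include <stddef.h>

    typedef struct chunk { size_t prev_foot, head; } chunk;
    #define CHUNKSIZE(p) ((p)->head & ~(size_t)3) /* strip pinuse|cinuse */

    /* Sum the sizes of all chunks in one segment, in-use and free alike. */
    static size_t sum_segment(chunk* q, chunk* top, size_t fencepost_head) {
      size_t sum = 0;
      while (q != top && q->head != fencepost_head) {
        sum += CHUNKSIZE(q);
        q = (chunk*)((char*)q + CHUNKSIZE(q)); /* next physical chunk */
      }
      return sum;
    }
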
3508 static void do_check_malloc_state(mstate m) { in do_check_malloc_state() argument
3513 do_check_smallbin(m, i); in do_check_malloc_state()
3515 do_check_treebin(m, i); in do_check_malloc_state()
3517 if (m->dvsize != 0) { /* check dv chunk */ in do_check_malloc_state()
3518 do_check_any_chunk(m, m->dv); in do_check_malloc_state()
3519 assert(m->dvsize == chunksize(m->dv)); in do_check_malloc_state()
3520 assert(m->dvsize >= MIN_CHUNK_SIZE); in do_check_malloc_state()
3521 assert(bin_find(m, m->dv) == 0); in do_check_malloc_state()
3524 if (m->top != 0) { /* check top chunk */ in do_check_malloc_state()
3525 do_check_top_chunk(m, m->top); in do_check_malloc_state()
3527 assert(m->topsize > 0); in do_check_malloc_state()
3528 assert(bin_find(m, m->top) == 0); in do_check_malloc_state()
3531 total = traverse_and_check(m); in do_check_malloc_state()
3532 assert(total <= m->footprint); in do_check_malloc_state()
3533 assert(m->footprint <= m->max_footprint); in do_check_malloc_state()
3540 static struct mallinfo internal_mallinfo(mstate m) { in internal_mallinfo() argument
3543 if (!PREACTION(m)) { in internal_mallinfo()
3544 check_malloc_state(m); in internal_mallinfo()
3545 if (is_initialized(m)) { in internal_mallinfo()
3547 size_t mfree = m->topsize + TOP_FOOT_SIZE; in internal_mallinfo()
3549 msegmentptr s = &m->seg; in internal_mallinfo()
3553 q != m->top && q->head != FENCEPOST_HEAD) { in internal_mallinfo()
3567 nm.hblkhd = m->footprint - sum; in internal_mallinfo()
3568 nm.usmblks = m->max_footprint; in internal_mallinfo()
3569 nm.uordblks = m->footprint - mfree; in internal_mallinfo()
3571 nm.keepcost = m->topsize; in internal_mallinfo()
3574 POSTACTION(m); in internal_mallinfo()
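
internal_mallinfo() backs the public dlmallinfo() entry point when NO_MALLINFO is unset. A minimal caller, assuming dlmalloc supplies its own struct mallinfo with the default MALLINFO_FIELD_TYPE of size_t (builds that pick up a system struct mallinfo differ):

    #include <stdio.h>
    #include <stddef.h>

    struct mallinfo {
      size_t arena;    /* non-mmapped space allocated from system */
      size_t ordblks;  /* number of free chunks */
      size_t smblks;   /* always 0 */
      size_t hblks;    /* always 0 */
      size_t hblkhd;   /* space in mmapped regions */
      size_t usmblks;  /* maximum total allocated space */
      size_t fsmblks;  /* always 0 */
      size_t uordblks; /* total allocated space */
      size_t fordblks; /* total free space */
      size_t keepcost; /* releasable (via trim) space */
    };
    struct mallinfo dlmallinfo(void);

    void print_heap_stats(void) {
      struct mallinfo mi = dlmallinfo();
      printf("arena=%zu in-use=%zu free=%zu trimmable=%zu\n",
             mi.arena, mi.uordblks, mi.fordblks, mi.keepcost);
    }
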
3581 static void internal_malloc_stats(mstate m) { in internal_malloc_stats() argument
3583 if (!PREACTION(m)) { in internal_malloc_stats()
3587 check_malloc_state(m); in internal_malloc_stats()
3588 if (is_initialized(m)) { in internal_malloc_stats()
3589 msegmentptr s = &m->seg; in internal_malloc_stats()
3590 maxfp = m->max_footprint; in internal_malloc_stats()
3591 fp = m->footprint; in internal_malloc_stats()
3592 used = fp - (m->topsize + TOP_FOOT_SIZE); in internal_malloc_stats()
3597 q != m->top && q->head != FENCEPOST_HEAD) { in internal_malloc_stats()
3605 POSTACTION(m); /* drop lock */ in internal_malloc_stats()
3854 #define internal_malloc(m, b) mspace_malloc(m, b) argument
3855 #define internal_free(m, mem) mspace_free(m,mem); argument
3858 #define internal_malloc(m, b)\ argument
3859 ((m == gm)? dlmalloc(b) : mspace_malloc(m, b))
3860 #define internal_free(m, mem)\ argument
3861 if (m == gm) dlfree(mem); else mspace_free(m,mem);
3863 #define internal_malloc(m, b) dlmalloc(b) argument
3864 #define internal_free(m, mem) dlfree(mem) argument
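
The six defines above are three mutually exclusive pairs selected by nested preprocessor tests; stitched back together as in stock dlmalloc, the block reads:

    #if ONLY_MSPACES
    #define internal_malloc(m, b) mspace_malloc(m, b)
    #define internal_free(m, mem) mspace_free(m,mem);
    #else /* ONLY_MSPACES */
    #if MSPACES
    #define internal_malloc(m, b)\
      ((m == gm)? dlmalloc(b) : mspace_malloc(m, b))
    #define internal_free(m, mem)\
      if (m == gm) dlfree(mem); else mspace_free(m,mem);
    #else /* MSPACES */
    #define internal_malloc(m, b) dlmalloc(b)
    #define internal_free(m, mem) dlfree(mem)
    #endif /* MSPACES */
    #endif /* ONLY_MSPACES */
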
3879 static void* mmap_alloc(mstate m, size_t nb) { in mmap_alloc() argument
3881 if (m->footprint_limit != 0) { in mmap_alloc()
3882 size_t fp = m->footprint + mmsize; in mmap_alloc()
3883 if (fp <= m->footprint || fp > m->footprint_limit) in mmap_alloc()
3894 mark_inuse_foot(m, p, psize); in mmap_alloc()
3898 if (m->least_addr == 0 || mm < m->least_addr) in mmap_alloc()
3899 m->least_addr = mm; in mmap_alloc()
3900 if ((m->footprint += mmsize) > m->max_footprint) in mmap_alloc()
3901 m->max_footprint = m->footprint; in mmap_alloc()
3903 check_mmapped_chunk(m, p); in mmap_alloc()
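
The footprint_limit test at the top of mmap_alloc() (repeated in sys_alloc()) is written to survive size_t wraparound: if footprint + mmsize overflows, the sum comes out <= footprint and the request is refused. Isolated as a predicate, under the same convention that a zero limit means unlimited:

    #include <stddef.h>

    /* 1 if footprint + request stays within limit (0 = unlimited),
       0 on overflow or overrun -- the test mmap_alloc() applies. */
    static int footprint_ok(size_t footprint, size_t request, size_t limit) {
      if (limit != 0) {
        size_t fp = footprint + request;
        if (fp <= footprint || fp > limit)
          return 0;
      }
      return 1;
    }
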
3911 static mchunkptr mmap_resize(mstate m, mchunkptr oldp, size_t nb, int flags) { in mmap_resize() argument
3930 mark_inuse_foot(m, newp, psize); in mmap_resize()
3934 if (cp < m->least_addr) in mmap_resize()
3935 m->least_addr = cp; in mmap_resize()
3936 if ((m->footprint += newmmsize - oldmmsize) > m->max_footprint) in mmap_resize()
3937 m->max_footprint = m->footprint; in mmap_resize()
3938 check_mmapped_chunk(m, newp); in mmap_resize()
3949 static void init_top(mstate m, mchunkptr p, size_t psize) { in init_top() argument
3955 m->top = p; in init_top()
3956 m->topsize = psize; in init_top()
3960 m->trim_check = mparams.trim_threshold; /* reset on each update */ in init_top()
3964 static void init_bins(mstate m) { in init_bins() argument
3968 sbinptr bin = smallbin_at(m,i); in init_bins()
3976 static void reset_on_error(mstate m) { in reset_on_error() argument
3980 m->smallmap = m->treemap = 0; in reset_on_error()
3981 m->dvsize = m->topsize = 0; in reset_on_error()
3982 m->seg.base = 0; in reset_on_error()
3983 m->seg.size = 0; in reset_on_error()
3984 m->seg.next = 0; in reset_on_error()
3985 m->top = m->dv = 0; in reset_on_error()
3987 *treebin_at(m, i) = 0; in reset_on_error()
3988 init_bins(m); in reset_on_error()
3993 static void* prepend_alloc(mstate m, char* newbase, char* oldbase, in prepend_alloc() argument
4000 set_size_and_pinuse_of_inuse_chunk(m, p, nb); in prepend_alloc()
4007 if (oldfirst == m->top) { in prepend_alloc()
4008 size_t tsize = m->topsize += qsize; in prepend_alloc()
4009 m->top = q; in prepend_alloc()
4011 check_top_chunk(m, q); in prepend_alloc()
4013 else if (oldfirst == m->dv) { in prepend_alloc()
4014 size_t dsize = m->dvsize += qsize; in prepend_alloc()
4015 m->dv = q; in prepend_alloc()
4021 unlink_chunk(m, oldfirst, nsize); in prepend_alloc()
4026 insert_chunk(m, q, qsize); in prepend_alloc()
4027 check_free_chunk(m, q); in prepend_alloc()
4030 check_malloced_chunk(m, chunk2mem(p), nb); in prepend_alloc()
4035 static void add_segment(mstate m, char* tbase, size_t tsize, flag_t mmapped) { in add_segment() argument
4037 char* old_top = (char*)m->top; in add_segment()
4038 msegmentptr oldsp = segment_holding(m, old_top); in add_segment()
4052 init_top(m, (mchunkptr)tbase, tsize - TOP_FOOT_SIZE); in add_segment()
4056 set_size_and_pinuse_of_inuse_chunk(m, sp, ssize); in add_segment()
4057 *ss = m->seg; /* Push current record */ in add_segment()
4058 m->seg.base = tbase; in add_segment()
4059 m->seg.size = tsize; in add_segment()
4060 m->seg.sflags = mmapped; in add_segment()
4061 m->seg.next = ss; in add_segment()
4081 insert_chunk(m, q, psize); in add_segment()
4084 check_top_chunk(m, m->top); in add_segment()
4090 static void* sys_alloc(mstate m, size_t nb) { in sys_alloc() argument
4099 if (use_mmap(m) && nb >= mparams.mmap_threshold && m->topsize != 0) { in sys_alloc()
4100 void* mem = mmap_alloc(m, nb); in sys_alloc()
4108 if (m->footprint_limit != 0) { in sys_alloc()
4109 size_t fp = m->footprint + asize; in sys_alloc()
4110 if (fp <= m->footprint || fp > m->footprint_limit) in sys_alloc()
4136 if (MORECORE_CONTIGUOUS && !use_noncontiguous(m)) { in sys_alloc()
4139 msegmentptr ss = (m->top == 0)? 0 : segment_holding(m, (char*)m->top); in sys_alloc()
4149 fp = m->footprint + ssize; /* recheck limits */ in sys_alloc()
4151 (m->footprint_limit == 0 || in sys_alloc()
4152 (fp > m->footprint && fp <= m->footprint_limit)) && in sys_alloc()
4161 ssize = granularity_align(nb - m->topsize + SYS_ALLOC_PADDING); in sys_alloc()
4191 disable_contiguous(m); /* Don't try contiguous path in the future */ in sys_alloc()
4226 if ((m->footprint += tsize) > m->max_footprint) in sys_alloc()
4227 m->max_footprint = m->footprint; in sys_alloc()
4229 if (!is_initialized(m)) { /* first-time initialization */ in sys_alloc()
4230 if (m->least_addr == 0 || tbase < m->least_addr) in sys_alloc()
4231 m->least_addr = tbase; in sys_alloc()
4232 m->seg.base = tbase; in sys_alloc()
4233 m->seg.size = tsize; in sys_alloc()
4234 m->seg.sflags = mmap_flag; in sys_alloc()
4235 m->magic = mparams.magic; in sys_alloc()
4236 m->release_checks = MAX_RELEASE_CHECK_RATE; in sys_alloc()
4237 init_bins(m); in sys_alloc()
4239 if (is_global(m)) in sys_alloc()
4240 init_top(m, (mchunkptr)tbase, tsize - TOP_FOOT_SIZE); in sys_alloc()
4245 mchunkptr mn = next_chunk(mem2chunk(m)); in sys_alloc()
4246 init_top(m, mn, (size_t)((tbase + tsize) - (char*)mn) -TOP_FOOT_SIZE); in sys_alloc()
4252 msegmentptr sp = &m->seg; in sys_alloc()
4259 segment_holds(sp, m->top)) { /* append */ in sys_alloc()
4261 init_top(m, m->top, m->topsize + tsize); in sys_alloc()
4264 if (tbase < m->least_addr) in sys_alloc()
4265 m->least_addr = tbase; in sys_alloc()
4266 sp = &m->seg; in sys_alloc()
4275 return prepend_alloc(m, tbase, oldbase, nb); in sys_alloc()
4278 add_segment(m, tbase, tsize, mmap_flag); in sys_alloc()
4282 if (nb < m->topsize) { /* Allocate from new or extended top space */ in sys_alloc()
4283 size_t rsize = m->topsize -= nb; in sys_alloc()
4284 mchunkptr p = m->top; in sys_alloc()
4285 mchunkptr r = m->top = chunk_plus_offset(p, nb); in sys_alloc()
4287 set_size_and_pinuse_of_inuse_chunk(m, p, nb); in sys_alloc()
4288 check_top_chunk(m, m->top); in sys_alloc()
4289 check_malloced_chunk(m, chunk2mem(p), nb); in sys_alloc()
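
The closing lines of sys_alloc() carve the request out of the (possibly just extended) top chunk. The split arithmetic, in stand-in form: the remainder becomes the new top at offset nb and keeps its pinuse bit, since the allocated chunk below it is in use. Assumes nb < *topsizep and chunk-aligned sizes, as the caller guarantees:

    #include <stddef.h>

    typedef struct chunk { size_t prev_foot, head; } chunk;
    #define PINUSE ((size_t)1)
    #define CINUSE ((size_t)2)

    /* Split nb bytes off the bottom of top; returns the allocated chunk
       and updates *topp / *topsizep, as sys_alloc() does. */
    static chunk* split_top(chunk** topp, size_t* topsizep, size_t nb) {
      chunk* p = *topp;
      size_t rsize = *topsizep - nb;
      chunk* r = (chunk*)((char*)p + nb);
      *topp = r;
      *topsizep = rsize;
      r->head = rsize | PINUSE;        /* new top: predecessor in use */
      p->head = nb | PINUSE | CINUSE;  /* allocated chunk of size nb  */
      return p;
    }
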
4301 static size_t release_unused_segments(mstate m) { in release_unused_segments() argument
4304 msegmentptr pred = &m->seg; in release_unused_segments()
4318 if (p == m->dv) { in release_unused_segments()
4319 m->dv = 0; in release_unused_segments()
4320 m->dvsize = 0; in release_unused_segments()
4323 unlink_large_chunk(m, tp); in release_unused_segments()
4327 m->footprint -= size; in release_unused_segments()
4333 insert_large_chunk(m, tp, psize); in release_unused_segments()
4343 m->release_checks = (((size_t) nsegs > (size_t) MAX_RELEASE_CHECK_RATE)? in release_unused_segments()
4348 static int sys_trim(mstate m, size_t pad) { in sys_trim() argument
4351 if (pad < MAX_REQUEST && is_initialized(m)) { in sys_trim()
4354 if (m->topsize > pad) { in sys_trim()
4357 size_t extra = ((m->topsize - pad + (unit - SIZE_T_ONE)) / unit - in sys_trim()
4359 msegmentptr sp = segment_holding(m, (char*)m->top); in sys_trim()
4365 !has_segment_link(m, sp)) { /* can't shrink if pinned */ in sys_trim()
4395 m->footprint -= released; in sys_trim()
4396 init_top(m, m->top, m->topsize - released); in sys_trim()
4397 check_top_chunk(m, m->top); in sys_trim()
4403 released += release_unused_segments(m); in sys_trim()
4406 if (released == 0 && m->topsize > m->trim_check) in sys_trim()
4407 m->trim_check = MAX_SIZE_T; in sys_trim()
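
The extra computation in sys_trim() rounds the releasable slack down to whole granularity units while always keeping at least one unit (plus pad) in top. Worked through: with topsize = 1,000,000, pad = 4,096, and unit = 65,536, (1000000 - 4096 + 65535) / 65536 = 16 units, minus one, times 65536 = 983,040 bytes eligible for release. The same formula, runnable:

    #include <stdio.h>
    #include <stddef.h>

    /* The slack computation from sys_trim(); only called when
       topsize > pad, so the subtraction cannot underflow. */
    static size_t trim_extra(size_t topsize, size_t pad, size_t unit) {
      return ((topsize - pad + (unit - 1)) / unit - 1) * unit;
    }

    int main(void) {
      printf("%zu\n", trim_extra(1000000, 4096, 65536)); /* 983040 */
      return 0;
    }
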
4416 static void dispose_chunk(mstate m, mchunkptr p, size_t psize) { in dispose_chunk() argument
4424 m->footprint -= psize; in dispose_chunk()
4430 if (RTCHECK(ok_address(m, prev))) { /* consolidate backward */ in dispose_chunk()
4431 if (p != m->dv) { in dispose_chunk()
4432 unlink_chunk(m, p, prevsize); in dispose_chunk()
4435 m->dvsize = psize; in dispose_chunk()
4441 CORRUPTION_ERROR_ACTION(m); in dispose_chunk()
4445 if (RTCHECK(ok_address(m, next))) { in dispose_chunk()
4447 if (next == m->top) { in dispose_chunk()
4448 size_t tsize = m->topsize += psize; in dispose_chunk()
4449 m->top = p; in dispose_chunk()
4451 if (p == m->dv) { in dispose_chunk()
4452 m->dv = 0; in dispose_chunk()
4453 m->dvsize = 0; in dispose_chunk()
4457 else if (next == m->dv) { in dispose_chunk()
4458 size_t dsize = m->dvsize += psize; in dispose_chunk()
4459 m->dv = p; in dispose_chunk()
4466 unlink_chunk(m, next, nsize); in dispose_chunk()
4468 if (p == m->dv) { in dispose_chunk()
4469 m->dvsize = psize; in dispose_chunk()
4477 insert_chunk(m, p, psize); in dispose_chunk()
4480 CORRUPTION_ERROR_ACTION(m); in dispose_chunk()
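
dispose_chunk() first coalesces backward, absorbing a free predecessor located through prev_foot, then forward, absorbing top, dv, or a binned successor before inserting the result. The backward step's pointer arithmetic in stand-in form (ignoring the mmapped-chunk branch):

    #include <stddef.h>

    typedef struct chunk { size_t prev_foot, head; } chunk;
    #define PINUSE ((size_t)1)

    /* If the previous chunk is free (our pinuse bit clear), merge it in:
       move the chunk pointer back and grow *psize, as dispose_chunk()
       does before the top/dv/bin cases. */
    static chunk* coalesce_backward(chunk* p, size_t* psize) {
      if (!(p->head & PINUSE)) {
        size_t prevsize = p->prev_foot;
        chunk* prev = (chunk*)((char*)p - prevsize);
        *psize += prevsize;
        return prev;
      }
      return p;
    }
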
4487 static void* tmalloc_large(mstate m, size_t nb) { in tmalloc_large() argument
4493 if ((t = *treebin_at(m, idx)) != 0) { in tmalloc_large()
4517 binmap_t leftbits = left_bits(idx2bit(idx)) & m->treemap; in tmalloc_large()
4522 t = *treebin_at(m, i); in tmalloc_large()
4536 if (v != 0 && rsize < (size_t)(m->dvsize - nb)) { in tmalloc_large()
4537 if (RTCHECK(ok_address(m, v))) { /* split */ in tmalloc_large()
4541 unlink_large_chunk(m, v); in tmalloc_large()
4543 set_inuse_and_pinuse(m, v, (rsize + nb)); in tmalloc_large()
4545 set_size_and_pinuse_of_inuse_chunk(m, v, nb); in tmalloc_large()
4547 insert_chunk(m, r, rsize); in tmalloc_large()
4552 CORRUPTION_ERROR_ACTION(m); in tmalloc_large()
4558 static void* tmalloc_small(mstate m, size_t nb) { in tmalloc_small() argument
4562 binmap_t leastbit = least_bit(m->treemap); in tmalloc_small()
4564 v = t = *treebin_at(m, i); in tmalloc_small()
4575 if (RTCHECK(ok_address(m, v))) { in tmalloc_small()
4579 unlink_large_chunk(m, v); in tmalloc_small()
4581 set_inuse_and_pinuse(m, v, (rsize + nb)); in tmalloc_small()
4583 set_size_and_pinuse_of_inuse_chunk(m, v, nb); in tmalloc_small()
4585 replace_dv(m, r, rsize); in tmalloc_small()
4591 CORRUPTION_ERROR_ACTION(m); in tmalloc_small()
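
Both tree searches steer by bitmaps: tmalloc_small() grabs the least set bit of m->treemap, and tmalloc_large() masks away all bins at or below idx with left_bits(). As defined in stock dlmalloc (x must be nonzero for least_bit):

    /* isolate the least significant set bit of x */
    #define least_bit(x) ((x) & -(x))

    /* mask with all bits to the left of the least bit of x on */
    #define left_bits(x) ((x<<1) | -(x<<1))

    /* e.g. least_bit(0b101000) == 0b1000;
       left_bits(0b1000) == ~0b1111 (bit 4 and everything above). */
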
4864 static mchunkptr try_realloc_chunk(mstate m, mchunkptr p, size_t nb, in try_realloc_chunk() argument
4869 if (RTCHECK(ok_address(m, p) && ok_inuse(p) && in try_realloc_chunk()
4872 newp = mmap_resize(m, p, nb, can_move); in try_realloc_chunk()
4878 set_inuse(m, p, nb); in try_realloc_chunk()
4879 set_inuse(m, r, rsize); in try_realloc_chunk()
4880 dispose_chunk(m, r, rsize); in try_realloc_chunk()
4884 else if (next == m->top) { /* extend into top */ in try_realloc_chunk()
4885 if (oldsize + m->topsize > nb) { in try_realloc_chunk()
4886 size_t newsize = oldsize + m->topsize; in try_realloc_chunk()
4889 set_inuse(m, p, nb); in try_realloc_chunk()
4891 m->top = newtop; in try_realloc_chunk()
4892 m->topsize = newtopsize; in try_realloc_chunk()
4896 else if (next == m->dv) { /* extend into dv */ in try_realloc_chunk()
4897 size_t dvs = m->dvsize; in try_realloc_chunk()
4903 set_inuse(m, p, nb); in try_realloc_chunk()
4906 m->dvsize = dsize; in try_realloc_chunk()
4907 m->dv = r; in try_realloc_chunk()
4911 set_inuse(m, p, newsize); in try_realloc_chunk()
4912 m->dvsize = 0; in try_realloc_chunk()
4913 m->dv = 0; in try_realloc_chunk()
4922 unlink_chunk(m, next, nextsize); in try_realloc_chunk()
4925 set_inuse(m, p, newsize); in try_realloc_chunk()
4929 set_inuse(m, p, nb); in try_realloc_chunk()
4930 set_inuse(m, r, rsize); in try_realloc_chunk()
4931 dispose_chunk(m, r, rsize); in try_realloc_chunk()
4938 USAGE_ERROR_ACTION(m, chunk2mem(p)); in try_realloc_chunk()
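
try_realloc_chunk()'s dv case shows the split-or-absorb pattern used throughout: if the combined size leaves at least MIN_CHUNK_SIZE over, split and keep the remainder as the new dv; otherwise absorb the whole dv and clear it. In stand-in arithmetic (MIN_CHUNK_SIZE is 32 on typical 64-bit builds; illustrative here, the real value is derived from the chunk struct and alignment):

    #include <stddef.h>

    #define MIN_CHUNK_SIZE 32 /* illustrative */

    /* Grow a chunk of oldsize into an adjacent dv of dvsize to satisfy
       nb bytes; returns the chunk's new size and leaves the remainder
       (possibly 0) in *new_dvsize. Assumes oldsize + dvsize >= nb. */
    static size_t grow_into_dv(size_t oldsize, size_t dvsize, size_t nb,
                               size_t* new_dvsize) {
      size_t dsize = oldsize + dvsize - nb;
      if (dsize >= MIN_CHUNK_SIZE) {
        *new_dvsize = dsize;   /* split: remainder stays the dv chunk  */
        return nb;
      }
      *new_dvsize = 0;         /* too small to split: absorb all of dv */
      return oldsize + dvsize;
    }
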
4943 static void* internal_memalign(mstate m, size_t alignment, size_t bytes) { in internal_memalign() argument
4953 if (m != 0) { /* Test isn't needed but avoids compiler warning */ in internal_memalign()
4960 mem = internal_malloc(m, req); in internal_memalign()
4963 if (PREACTION(m)) in internal_memalign()
4988 set_inuse(m, newp, newsize); in internal_memalign()
4989 set_inuse(m, p, leadsize); in internal_memalign()
4990 dispose_chunk(m, p, leadsize); in internal_memalign()
5001 set_inuse(m, p, nb); in internal_memalign()
5002 set_inuse(m, remainder, remainder_size); in internal_memalign()
5003 dispose_chunk(m, remainder, remainder_size); in internal_memalign()
5010 check_inuse_chunk(m, p); in internal_memalign()
5011 POSTACTION(m); in internal_memalign()
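
internal_memalign() over-allocates, rounds the returned address up to an alignment boundary, and gives back the leading and trailing slop via dispose_chunk(). The rounding step, assuming a power-of-two alignment (the real code promotes smaller or non-power-of-two requests first):

    #include <stddef.h>
    #include <stdint.h>

    /* Round addr up to the next multiple of alignment (a power of two). */
    static inline uintptr_t align_up(uintptr_t addr, size_t alignment) {
      return (addr + alignment - 1) & ~((uintptr_t)alignment - 1);
    }
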
5024 static void** ialloc(mstate m, in ialloc() argument
5053 return (void**)internal_malloc(m, 0); in ialloc()
5077 was_enabled = use_mmap(m); in ialloc()
5078 disable_mmap(m); in ialloc()
5079 mem = internal_malloc(m, size - CHUNK_OVERHEAD); in ialloc()
5081 enable_mmap(m); in ialloc()
5085 if (PREACTION(m)) return 0; in ialloc()
5101 set_size_and_pinuse_of_inuse_chunk(m, array_chunk, array_chunk_size); in ialloc()
5114 set_size_and_pinuse_of_inuse_chunk(m, p, size); in ialloc()
5118 set_size_and_pinuse_of_inuse_chunk(m, p, remainder_size); in ialloc()
5132 check_inuse_chunk(m, mem2chunk(marray)); in ialloc()
5135 check_inuse_chunk(m, mem2chunk(marray[i])); in ialloc()
5139 POSTACTION(m); in ialloc()
5150 static size_t internal_bulk_free(mstate m, void* array[], size_t nelem) { in internal_bulk_free() argument
5152 if (!PREACTION(m)) { in internal_bulk_free()
5161 if (get_mstate_for(p) != m) { in internal_bulk_free()
5166 check_inuse_chunk(m, p); in internal_bulk_free()
5168 if (RTCHECK(ok_address(m, p) && ok_inuse(p))) { in internal_bulk_free()
5173 set_inuse(m, p, newsize); in internal_bulk_free()
5177 dispose_chunk(m, p, psize); in internal_bulk_free()
5180 CORRUPTION_ERROR_ACTION(m); in internal_bulk_free()
5185 if (should_trim(m, m->topsize)) in internal_bulk_free()
5186 sys_trim(m, 0); in internal_bulk_free()
5187 POSTACTION(m); in internal_bulk_free()
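
internal_bulk_free() implements dlbulk_free(), which frees an array of pointers under a single PREACTION/POSTACTION round-trip and, as the listing shows, considers trimming at most once at the end. Typical use, assuming the standard dlmalloc declarations:

    #include <stddef.h>

    void*  dlmalloc(size_t);
    size_t dlbulk_free(void**, size_t); /* returns # of pointers NOT freed */

    void demo(void) {
      void* ptrs[8];
      for (size_t i = 0; i < 8; i++)
        ptrs[i] = dlmalloc(64);
      (void)dlbulk_free(ptrs, 8); /* one lock acquisition, one trim check */
    }
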
5194 static void internal_inspect_all(mstate m, in internal_inspect_all() argument
5200 if (is_initialized(m)) { in internal_inspect_all()
5201 mchunkptr top = m->top; in internal_inspect_all()
5203 for (s = &m->seg; s != 0; s = s->next) { in internal_inspect_all()
5255 mstate m = gm; in dlrealloc() local
5257 mstate m = get_mstate_for(oldp); in dlrealloc() local
5258 if (!ok_magic(m)) { in dlrealloc()
5259 USAGE_ERROR_ACTION(m, oldmem); in dlrealloc()
5263 if (!PREACTION(m)) { in dlrealloc()
5264 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 1); in dlrealloc()
5265 POSTACTION(m); in dlrealloc()
5267 check_inuse_chunk(m, newp); in dlrealloc()
5271 mem = internal_malloc(m, bytes); in dlrealloc()
5275 internal_free(m, oldmem); in dlrealloc()
5293 mstate m = gm; in dlrealloc_in_place() local
5295 mstate m = get_mstate_for(oldp); in dlrealloc_in_place() local
5296 if (!ok_magic(m)) { in dlrealloc_in_place()
5297 USAGE_ERROR_ACTION(m, oldmem); in dlrealloc_in_place()
5301 if (!PREACTION(m)) { in dlrealloc_in_place()
5302 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 0); in dlrealloc_in_place()
5303 POSTACTION(m); in dlrealloc_in_place()
5305 check_inuse_chunk(m, newp); in dlrealloc_in_place()
5456 mstate m = (mstate)(chunk2mem(msp)); in init_user_mstate() local
5457 memset(m, 0, msize); in init_user_mstate()
5458 (void)INITIAL_LOCK(&m->mutex); in init_user_mstate()
5460 m->seg.base = m->least_addr = tbase; in init_user_mstate()
5461 m->seg.size = m->footprint = m->max_footprint = tsize; in init_user_mstate()
5462 m->magic = mparams.magic; in init_user_mstate()
5463 m->release_checks = MAX_RELEASE_CHECK_RATE; in init_user_mstate()
5464 m->mflags = mparams.default_mflags; in init_user_mstate()
5465 m->extp = 0; in init_user_mstate()
5466 m->exts = 0; in init_user_mstate()
5467 disable_contiguous(m); in init_user_mstate()
5468 init_bins(m); in init_user_mstate()
5469 mn = next_chunk(mem2chunk(m)); in init_user_mstate()
5470 init_top(m, mn, (size_t)((tbase + tsize) - (char*)mn) - TOP_FOOT_SIZE); in init_user_mstate()
5471 check_top_chunk(m, m->top); in init_user_mstate()
5472 return m; in init_user_mstate()
5476 mstate m = 0; in create_mspace() local
5486 m = init_user_mstate(tbase, tsize); in create_mspace()
5487 m->seg.sflags = USE_MMAP_BIT; in create_mspace()
5488 set_lock(m, locked); in create_mspace()
5491 return (mspace)m; in create_mspace()
5495 mstate m = 0; in create_mspace_with_base() local
5501 m = init_user_mstate((char*)base, capacity); in create_mspace_with_base()
5502 m->seg.sflags = EXTERN_BIT; in create_mspace_with_base()
5503 set_lock(m, locked); in create_mspace_with_base()
5505 return (mspace)m; in create_mspace_with_base()
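
create_mspace() maps a region, plants an mstate inside it via init_user_mstate(), and hands it back as an opaque mspace; create_mspace_with_base() does the same over caller-supplied memory (EXTERN_BIT). Basic use of the public API:

    #include <stddef.h>

    typedef void* mspace;
    mspace create_mspace(size_t capacity, int locked);
    size_t destroy_mspace(mspace msp);
    void*  mspace_malloc(mspace msp, size_t bytes);
    void   mspace_free(mspace msp, void* mem);

    void demo(void) {
      mspace ms = create_mspace(0, 1);  /* default capacity, with locking */
      void* p = mspace_malloc(ms, 128);
      mspace_free(ms, p);
      destroy_mspace(ms);               /* releases the whole space at once */
    }
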
5805 mstate m = (mstate)msp; in mspace_realloc() local
5807 mstate m = get_mstate_for(oldp); in mspace_realloc() local
5808 if (!ok_magic(m)) { in mspace_realloc()
5809 USAGE_ERROR_ACTION(m, oldmem); in mspace_realloc()
5813 if (!PREACTION(m)) { in mspace_realloc()
5814 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 1); in mspace_realloc()
5815 POSTACTION(m); in mspace_realloc()
5817 check_inuse_chunk(m, newp); in mspace_realloc()
5821 mem = mspace_malloc(m, bytes); in mspace_realloc()
5825 mspace_free(m, oldmem); in mspace_realloc()
5843 mstate m = (mstate)msp; in mspace_realloc_in_place() local
5845 mstate m = get_mstate_for(oldp); in mspace_realloc_in_place() local
5847 if (!ok_magic(m)) { in mspace_realloc_in_place()
5848 USAGE_ERROR_ACTION(m, oldmem); in mspace_realloc_in_place()
5852 if (!PREACTION(m)) { in mspace_realloc_in_place()
5853 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 0); in mspace_realloc_in_place()
5854 POSTACTION(m); in mspace_realloc_in_place()
5856 check_inuse_chunk(m, newp); in mspace_realloc_in_place()