/third_party/ulib/backtrace/

  mmap.c
      64: size_t size;  [member]
      79: p->size = size;  [in backtrace_free_locked()]
      115: if ((*pp)->size >= size)  [in backtrace_alloc()]
      124: size = (size + 7) & ~ (size_t) 7;  [in backtrace_alloc()]
      125: if (size < p->size)  [in backtrace_alloc()]
      127: p->size - size);  [in backtrace_alloc()]
      154: size = (size + 7) & ~ (size_t) 7;  [in backtrace_alloc()]
      230: alc = vec->size + size;  [in backtrace_vector_grow()]
      258: vec->size += size;  [in backtrace_vector_grow()]
      288: size_t size;  [in backtrace_vector_release(), local]
      [all …]
  alloc.c
      57: ret = malloc (size);  [in backtrace_alloc()]
      86: if (size > vec->alc)  [in backtrace_vector_grow()]
      91: if (vec->size == 0)  [in backtrace_vector_grow()]
      92: alc = 32 * size;  [in backtrace_vector_grow()]
      94: alc = vec->size + 4096;  [in backtrace_vector_grow()]
      96: alc = 2 * vec->size;  [in backtrace_vector_grow()]
      98: if (alc < vec->size + size)  [in backtrace_vector_grow()]
      99: alc = vec->size + size;  [in backtrace_vector_grow()]
      113: vec->size += size;  [in backtrace_vector_grow()]
      114: vec->alc -= size;  [in backtrace_vector_grow()]
      [all …]
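The backtrace_vector_grow() hits above outline libbacktrace's growth policy for its append-only vectors. The following is a minimal sketch of that policy, assuming a simplified stand-in struct rather than the real struct backtrace_vector, dropping the error-callback plumbing, and inferring the branch thresholds from the fragments shown.

```c
#include <stdlib.h>
#include <stddef.h>

/* Simplified stand-in for the vector: base buffer, bytes in use,
   and bytes allocated but not yet used. */
struct vec_sketch {
    void *base;
    size_t size;   /* bytes currently in use */
    size_t alc;    /* bytes still free after the used region */
};

/* Reserve "size" more bytes and return a pointer to them, growing the
   buffer with the policy visible above: 32 requests' worth for an empty
   vector, a 4096-byte bump for small vectors, doubling for large ones,
   and never less than what the caller asked for. */
static void *vec_grow_sketch(struct vec_sketch *vec, size_t size) {
    if (size > vec->alc) {
        size_t alc;

        if (vec->size == 0)
            alc = 32 * size;
        else if (vec->size < 4096)
            alc = vec->size + 4096;
        else
            alc = 2 * vec->size;
        if (alc < vec->size + size)
            alc = vec->size + size;

        void *base = realloc(vec->base, alc);
        if (base == NULL)
            return NULL;
        vec->base = base;
        vec->alc = alc - vec->size;
    }

    void *ret = (char *)vec->base + vec->size;
    vec->size += size;
    vec->alc -= size;
    return ret;
}
```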
  sort.c
      46: swap (char *a, char *b, size_t size)  [in swap(), argument]
      50: for (i = 0; i < size; i++, a++, b++)  [in swap()]
      61: backtrace_qsort (void *basearg, size_t count, size_t size,  [in backtrace_qsort(), argument]
      76: swap (base, base + (count / 2) * size, size);  [in backtrace_qsort()]
      81: if ((*compar) (base, base + i * size) > 0)  [in backtrace_qsort()]
      85: swap (base + mid * size, base + i * size, size);  [in backtrace_qsort()]
      90: swap (base, base + mid * size, size);  [in backtrace_qsort()]
      96: backtrace_qsort (base, mid, size, compar);  [in backtrace_qsort()]
      97: base += (mid + 1) * size;  [in backtrace_qsort()]
      103: backtrace_qsort (base + (mid + 1) * size, count - (mid + 1),  [in backtrace_qsort()]
      [all …]
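The sort.c hits show the shape of backtrace_qsort(): a byte-wise swap() helper, the middle element moved to the front as the pivot, one recursive call, and a loop over the remaining partition. Below is a self-contained quicksort sketch in that style; it illustrates the technique rather than reproducing the libbacktrace routine, which additionally recurses on the smaller partition to bound stack depth.

```c
#include <stddef.h>

/* Byte-wise swap of two elements; the sorter only knows the element size. */
static void swap_bytes(char *a, char *b, size_t size) {
    for (size_t i = 0; i < size; i++, a++, b++) {
        char t = *a;
        *a = *b;
        *b = t;
    }
}

static void sketch_qsort(void *basearg, size_t count, size_t size,
                         int (*compar)(const void *, const void *)) {
    char *base = (char *)basearg;

    while (count > 1) {
        /* Move the middle element to the front and use it as the pivot. */
        swap_bytes(base, base + (count / 2) * size, size);

        /* Lomuto-style partition: elements smaller than the pivot move
           into positions 1..mid. */
        size_t mid = 0;
        for (size_t i = 1; i < count; i++) {
            if (compar(base, base + i * size) > 0) {
                mid++;
                swap_bytes(base + mid * size, base + i * size, size);
            }
        }

        /* Put the pivot into its final slot. */
        swap_bytes(base, base + mid * size, size);

        /* Recurse on the left partition, iterate on the right one. */
        sketch_qsort(base, mid, size, compar);
        base += (mid + 1) * size;
        count -= mid + 1;
    }
}
```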
/third_party/ulib/jemalloc/src/

  jemalloc_cpp.cpp
      18: void *operator new(std::size_t size);
      19: void *operator new[](std::size_t size);
      38: void *ptr = je_malloc(size);  [in newImpl()]
      61: ptr = je_malloc(size);  [in newImpl()]
      70: operator new(std::size_t size)  [in operator new(), argument]
      72: return (newImpl<false>(size));  [in operator new()]
      76: operator new[](std::size_t size)  [in operator new[](), argument]
      78: return (newImpl<false>(size));  [in operator new[]()]
      84: return (newImpl<true>(size));  [in operator new()]
      90: return (newImpl<true>(size));  [in operator new[]()]
      [all …]
  pages.c
      119: assert(size != 0);  [in pages_map()]
      132: ret = fuchsia_pages_map(addr, size);  [in pages_map()]
      151: pages_unmap(ret, size);  [in pages_map()]
      161: pages_unmap(void *addr, size_t size)  [in pages_unmap(), argument]
      169: if (munmap(addr, size) == -1)  [in pages_unmap()]
      209: pages_unmap(new_addr, size);  [in pages_trim()]
      250: pages_unmap(result, size);  [in pages_commit_impl()]
      279: madvise(addr, size, MADV_FREE);  [in pages_purge_lazy()]
      300: pages_huge(void *addr, size_t size)  [in pages_huge(), argument]
      303: assert(HUGEPAGE_CEILING(size) == size);  [in pages_huge()]
      [all …]
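pages_purge_lazy() above returns page contents to the OS without unmapping the range. A hedged sketch of that idea on a plain POSIX system follows; the Fuchsia port instead goes through fuchsia_pages_map() and VMO operations, so this only illustrates the madvise() path that the line-279 hit refers to.

```c
#include <stddef.h>
#include <sys/mman.h>

/* Tell the kernel the contents of [addr, addr+size) are disposable while
   keeping the mapping itself. MADV_FREE defers reclamation until there is
   memory pressure; MADV_DONTNEED drops the pages immediately. */
static int purge_lazy_sketch(void *addr, size_t size) {
#ifdef MADV_FREE
    return madvise(addr, size, MADV_FREE);
#else
    return madvise(addr, size, MADV_DONTNEED);
#endif
}
```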
  base.c
      18: assert(size == HUGEPAGE_CEILING(size));  [in base_map()]
      59: !extent_hooks->decommit(extent_hooks, addr, size, 0, size,  [in base_unmap()]
      63: !extent_hooks->purge_lazy(extent_hooks, addr, size, 0, size,  [in base_unmap()]
      68: size, ind))  [in base_unmap()]
      76: size_t size)  [in base_extent_init(), argument]
      93: assert(size == ALIGNMENT_CEILING(size, alignment));  [in base_extent_bump_alloc_helper()]
      100: *gap_size + size), extent_size_get(extent) - *gap_size - size, 0,  [in base_extent_bump_alloc_helper()]
      120: base->allocated += size;  [in base_extent_bump_alloc_post()]
      164: block->size = block_size;  [in base_block_alloc()]
      237: base->mapped = block->size;  [in base_new()]
      [all …]
  extent_mmap.c
      7: extent_alloc_mmap_slow(size_t size, size_t alignment, bool *zero, bool *commit)  [in extent_alloc_mmap_slow(), argument]
      12: alloc_size = size + alignment - PAGE;  [in extent_alloc_mmap_slow()]
      14: if (alloc_size < size)  [in extent_alloc_mmap_slow()]
      24: ret = pages_trim(pages, alloc_size, leadsize, size, commit);  [in extent_alloc_mmap_slow()]
      33: extent_alloc_mmap(void *new_addr, size_t size, size_t alignment, bool *zero,  [in extent_alloc_mmap(), argument]
      54: ret = pages_map(new_addr, size, commit);  [in extent_alloc_mmap()]
      60: pages_unmap(ret, size);  [in extent_alloc_mmap()]
      61: return (extent_alloc_mmap_slow(size, alignment, zero, commit));  [in extent_alloc_mmap()]
      70: extent_dalloc_mmap(void *addr, size_t size)  [in extent_dalloc_mmap(), argument]
      73: pages_unmap(addr, size);  [in extent_dalloc_mmap()]
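extent_alloc_mmap_slow() above is the classic over-map-and-trim fallback for aligned mappings. A rough sketch using plain mmap()/munmap() follows, assuming alignment is a power of two no smaller than the page size; jemalloc's real code routes through pages_map()/pages_trim() and also manages the *zero and *commit flags.

```c
#include <stddef.h>
#include <stdint.h>
#include <sys/mman.h>
#include <unistd.h>

/* Map size + alignment - page bytes, then unmap the misaligned head and the
   unused tail so an aligned, exactly sized region remains. */
static void *aligned_map_sketch(size_t size, size_t alignment) {
    size_t page = (size_t)sysconf(_SC_PAGESIZE);
    size_t alloc_size = size + alignment - page;
    if (alloc_size < size)            /* overflow check, as in the original */
        return NULL;

    char *pages = mmap(NULL, alloc_size, PROT_READ | PROT_WRITE,
                       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (pages == MAP_FAILED)
        return NULL;

    uintptr_t addr = (uintptr_t)pages;
    size_t leadsize = (alignment - (addr & (alignment - 1))) & (alignment - 1);
    size_t trailsize = alloc_size - leadsize - size;

    if (leadsize != 0)
        munmap(pages, leadsize);
    if (trailsize != 0)
        munmap(pages + leadsize + size, trailsize);
    return pages + leadsize;
}
```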
  zone.c
      32: vm_size_t size;  [member]
      99: size_t size);
      101: size_t size);
      145: return (je_malloc(size));  [in zone_malloc()]
      151: return (je_calloc(num, size));  [in zone_calloc()]
      181: return (realloc(ptr, size));  [in zone_realloc()]
      201: assert(alloc_size == size);  [in zone_free_definite_size()]
      223: results[i] = je_malloc(size);  [in zone_batch_malloc()]
      252: if (size == 0)  [in zone_good_size()]
      253: size = 1;  [in zone_good_size()]
      [all …]
/third_party/ulib/jemalloc/test/integration/

  posix_memalign.c
      39: size_t alignment, size;  [in TEST_BEGIN(), local]
      47: size = 0x80000000LU;  [in TEST_BEGIN()]
      51: alignment, size);  [in TEST_BEGIN()]
      58: size = 0xc0000001LU;  [in TEST_BEGIN()]
      62: alignment, size);  [in TEST_BEGIN()]
      68: size = 0xfffffff0LU;  [in TEST_BEGIN()]
      72: alignment, size);  [in TEST_BEGIN()]
      91: for (size = 1;  [in TEST_BEGIN()]
      92: size < 3 * alignment && size < (1U << 31);  [in TEST_BEGIN()]
      96: alignment, size);  [in TEST_BEGIN()]
      [all …]
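The test above probes posix_memalign() with invalid alignments and near-overflow sizes (0x80000000LU, 0xc0000001LU, 0xfffffff0LU). A small stand-alone sketch of the same contract, independent of the jemalloc test harness:

```c
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(void) {
    void *p = NULL;
    int err;

    /* Alignment must be a power of two multiple of sizeof(void *):
       3 is rejected with EINVAL. */
    err = posix_memalign(&p, 3, 64);
    printf("alignment 3 -> %s\n", strerror(err));

    /* A size no allocator can satisfy should fail cleanly with ENOMEM
       instead of crashing, which is what the test checks with its huge
       32-bit-boundary sizes. */
    err = posix_memalign(&p, 64, SIZE_MAX - 64);
    printf("absurd size -> %s\n", strerror(err));

    /* Ordinary success path. */
    err = posix_memalign(&p, 64, 100);
    if (err == 0) {
        printf("aligned ptr    %p\n", p);
        free(p);
    }
    return 0;
}
```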
  aligned_alloc.c
      41: size_t alignment, size;  [in TEST_BEGIN(), local]
      49: size = 0x80000000LU;  [in TEST_BEGIN()]
      55: alignment, size);  [in TEST_BEGIN()]
      62: size = 0xc0000001LU;  [in TEST_BEGIN()]
      68: alignment, size);  [in TEST_BEGIN()]
      74: size = 0xfffffff0LU;  [in TEST_BEGIN()]
      80: alignment, size);  [in TEST_BEGIN()]
      87: size_t alignment, size, total;  [in TEST_BEGIN(), local]
      98: for (size = 1;  [in TEST_BEGIN()]
      99: size < 3 * alignment && size < (1U << 31);  [in TEST_BEGIN()]
      [all …]
  thread_arena.c
      11: size_t size;  [in thd_start(), local]
      18: size = sizeof(arena_ind);  [in thd_start()]
      19: if ((err = mallctl("thread.arena", (void *)&arena_ind, &size,  [in thd_start()]
      27: size = sizeof(arena_ind);  [in thd_start()]
      28: if ((err = mallctl("thread.arena", (void *)&arena_ind, &size, NULL,  [in thd_start()]
      45: size_t size;  [in TEST_BEGIN(), local]
      53: size = sizeof(arena_ind);  [in TEST_BEGIN()]
      54: if ((err = mallctl("thread.arena", (void *)&arena_ind, &size, NULL,  [in TEST_BEGIN()]
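thread_arena.c exercises mallctl("thread.arena"), which reads the calling thread's arena index and, when a new value is supplied, rebinds the thread. A hedged usage sketch follows, assuming the conventional unprefixed mallctl name from <jemalloc/jemalloc.h>; this tree declares the je_-prefixed variants.

```c
#include <stdio.h>
#include <string.h>
#include <jemalloc/jemalloc.h>

int main(void) {
    unsigned arena_ind;
    size_t size = sizeof(arena_ind);
    int err;

    /* Read-only: pass oldp/oldlenp, leave newp NULL. */
    err = mallctl("thread.arena", (void *)&arena_ind, &size, NULL, 0);
    if (err != 0) {
        fprintf(stderr, "mallctl read: %s\n", strerror(err));
        return 1;
    }
    printf("current arena: %u\n", arena_ind);

    /* Read-write: also pass newp/newlen to switch this thread to arena 0. */
    unsigned new_ind = 0;
    size = sizeof(arena_ind);
    err = mallctl("thread.arena", (void *)&arena_ind, &size,
                  (void *)&new_ind, sizeof(new_ind));
    if (err != 0) {
        fprintf(stderr, "mallctl write: %s\n", strerror(err));
        return 1;
    }
    return 0;
}
```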
/third_party/ulib/jemalloc/include/jemalloc/internal/

  pages_externs.h
      21: void *pages_map(void *addr, size_t size, bool *commit);
      22: void pages_unmap(void *addr, size_t size);
      24: size_t size, bool *commit);
      25: bool pages_commit(void *addr, size_t size);
      26: bool pages_decommit(void *addr, size_t size);
      27: bool pages_purge_lazy(void *addr, size_t size);
      28: bool pages_purge_forced(void *addr, size_t size);
      29: bool pages_huge(void *addr, size_t size);
      30: bool pages_nohuge(void *addr, size_t size);
  jemalloc_internal.h
      672: size2index(size_t size)  [in size2index(), argument]
      674: assert(size > 0);  [in size2index()]
      746: s2u_lookup(size_t size)  [in s2u_lookup(), argument]
      759: s2u(size_t size)  [in s2u(), argument]
      761: assert(size > 0);  [in s2u()]
      808: usize = s2u(size);  [in sa2u()]
      809: if (usize < size) {  [in sa2u()]
      991: assert(size != 0);  [in iallocztm()]
      1126: copysize = (size < oldsize) ? size : oldsize;  [in iralloct_realign()]
      1137: assert(size != 0);  [in iralloct()]
      [all …]
/third_party/ulib/musl/sanitizers/

  asan-stubs.c
      32: __WEAK void __asan_set_shadow_##xx(uintptr_t addr, uintptr_t size) { \
      33: __unsanitized_memset((void*)addr, 0x##xx, size); \
      54: #define ASAN_REPORT_ERROR(type, is_write, size) \  [argument]
      55: TRAP_STUB(void __asan_report_##type##size(uintptr_t addr)) \
      56: TRAP_STUB(void __asan_report_exp_##type##size(uintptr_t addr, \
      58: TRAP_STUB(void __asan_report_##type##size##_noabort(uintptr_t addr))
      71: TRAP_STUB(void __asan_report_load_n(uintptr_t addr, size_t size))
      73: TRAP_STUB(void __asan_report_exp_load_n(uintptr_t addr, size_t size,
      76: TRAP_STUB(void __asan_report_store_n(uintptr_t addr, size_t size))
      78: TRAP_STUB(void __asan_report_exp_store_n(uintptr_t addr, size_t size,
      [all …]
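asan-stubs.c appears to provide weak, trapping definitions of the __asan_* entry points that instrumented code references, so such code links even when the full sanitizer runtime is not present; a real runtime overrides the weak symbols. A minimal sketch of that pattern, with __builtin_trap() standing in for the tree's TRAP_STUB macro:

```c
#include <stddef.h>
#include <stdint.h>

#define __WEAK __attribute__((weak))

/* Each stub is weak, so a real ASan runtime (if linked) overrides it;
   otherwise any call simply traps. */
#define TRAP_STUB(decl) __WEAK decl { __builtin_trap(); }

TRAP_STUB(void __asan_report_load_n(uintptr_t addr, size_t size))
TRAP_STUB(void __asan_report_store_n(uintptr_t addr, size_t size))
```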
/third_party/ulib/jemalloc/test/include/test/

  extent_hooks.h
      7: size_t size, size_t alignment, bool *zero, bool *commit,
      10: size_t size, bool committed, unsigned arena_ind);
      12: size_t size, size_t offset, size_t length, unsigned arena_ind);
      14: size_t size, size_t offset, size_t length, unsigned arena_ind);
      16: size_t size, size_t offset, size_t length, unsigned arena_ind);
      20: size_t size, size_t size_a, size_t size_b, bool committed,
      82: new_addr, size, alignment, *zero ? "true" : "false", *commit ?  [in extent_alloc_hook()]
      91: ret = default_hooks->alloc(default_hooks, new_addr, size, alignment,  [in extent_alloc_hook()]
      202: default_hooks->purge_forced(default_hooks, addr, size, offset,  [in extent_purge_forced_hook()]
      216: addr, size, size_a, size_b, committed ? "true" : "false",  [in extent_split_hook()]
      [all …]
  btalloc.h
      2: void *btalloc(size_t size, unsigned bits);
      5: void *btalloc_##n(size_t size, unsigned bits);
      11: btalloc_##n(size_t size, unsigned bits) \
      16: p = mallocx(size, 0); \
      20: p = (btalloc_0(size, bits >> 1)); \
      23: p = (btalloc_1(size, bits >> 1)); \
/third_party/ulib/ngunwind/src/mi/

  mempool.c
      47: sos_alloc (size_t size)  [in sos_alloc(), argument]
      51: size = UNW_ALIGN(size, MAX_ALIGN);  [in sos_alloc()]
      72: sos_memory_freepos += size;  [in sos_alloc()]
      78: assert ((pos+size) <= SOS_MEMORY_SIZE);  [in sos_alloc()]
      107: size_t size;  [in expand(), local]
      110: size = pool->chunk_size;  [in expand()]
      111: GET_MEMORY (mem, size);  [in expand()]
      115: GET_MEMORY (mem, size);  [in expand()]
      119: size = pool->obj_size;  [in expand()]
      120: mem = sos_alloc (size);  [in expand()]
      [all …]
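sos_alloc() above is ngunwind's fallback bump allocator: it hands out MAX_ALIGN-aligned chunks from a fixed static buffer and never frees them. A minimal single-threaded sketch follows; the pool size and the UNW_ALIGN definition here are stand-ins, and the real code advances the free position atomically.

```c
#include <assert.h>
#include <stddef.h>

#define SOS_MEMORY_SIZE (16 * 1024)   /* made-up pool size for the sketch */
#define MAX_ALIGN 16
#define UNW_ALIGN(x, a) (((x) + (size_t)((a) - 1)) & ~(size_t)((a) - 1))

static char sos_memory[SOS_MEMORY_SIZE] __attribute__((aligned(MAX_ALIGN)));
static size_t sos_memory_freepos;

/* Round the request up to MAX_ALIGN and bump the free position; there is
   no corresponding free. */
static void *sos_alloc_sketch(size_t size) {
    size = UNW_ALIGN(size, MAX_ALIGN);
    size_t pos = sos_memory_freepos;
    sos_memory_freepos += size;
    assert(pos + size <= SOS_MEMORY_SIZE);
    return sos_memory + pos;
}
```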
/third_party/ulib/jemalloc/include/jemalloc/

  jemalloc_protos.h
      11: void JEMALLOC_NOTHROW *je_malloc(size_t size)
      14: void JEMALLOC_NOTHROW *je_calloc(size_t num, size_t size)
      20: size_t size) JEMALLOC_CXX_THROW JEMALLOC_ATTR(malloc)
      23: void JEMALLOC_NOTHROW *je_realloc(void *ptr, size_t size)
      29: void JEMALLOC_NOTHROW *je_mallocx(size_t size, int flags)
      32: void JEMALLOC_NOTHROW *je_rallocx(void *ptr, size_t size,
      34: JEMALLOC_EXPORT size_t JEMALLOC_NOTHROW je_xallocx(void *ptr, size_t size,
      39: JEMALLOC_EXPORT void JEMALLOC_NOTHROW je_sdallocx(void *ptr, size_t size,
      41: JEMALLOC_EXPORT size_t JEMALLOC_NOTHROW je_nallocx(size_t size, int flags)
      58: void JEMALLOC_NOTHROW *je_memalign(size_t alignment, size_t size)
      [all …]
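Besides the standard malloc family, the header declares jemalloc's *allocx() extensions (mallocx, rallocx, xallocx, sdallocx, nallocx), here under je_ prefixes. A hedged usage sketch via the conventional unprefixed names from <jemalloc/jemalloc.h>:

```c
#include <stdio.h>
#include <string.h>
#include <jemalloc/jemalloc.h>

int main(void) {
    /* nallocx() reports the usable size a request would get, without allocating. */
    size_t usable = nallocx(100, MALLOCX_ALIGN(64));
    printf("a 64-byte-aligned 100-byte request really uses %zu bytes\n", usable);

    /* mallocx() takes the same flags; here, 64-byte alignment. */
    void *p = mallocx(100, MALLOCX_ALIGN(64));
    if (p == NULL)
        return 1;
    memset(p, 0, 100);

    /* rallocx() resizes (possibly moving); sdallocx() is sized deallocation,
       which lets the allocator skip a size lookup. */
    void *q = rallocx(p, 200, 0);
    if (q == NULL) {
        dallocx(p, 0);
        return 1;
    }
    sdallocx(q, 200, 0);
    return 0;
}
```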
  jemalloc_protos_jet.h
      11: void JEMALLOC_NOTHROW *jet_malloc(size_t size)
      14: void JEMALLOC_NOTHROW *jet_calloc(size_t num, size_t size)
      20: size_t size) JEMALLOC_CXX_THROW JEMALLOC_ATTR(malloc)
      23: void JEMALLOC_NOTHROW *jet_realloc(void *ptr, size_t size)
      29: void JEMALLOC_NOTHROW *jet_mallocx(size_t size, int flags)
      32: void JEMALLOC_NOTHROW *jet_rallocx(void *ptr, size_t size,
      34: JEMALLOC_EXPORT size_t JEMALLOC_NOTHROW jet_xallocx(void *ptr, size_t size,
      39: JEMALLOC_EXPORT void JEMALLOC_NOTHROW jet_sdallocx(void *ptr, size_t size,
      41: JEMALLOC_EXPORT size_t JEMALLOC_NOTHROW jet_nallocx(size_t size, int flags)
      58: void JEMALLOC_NOTHROW *jet_memalign(size_t alignment, size_t size)
      [all …]
/third_party/ulib/musl/include/

  sched.h
      56: #define __CPU_op_S(i, size, set, op) \  [argument]
      57: ((i) / 8U >= (size) \
      61: #define CPU_SET_S(i, size, set) __CPU_op_S(i, size, set, |=)  [argument]
      62: #define CPU_CLR_S(i, size, set) __CPU_op_S(i, size, set, &= ~)  [argument]
      63: #define CPU_ISSET_S(i, size, set) __CPU_op_S(i, size, set, &)  [argument]
      79: #define CPU_COUNT_S(size, set) __sched_cpucount(size, set)  [argument]
      80: #define CPU_ZERO_S(size, set) memset(set, 0, size)  [argument]
      81: #define CPU_EQUAL_S(size, set1, set2) (!memcmp(set1, set2, size))  [argument]
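The *_S macros above are the dynamically sized variants of the CPU-affinity API: their size argument is the byte length of the set, normally obtained from CPU_ALLOC_SIZE(). A short usage sketch:

```c
#define _GNU_SOURCE
#include <sched.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(void) {
    int ncpus = 128;                        /* room for CPUs 0..127 */
    size_t setsize = CPU_ALLOC_SIZE(ncpus); /* the "size" the *_S macros expect */
    cpu_set_t *set = CPU_ALLOC(ncpus);
    if (set == NULL)
        return 1;

    CPU_ZERO_S(setsize, set);               /* memset(set, 0, size) */
    CPU_SET_S(3, setsize, set);             /* |= the bit for CPU 3 */
    printf("cpu 3 set:  %d\n", CPU_ISSET_S(3, setsize, set) != 0);
    printf("population: %d\n", CPU_COUNT_S(setsize, set));

    CPU_CLR_S(3, setsize, set);             /* &= ~ the bit again */
    CPU_FREE(set);
    return 0;
}
```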
/third_party/ulib/musl/src/stdio/

  vfwscanf.c
      26: switch (size) {  [in store_int()]
      90: int size;  [in vfwscanf(), local]
      156: size = SIZE_def;  [in vfwscanf()]
      160: p++, size = SIZE_hh;  [in vfwscanf()]
      162: size = SIZE_h;  [in vfwscanf()]
      166: p++, size = SIZE_ll;  [in vfwscanf()]
      168: size = SIZE_l;  [in vfwscanf()]
      171: size = SIZE_ll;  [in vfwscanf()]
      175: size = SIZE_l;  [in vfwscanf()]
      178: size = SIZE_L;  [in vfwscanf()]
      [all …]
  fmemopen.c
      7: size_t pos, len, size;  [member]
      21: if (off < -base || off > (ssize_t)c->size - base)  [in mseek()]
      58: rem = c->size - c->pos;  [in mwrite()]
      65: if (c->len < c->size)  [in mwrite()]
      67: else if ((f->flags & F_NORD) && c->size)  [in mwrite()]
      68: c->buf[c->size - 1] = 0;  [in mwrite()]
      82: if (!size || !strchr("rwa", *mode)) {  [in fmemopen()]
      87: if (!buf && size > SIZE_MAX - sizeof(FILE) - BUFSIZ - UNGET) {  [in fmemopen()]
      104: c->size = size;  [in fmemopen()]
      110: c->len = size;  [in fmemopen()]
      [all …]
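The members above (pos, len, size) are musl's bookkeeping for fmemopen(): size caps the stream, len tracks the logical end of data, and writes are NUL-terminated when room allows. A short example of the public interface:

```c
#include <stdio.h>

int main(void) {
    char buf[64];
    FILE *f = fmemopen(buf, sizeof(buf), "w+");
    if (f == NULL)
        return 1;

    fprintf(f, "hello %d", 42);
    fflush(f);                    /* contents now visible in buf, NUL-terminated */
    printf("buffer holds: %s\n", buf);

    rewind(f);
    char word[16];
    if (fscanf(f, "%15s", word) == 1)
        printf("read back:    %s\n", word);

    fclose(f);
    return 0;
}
```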
  vfscanf.c
      25: switch (size) {  [in store_int()]
      58: int size;  [in vfscanf(), local]
      128: size = SIZE_def;  [in vfscanf()]
      134: size = SIZE_h;  [in vfscanf()]
      140: size = SIZE_l;  [in vfscanf()]
      143: size = SIZE_ll;  [in vfscanf()]
      147: size = SIZE_l;  [in vfscanf()]
      150: size = SIZE_L;  [in vfscanf()]
      184: size = SIZE_l;  [in vfscanf()]
      247: if (size == SIZE_l) {  [in vfscanf()]
      [all …]
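In both vfscanf() and vfwscanf(), size records the hh/h/l/ll/L length modifier so that store_int() knows how wide an object to write through the destination pointer. A small example of what those modifiers select at the call site:

```c
#include <stdio.h>

int main(void) {
    signed char c;   /* %hhd -> SIZE_hh */
    short s;         /* %hd  -> SIZE_h  */
    long l;          /* %ld  -> SIZE_l  */
    long long ll;    /* %lld -> SIZE_ll */
    long double d;   /* %Lf  -> SIZE_L  */

    int n = sscanf("1 2 3 4 5.0", "%hhd %hd %ld %lld %Lf", &c, &s, &l, &ll, &d);
    printf("converted %d fields: %d %d %ld %lld %Lf\n", n, c, s, l, ll, d);
    return 0;
}
```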
/third_party/ulib/musl/third_party/tre/

  tre-mem.c
      79: void* tre_mem_alloc_impl(tre_mem_t mem, int provided, void* provided_block, int zero, size_t size) {  [in tre_mem_alloc_impl(), argument]
      86: if (mem->n < size) {  [in tre_mem_alloc_impl()]
      99: if (size * 8 > TRE_MEM_BLOCK_SIZE)  [in tre_mem_alloc_impl()]
      100: block_size = size * 8;  [in tre_mem_alloc_impl()]
      126: size += ALIGN(mem->ptr + size, long);  [in tre_mem_alloc_impl()]
      130: mem->ptr += size;  [in tre_mem_alloc_impl()]
      131: mem->n -= size;  [in tre_mem_alloc_impl()]
      135: memset(ptr, 0, size);  [in tre_mem_alloc_impl()]
/third_party/ulib/musl/src/locale/

  __mo_lookup.c
      8: const char* __mo_lookup(const void* p, size_t size, const char* s) {  [in __mo_lookup(), argument]
      14: if (n >= size / 4 || o >= size - 4 * n || t >= size - 4 * n || ((o | t) % 4))  [in __mo_lookup()]
      21: if (os >= size || ol >= size - os || ((char*)p)[os + ol])  [in __mo_lookup()]
      27: if (ts >= size || tl >= size - ts || ((char*)p)[ts + tl])  [in __mo_lookup()]
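__mo_lookup() parses gettext .mo data from untrusted bytes, so every offset/length pair is validated with the overflow-safe shape visible above: the offset is checked against size first, then the length against what remains, so the sum never has to be formed. A hypothetical helper distilling that idiom:

```c
#include <stdbool.h>
#include <stddef.h>

/* True when offset + length fits inside a buffer of "size" bytes, written
   so that the addition cannot wrap around. */
static bool range_in_bounds(size_t offset, size_t length, size_t size) {
    return offset < size && length <= size - offset;
}
```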