/lib/
kasprintf.c
   15  char *kvasprintf(gfp_t gfp, const char *fmt, va_list ap)    in kvasprintf() (argument)
   25  p = kmalloc_track_caller(first+1, gfp);    in kvasprintf()
   43  const char *kvasprintf_const(gfp_t gfp, const char *fmt, va_list ap)    in kvasprintf_const() (argument)
   46  return kstrdup_const(fmt, gfp);    in kvasprintf_const()
   48  return kstrdup_const(va_arg(ap, const char*), gfp);    in kvasprintf_const()
   49  return kvasprintf(gfp, fmt, ap);    in kvasprintf_const()
   53  char *kasprintf(gfp_t gfp, const char *fmt, ...)    in kasprintf() (argument)
   59  p = kvasprintf(gfp, fmt, ap);    in kasprintf()
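The kvasprintf()/kasprintf() family formats into a buffer it allocates with the caller-supplied gfp mask and hands ownership back to the caller. A minimal usage sketch (the function and label name below are illustrative, not from this file):

```c
#include <linux/kernel.h>
#include <linux/slab.h>

static char *make_label(int index)
{
	/* Allocation obeys the gfp mask passed in; GFP_KERNEL may sleep. */
	char *label = kasprintf(GFP_KERNEL, "queue-%d", index);

	/* NULL on allocation failure; otherwise the caller kfree()s it. */
	return label;
}
```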
objpool.c
   83  if ((pool->gfp & (GFP_ATOMIC | GFP_KERNEL)) != GFP_ATOMIC)    in objpool_init_percpu_slots()
   84  slot = __vmalloc_node(size, sizeof(void *), pool->gfp,    in objpool_init_percpu_slots()
   88  slot = kmalloc_node(size, pool->gfp, cpu_to_node(i));    in objpool_init_percpu_slots()
  119  gfp_t gfp, void *context, objpool_init_obj_cb objinit,    in objpool_init() (argument)
  142  pool->gfp = gfp & ~__GFP_ZERO;    in objpool_init()
  146  pool->cpu_slots = kzalloc(slot_size, pool->gfp);    in objpool_init()
string_helpers.c
  651  char *kstrdup_quotable(const char *src, gfp_t gfp)    in kstrdup_quotable() (argument)
  663  dst = kmalloc(dlen + 1, gfp);    in kstrdup_quotable()
  701  quoted = kstrdup_quotable(buffer, gfp);    in kstrdup_quotable_cmdline()
  712  char *kstrdup_quotable_file(struct file *file, gfp_t gfp)    in kstrdup_quotable_file() (argument)
  717  return kstrdup("<unknown>", gfp);    in kstrdup_quotable_file()
  722  return kstrdup("<no_memory>", gfp);    in kstrdup_quotable_file()
  726  pathname = kstrdup("<too_long>", gfp);    in kstrdup_quotable_file()
  728  pathname = kstrdup_quotable(pathname, gfp);    in kstrdup_quotable_file()
  742  dst = kstrdup(src, gfp);    in kstrdup_and_replace()
  767  names = kcalloc(n + 1, sizeof(char *), gfp);    in kasprintf_strarray()
  [all …]
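These string_helpers allocate their result with the supplied gfp mask and return ownership to the caller. A small sketch of kstrdup_and_replace(), assuming a caller that wants a path flattened into a single token (the function name is illustrative):

```c
#include <linux/slab.h>
#include <linux/string_helpers.h>

static char *path_to_flat_name(const char *path)
{
	/* Duplicate "a/b/c" as "a_b_c"; the copy is allocated with GFP_KERNEL. */
	char *flat = kstrdup_and_replace(path, '/', '_', GFP_KERNEL);

	/* NULL on allocation failure; otherwise release with kfree(flat). */
	return flat;
}
```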
argv_split.c
   60  char **argv_split(gfp_t gfp, const char *str, int *argcp)    in argv_split() (argument)
   67  argv_str = kstrndup(str, KMALLOC_MAX_SIZE - 1, gfp);    in argv_split()
   72  argv = kmalloc_array(argc + 2, sizeof(*argv), gfp);    in argv_split()
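argv_split() copies the input string and builds a NULL-terminated word vector, both allocated with the given gfp mask; argv_free() releases everything in one call. A usage sketch (the command string is illustrative):

```c
#include <linux/errno.h>
#include <linux/printk.h>
#include <linux/string.h>

static int split_example(void)
{
	int argc;
	char **argv = argv_split(GFP_KERNEL, "mount -o ro /dev/sda1", &argc);

	if (!argv)
		return -ENOMEM;

	/* argv[0..argc-1] are the words, argv[argc] is NULL. */
	pr_info("first word: %s (%d words)\n", argv[0], argc);

	argv_free(argv);	/* frees the vector and the copied string */
	return 0;
}
```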
idr.c
   34  unsigned long max, gfp_t gfp)    in idr_alloc_u32() (argument)
   46  slot = idr_get_free(&idr->idr_rt, &iter, gfp, max - base);    in idr_alloc_u32()
   79  int idr_alloc(struct idr *idr, void *ptr, int start, int end, gfp_t gfp)    in idr_alloc() (argument)
   87  ret = idr_alloc_u32(idr, ptr, &id, end > 0 ? end - 1 : INT_MAX, gfp);    in idr_alloc()
  117  int idr_alloc_cyclic(struct idr *idr, void *ptr, int start, int end, gfp_t gfp)    in idr_alloc_cyclic() (argument)
  125  err = idr_alloc_u32(idr, ptr, &id, max, gfp);    in idr_alloc_cyclic()
  128  err = idr_alloc_u32(idr, ptr, &id, max, gfp);    in idr_alloc_cyclic()
  381  gfp_t gfp)    in ida_alloc_range() (argument)
  454  if (xas_nomem(&xas, gfp)) {    in ida_alloc_range()
  466  alloc = kzalloc(sizeof(*bitmap), gfp);    in ida_alloc_range()
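idr_alloc() maps a pointer to a small integer ID, with internal nodes allocated under the caller's gfp mask; ida_alloc_range() does the same for bare IDs. A sketch of the pointer-to-ID case (the IDR instance and function are illustrative):

```c
#include <linux/idr.h>

static DEFINE_IDR(my_idr);	/* illustrative global IDR */

static int register_object(void *obj)
{
	/* Grab an ID in [1, 100); GFP_KERNEL lets the tree grow as needed. */
	int id = idr_alloc(&my_idr, obj, 1, 100, GFP_KERNEL);

	/* id is the new ID, or -ENOMEM / -ENOSPC; idr_remove() undoes it. */
	return id;
}
```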
memregion.c
    9  int memregion_alloc(gfp_t gfp)    in memregion_alloc() (argument)
   11  return ida_alloc(&memregion_ids, gfp);    in memregion_alloc()
flex_proportions.c
   38  int fprop_global_init(struct fprop_global *p, gfp_t gfp)    in fprop_global_init() (argument)
   44  err = percpu_counter_init(&p->events, 1, gfp);    in fprop_global_init()
   91  int fprop_local_init_percpu(struct fprop_local_percpu *pl, gfp_t gfp)    in fprop_local_init_percpu() (argument)
   95  err = percpu_counter_init(&pl->events, 0, gfp);    in fprop_local_init_percpu()
xarray.c
  308  gfp |= __GFP_ACCOUNT;    in xas_nomem()
  338  gfp |= __GFP_ACCOUNT;    in __xas_nomem()
  339  if (gfpflags_allow_blocking(gfp)) {    in __xas_nomem()
  376  gfp |= __GFP_ACCOUNT;    in xas_alloc()
 1046  gfp_t gfp)    in xas_split_alloc() (argument)
 1182  gfp_t gfp = GFP_NOWAIT;    in xas_try_split() (local)
 1189  gfp |= __GFP_ACCOUNT;    in xas_try_split()
 1215  xas->xa_lru, gfp);    in xas_try_split()
 1706  } while (__xas_nomem(&xas, gfp));    in __xa_store()
 1786  } while (__xas_nomem(&xas, gfp));    in __xa_cmpxchg_raw()
  [all …]
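In the XArray the gfp mask only matters when a store needs to allocate internal nodes; __xas_nomem() retries after dropping the lock when the mask allows blocking. The public API hides that retry loop, as in this sketch (the array and entry names are illustrative):

```c
#include <linux/xarray.h>

static DEFINE_XARRAY(my_xa);	/* illustrative */

static int remember_entry(unsigned long index, void *my_entry)
{
	/* xa_store() allocates nodes as needed, here with GFP_KERNEL. */
	void *old = xa_store(&my_xa, index, my_entry, GFP_KERNEL);

	if (xa_is_err(old))
		return xa_err(old);	/* e.g. -ENOMEM */

	/* xa_load(&my_xa, index) now returns my_entry. */
	return 0;
}
```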
test_free_pages.c
   14  static void test_free_pages(gfp_t gfp)    in test_free_pages() (argument)
   19  unsigned long addr = __get_free_pages(gfp, 3);    in test_free_pages()
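The test exercises __get_free_pages(), which returns the kernel virtual address of 2^order contiguous pages allocated under the given mask. A matching allocate/free sketch (the wrapper function is illustrative):

```c
#include <linux/errno.h>
#include <linux/gfp.h>

static int grab_eight_pages(void)
{
	/* order 3 = 8 contiguous pages; GFP_KERNEL may sleep. */
	unsigned long addr = __get_free_pages(GFP_KERNEL, 3);

	if (!addr)
		return -ENOMEM;

	/* ... use the buffer at addr ... */
	free_pages(addr, 3);	/* order must match the allocation */
	return 0;
}
```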
bucket_locks.c
   15  size_t max_size, unsigned int cpu_mult, gfp_t gfp,    in __alloc_bucket_spinlocks() (argument)
   34  tlocks = kvmalloc_array(size, sizeof(spinlock_t), gfp);    in __alloc_bucket_spinlocks()
btree.c
   96  node = mempool_alloc(head->mempool, gfp);    in btree_node_alloc()
  409  gfp_t gfp)    in btree_grow() (argument)
  414  node = btree_node_alloc(head, gfp);    in btree_grow()
  445  gfp_t gfp)    in btree_insert_level() (argument)
  452  err = btree_grow(head, geo, gfp);    in btree_insert_level()
  468  new = btree_node_alloc(head, gfp);    in btree_insert_level()
  473  new, level + 1, gfp);    in btree_insert_level()
  506  unsigned long *key, void *val, gfp_t gfp)    in btree_insert() (argument)
  509  return btree_insert_level(head, geo, key, val, 1, gfp);    in btree_insert()
  635  struct btree_geo *geo, gfp_t gfp)    in btree_merge() (argument)
  [all …]
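btree_insert() takes a gfp mask because growing the tree may need fresh nodes from the mempool. A rough sketch against the 64-bit key geometry this library exports, assuming CONFIG_BTREE (the head and value names are illustrative):

```c
#include <linux/btree.h>
#include <linux/errno.h>

static struct btree_head my_btree;	/* illustrative */

static int btree_example(void *val)
{
	unsigned long key = 42;
	int err = btree_init(&my_btree);

	if (err)
		return err;

	/* Node allocations on this insert path use GFP_KERNEL. */
	err = btree_insert(&my_btree, &btree_geo64, &key, val, GFP_KERNEL);
	if (!err && btree_lookup(&my_btree, &btree_geo64, &key) != val)
		err = -EINVAL;

	btree_destroy(&my_btree);
	return err;
}
```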
test_objpool.c
  288  gfp_t gfp = GFP_KERNEL;    in ot_init_sync_m0() (local)
  295  gfp = GFP_ATOMIC;    in ot_init_sync_m0()
  298  gfp, sop, ot_init_node, NULL)) {    in ot_init_sync_m0()
  454  gfp_t gfp = GFP_KERNEL;    in ot_init_async_m0() (local)
  461  gfp = GFP_ATOMIC;    in ot_init_async_m0()
  463  if (objpool_init(&sop->pool, max, test->objsz, gfp, sop,    in ot_init_async_m0()
percpu-refcount.c
   64  unsigned int flags, gfp_t gfp)    in percpu_ref_init() (argument)
   72  __alloc_percpu_gfp(sizeof(unsigned long), align, gfp);    in percpu_ref_init()
   76  data = kzalloc(sizeof(*ref->data), gfp);    in percpu_ref_init()
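percpu_ref_init() allocates both the per-CPU counters and the bookkeeping data with the supplied mask. A minimal init/release sketch (the ref instance and release callback are illustrative):

```c
#include <linux/percpu-refcount.h>

static struct percpu_ref my_ref;	/* illustrative */

static void my_ref_release(struct percpu_ref *ref)
{
	/* Runs once the last reference is dropped after percpu_ref_kill(). */
}

static int my_ref_setup(void)
{
	/* Per-CPU counter storage is allocated with GFP_KERNEL here. */
	return percpu_ref_init(&my_ref, my_ref_release, 0, GFP_KERNEL);
}
```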
ref_tracker.c
  255  gfp_t gfp)    in ref_tracker_alloc() (argument)
  260  gfp_t gfp_mask = gfp | __GFP_NOWARN;    in ref_tracker_alloc()
  269  if (gfp & __GFP_DIRECT_RECLAIM)    in ref_tracker_alloc()
  278  tracker->alloc_stack_handle = stack_depot_save(entries, nr_entries, gfp);    in ref_tracker_alloc()
maple_tree.c
 1337  mas_alloc_nodes(mas, gfp);    in mas_node_count_gfp()
 6290  mas_alloc_nodes(mas, gfp);    in mas_nomem()
 6391  gfp_t gfp)    in mtree_store() (argument)
 6423  if (mas_nomem(&ms, gfp))    in mtree_insert_range()
 6446  gfp_t gfp)    in mtree_insert() (argument)
 6476  if (mas_nomem(&mas, gfp))    in mtree_alloc_range()
 6528  next, gfp);    in mtree_alloc_cyclic()
 6558  if (mas_nomem(&mas, gfp))    in mtree_alloc_rrange()
 6676  gfp_t gfp)    in mas_dup_alloc() (argument)
 6720  gfp_t gfp)    in mas_dup_build() (argument)
  [all …]
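As in the XArray, the maple tree's external API threads the gfp mask down to node allocation and retries via mas_nomem() when the mask allows it. A small sketch of the mtree_* wrappers (the tree and entry names are illustrative):

```c
#include <linux/maple_tree.h>

static DEFINE_MTREE(my_mt);	/* illustrative */

static int store_range_example(void *entry)
{
	/* Associate indices 10..19 with entry; nodes come from GFP_KERNEL. */
	int err = mtree_store_range(&my_mt, 10, 19, entry, GFP_KERNEL);

	if (err)
		return err;

	/* mtree_load(&my_mt, 15) now returns entry. */
	return 0;
}
```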
radix-tree.c
  408  static int radix_tree_extend(struct radix_tree_root *root, gfp_t gfp,    in radix_tree_extend() (argument)
  425  struct radix_tree_node *node = radix_tree_node_alloc(gfp, NULL,    in radix_tree_extend()
  607  gfp_t gfp = root_gfp_mask(root);    in __radix_tree_create() (local)
  613  int error = radix_tree_extend(root, gfp, max, shift);    in __radix_tree_create()
  624  child = radix_tree_node_alloc(gfp, node, root, shift,    in __radix_tree_create()
 1477  struct radix_tree_iter *iter, gfp_t gfp,    in idr_get_free() (argument)
 1493  int error = radix_tree_extend(root, gfp, start, shift);    in idr_get_free()
 1506  child = radix_tree_node_alloc(gfp, node, root, shift,    in idr_get_free()
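Unlike the XArray, the classic radix tree keeps its gfp mask in the root (root_gfp_mask() above), so insertions use whatever mask the tree was declared with. A sketch (the tree name is illustrative):

```c
#include <linux/radix-tree.h>

/* The mask in the declaration is what node allocations will use. */
static RADIX_TREE(my_tree, GFP_KERNEL);	/* illustrative */

static int radix_example(void *item)
{
	int err = radix_tree_insert(&my_tree, 123, item);

	if (err)
		return err;	/* -ENOMEM or -EEXIST */

	/* radix_tree_lookup(&my_tree, 123) now returns item. */
	radix_tree_delete(&my_tree, 123);
	return 0;
}
```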
rhashtable.c
  150  gfp_t gfp)    in nested_bucket_table_alloc() (argument)
  162  kmalloc_noprof(size, gfp|__GFP_ZERO));    in nested_bucket_table_alloc()
  179  gfp_t gfp)    in bucket_table_alloc() (argument)
  188  gfp|__GFP_ZERO, NUMA_NO_NODE));    in bucket_table_alloc()
  192  if (tbl == NULL && !gfpflags_allow_blocking(gfp)) {    in bucket_table_alloc()
  193  tbl = nested_bucket_table_alloc(ht, nbuckets, gfp);    in bucket_table_alloc()
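bucket_table_alloc() falls back to a nested, multi-level table when a non-blocking gfp mask cannot satisfy a flat allocation; users of the hash table never see that detail. A compressed sketch of the public API (struct test_obj and its params are illustrative):

```c
#include <linux/rhashtable.h>

struct test_obj {
	u32 key;
	struct rhash_head node;
};

static const struct rhashtable_params test_params = {
	.key_len     = sizeof(u32),
	.key_offset  = offsetof(struct test_obj, key),
	.head_offset = offsetof(struct test_obj, node),
};

static int hash_example(struct rhashtable *ht, struct test_obj *obj)
{
	/* Bucket tables grow internally; atomic inserts take the nested
	 * fallback seen in the hits above when needed. */
	return rhashtable_insert_fast(ht, &obj->node, test_params);
}
```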
percpu_counter.c
  189  gfp_t gfp, u32 nr_counters,    in __percpu_counter_init_many() (argument)
  199  __alignof__(*counters), gfp);    in __percpu_counter_init_many()
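percpu_counter_init() is the usual entry point into this code; the gfp mask covers only the per-CPU storage. A sketch (the counter name is illustrative):

```c
#include <linux/percpu_counter.h>
#include <linux/printk.h>

static struct percpu_counter nr_things;	/* illustrative */

static int counter_example(void)
{
	/* The per-CPU storage is allocated with GFP_KERNEL. */
	int err = percpu_counter_init(&nr_things, 0, GFP_KERNEL);

	if (err)
		return err;

	percpu_counter_add(&nr_things, 1);
	pr_info("total: %lld\n", percpu_counter_sum(&nr_things));
	percpu_counter_destroy(&nr_things);
	return 0;
}
```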
scatterlist.c
  591  gfp_t gfp, unsigned int *nent_p)    in sgl_alloc_order() (argument)
  610  gfp & ~GFP_DMA);    in sgl_alloc_order()
  618  page = alloc_pages(gfp, order);    in sgl_alloc_order()
  643  struct scatterlist *sgl_alloc(unsigned long long length, gfp_t gfp,    in sgl_alloc() (argument)
  646  return sgl_alloc_order(length, 0, false, gfp, nent_p);    in sgl_alloc()
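sgl_alloc() builds a scatterlist and backs it with freshly allocated pages, both governed by the caller's mask (GFP_DMA is stripped for the table itself, as the hit at line 610 shows). A sketch, assuming CONFIG_SGL_ALLOC (the helper name is illustrative):

```c
#include <linux/scatterlist.h>
#include <linux/sizes.h>

static struct scatterlist *alloc_one_mib_sgl(unsigned int *nents)
{
	/* 1 MiB of order-0 pages described by a scatterlist. */
	struct scatterlist *sgl = sgl_alloc(SZ_1M, GFP_KERNEL, nents);

	/* NULL on failure; otherwise release with sgl_free(sgl). */
	return sgl;
}
```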
/lib/kunit/
string-stream.c
   17  static struct string_stream_fragment *alloc_string_stream_fragment(int len, gfp_t gfp)    in alloc_string_stream_fragment() (argument)
   21  frag = kzalloc(sizeof(*frag), gfp);    in alloc_string_stream_fragment()
   25  frag->fragment = kmalloc(len, gfp);    in alloc_string_stream_fragment()
   67  frag_container = alloc_string_stream_fragment(buf_len, stream->gfp);    in string_stream_vadd()
  123  buf = kzalloc(buf_len, stream->gfp);    in string_stream_get_string()
  157  struct string_stream *alloc_string_stream(gfp_t gfp)    in alloc_string_stream() (argument)
  161  stream = kzalloc(sizeof(*stream), gfp);    in alloc_string_stream()
  165  stream->gfp = gfp;    in alloc_string_stream()
  190  struct string_stream *kunit_alloc_string_stream(struct kunit *test, gfp_t gfp)    in kunit_alloc_string_stream() (argument)
  194  stream = alloc_string_stream(gfp);    in kunit_alloc_string_stream()
string-stream.h
   26  gfp_t gfp;    (member)
   32  struct string_stream *kunit_alloc_string_stream(struct kunit *test, gfp_t gfp);
   35  struct string_stream *alloc_string_stream(gfp_t gfp);
test.c
  895  void *kunit_kmalloc_array(struct kunit *test, size_t n, size_t size, gfp_t gfp)    in KUNIT_DEFINE_ACTION_WRAPPER()
  899  data = kmalloc_array(n, size, gfp);    in KUNIT_DEFINE_ACTION_WRAPPER()
  929  const char *kunit_kstrdup_const(struct kunit *test, const char *str, gfp_t gfp)    in kunit_kstrdup_const() (argument)
  935  return kunit_kstrdup(test, str, gfp);    in kunit_kstrdup_const()
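The kunit_k*() allocators behave like their kmalloc counterparts but register the memory with the test, so it is freed automatically when the test ends. A sketch of a test body (the test name is illustrative):

```c
#include <kunit/test.h>

static void buffer_test(struct kunit *test)
{
	/* Freed automatically on test exit; GFP_KERNEL is fine in tests. */
	u32 *buf = kunit_kmalloc_array(test, 16, sizeof(*buf), GFP_KERNEL);

	KUNIT_ASSERT_NOT_ERR_OR_NULL(test, buf);
	buf[0] = 42;
	KUNIT_EXPECT_EQ(test, buf[0], 42U);
}
```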
/lib/tests/
fortify_kunit.c
  220  gfp_t gfp = GFP_KERNEL | __GFP_NOWARN; \
  255  orig = kmalloc(alloc_size, gfp); \
  258  krealloc(orig, (alloc_size) * 2, gfp), \
  260  orig = kmalloc(alloc_size, gfp); \
  265  orig = kmalloc(alloc_size, gfp); \
  281  gfp_t gfp = GFP_KERNEL | __GFP_NOWARN; \
  293  gfp_t gfp = GFP_KERNEL | __GFP_NOWARN; \
  323  orig = kvmalloc(prev_size, gfp); \
  332  gfp_t gfp = GFP_KERNEL | __GFP_NOWARN; \
  354  devm_kcalloc(dev, 1, alloc_size, gfp), \
  [all …]
printf_kunit.c
  638  gfp_t gfp;    in flags() (local)
  658  gfp = GFP_TRANSHUGE;    in flags()
  659  test("GFP_TRANSHUGE", "%pGg", &gfp);    in flags()
  661  gfp = GFP_ATOMIC|__GFP_DMA;    in flags()
  664  gfp = __GFP_HIGH;    in flags()
  665  test("__GFP_HIGH", "%pGg", &gfp);    in flags()
  668  gfp = ~__GFP_BITS_MASK;    in flags()
  670  test(cmp_buffer, "%pGg", &gfp);    in flags()
  673  (unsigned long) gfp);    in flags()
  674  gfp |= __GFP_HIGH;    in flags()
  [all …]
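These cases exercise the %pGg printk extension, which decodes a gfp mask into its symbolic flag names; note that the argument is a pointer to the gfp_t, not the value. A sketch:

```c
#include <linux/gfp.h>
#include <linux/printk.h>

static void show_mask(void)
{
	gfp_t gfp = GFP_KERNEL | __GFP_NOWARN;

	/* Prints something like "GFP_KERNEL|__GFP_NOWARN". */
	pr_info("mask: %pGg\n", &gfp);
}
```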
/lib/reed_solomon/
reed_solomon.c
   71  int fcr, int prim, int nroots, gfp_t gfp)    in codec_init() (argument)
   76  rs = kzalloc(sizeof(*rs), gfp);    in codec_init()
   91  rs->alpha_to = kmalloc_array(rs->nn + 1, sizeof(uint16_t), gfp);    in codec_init()
   95  rs->index_of = kmalloc_array(rs->nn + 1, sizeof(uint16_t), gfp);    in codec_init()
   99  rs->genpoly = kmalloc_array(rs->nroots + 1, sizeof(uint16_t), gfp);    in codec_init()
  215  int prim, int nroots, gfp_t gfp)    in init_rs_internal() (argument)
  237  rs = kzalloc(sizeof(*rs) + bsize, gfp);    in init_rs_internal()
  266  rs->codec = codec_init(symsize, gfpoly, gffunc, fcr, prim, nroots, gfp);    in init_rs_internal()
  289  int nroots, gfp_t gfp)    in init_rs_gfp() (argument)
  291  return init_rs_internal(symsize, gfpoly, NULL, fcr, prim, nroots, gfp);    in init_rs_gfp()
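init_rs_gfp() builds the Galois-field tables and generator polynomial with the caller's mask; the plain init_rs() wrapper simply passes GFP_KERNEL. A rough setup sketch (the parameter choices below are illustrative, not a recommendation):

```c
#include <linux/errno.h>
#include <linux/rslib.h>

static struct rs_control *rs;	/* illustrative */

static int rs_setup(void)
{
	/* 10-bit symbols over GF(2^10) with field polynomial 0x409,
	 * fcr=0, prim=1, 8 roots; table allocations use GFP_KERNEL. */
	rs = init_rs_gfp(10, 0x409, 0, 1, 8, GFP_KERNEL);
	if (!rs)
		return -ENOMEM;

	return 0;	/* later: free_rs(rs) */
}
```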