Lines matching refs:size (kmalloc/slab allocator declarations; the leading number on each line is that line's position in the source file)
173 struct kmem_cache *kmem_cache_create(const char *name, unsigned int size,
177 unsigned int size, unsigned int align,
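The two matches above belong to a single prototype: kmem_cache_create()'s parameter list spans several source lines, so the search reports each matched line separately. A minimal sketch of a module that creates a dedicated cache; the struct, cache name, and function names here are hypothetical:

    #include <linux/module.h>
    #include <linux/slab.h>

    struct foo {                    /* hypothetical fixed-size object */
            int a;
            long b;
    };

    static struct kmem_cache *foo_cache;

    static int __init foo_init(void)
    {
            /* name, object size, alignment, slab flags, constructor */
            foo_cache = kmem_cache_create("foo_cache", sizeof(struct foo),
                                          0, SLAB_HWCACHE_ALIGN, NULL);
            if (!foo_cache)
                    return -ENOMEM;
            return 0;
    }

    static void __exit foo_exit(void)
    {
            kmem_cache_destroy(foo_cache);
    }

    module_init(foo_init);
    module_exit(foo_exit);
    MODULE_LICENSE("GPL");

Objects then come from kmem_cache_alloc(foo_cache, GFP_KERNEL) and go back via kmem_cache_free(foo_cache, obj).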
418 static __always_inline unsigned int __kmalloc_index(size_t size, in __kmalloc_index() argument
421 if (!size) in __kmalloc_index()
424 if (size <= KMALLOC_MIN_SIZE) in __kmalloc_index()
427 if (KMALLOC_MIN_SIZE <= 32 && size > 64 && size <= 96) in __kmalloc_index()
429 if (KMALLOC_MIN_SIZE <= 64 && size > 128 && size <= 192) in __kmalloc_index()
431 if (size <= 8) return 3; in __kmalloc_index()
432 if (size <= 16) return 4; in __kmalloc_index()
433 if (size <= 32) return 5; in __kmalloc_index()
434 if (size <= 64) return 6; in __kmalloc_index()
435 if (size <= 128) return 7; in __kmalloc_index()
436 if (size <= 256) return 8; in __kmalloc_index()
437 if (size <= 512) return 9; in __kmalloc_index()
438 if (size <= 1024) return 10; in __kmalloc_index()
439 if (size <= 2 * 1024) return 11; in __kmalloc_index()
440 if (size <= 4 * 1024) return 12; in __kmalloc_index()
441 if (size <= 8 * 1024) return 13; in __kmalloc_index()
442 if (size <= 16 * 1024) return 14; in __kmalloc_index()
443 if (size <= 32 * 1024) return 15; in __kmalloc_index()
444 if (size <= 64 * 1024) return 16; in __kmalloc_index()
445 if (size <= 128 * 1024) return 17; in __kmalloc_index()
446 if (size <= 256 * 1024) return 18; in __kmalloc_index()
447 if (size <= 512 * 1024) return 19; in __kmalloc_index()
448 if (size <= 1024 * 1024) return 20; in __kmalloc_index()
449 if (size <= 2 * 1024 * 1024) return 21; in __kmalloc_index()
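The block above is the compile-time size-to-cache mapping: for a constant size, index n selects the 2^n-byte kmalloc cache, with the off-power 96- and 192-byte caches slotted in as low indices when KMALLOC_MIN_SIZE permits, and sizes below KMALLOC_MIN_SIZE clamped to the smallest cache. A user-space sketch of just the power-of-two part, for intuition (the 96/192 special cases, the zero-size check, and the minimum-size clamp are deliberately omitted):

    #include <stdio.h>
    #include <stddef.h>

    /* Sketch of __kmalloc_index() without the 96/192-byte caches or the
     * KMALLOC_MIN_SIZE clamp: index n means "the 2^n-byte cache". */
    static unsigned int kmalloc_index_sketch(size_t size)
    {
            unsigned int index = 3;         /* smallest modeled cache: 8 bytes */

            while ((1UL << index) < size)
                    index++;
            return index;
    }

    int main(void)
    {
            printf("%u\n", kmalloc_index_sketch(100));   /* 7  -> 128 B cache */
            printf("%u\n", kmalloc_index_sketch(4096));  /* 12 -> 4 KiB cache */
            return 0;
    }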
463 void *__kmalloc(size_t size, gfp_t flags) __assume_kmalloc_alignment __alloc_size(1);
487 void kmem_cache_free_bulk(struct kmem_cache *s, size_t size, void **p);
488 int kmem_cache_alloc_bulk(struct kmem_cache *s, gfp_t flags, size_t size, void **p);
494 static __always_inline void kfree_bulk(size_t size, void **p) in kfree_bulk() argument
496 kmem_cache_free_bulk(NULL, size, p); in kfree_bulk()
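kmem_cache_alloc_bulk() and kmem_cache_free_bulk() move many objects per call to amortize per-allocation overhead, and kfree_bulk() is the kmalloc-side wrapper: passing a NULL cache makes the free path look up each pointer's own cache. A hedged sketch; struct item and alloc_batch() are hypothetical:

    #include <linux/kernel.h>
    #include <linux/slab.h>

    struct item { u64 key; };       /* hypothetical payload */

    static int alloc_batch(struct kmem_cache *cache)
    {
            void *objs[16];
            int got;

            /* Returns the number of objects allocated, 0 on failure. */
            got = kmem_cache_alloc_bulk(cache, GFP_KERNEL, ARRAY_SIZE(objs), objs);
            if (!got)
                    return -ENOMEM;

            /* ... use objs[0] .. objs[got - 1] ... */

            kmem_cache_free_bulk(cache, got, objs);
            return 0;
    }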
499 void *__kmalloc_node(size_t size, gfp_t flags, int node) __assume_kmalloc_alignment
504 void *kmalloc_trace(struct kmem_cache *s, gfp_t flags, size_t size)
508 int node, size_t size) __assume_kmalloc_alignment
510 void *kmalloc_large(size_t size, gfp_t flags) __assume_page_alignment
513 void *kmalloc_large_node(size_t size, gfp_t flags, int node) __assume_page_alignment
571 static __always_inline __alloc_size(1) void *kmalloc(size_t size, gfp_t flags) in kmalloc() argument
573 if (__builtin_constant_p(size) && size) { in kmalloc()
576 if (size > KMALLOC_MAX_CACHE_SIZE) in kmalloc()
577 return kmalloc_large(size, flags); in kmalloc()
579 index = kmalloc_index(size); in kmalloc()
582 flags, size); in kmalloc()
584 return __kmalloc(size, flags); in kmalloc()
587 static __always_inline __alloc_size(1) void *kmalloc(size_t size, gfp_t flags) in kmalloc() argument
589 if (__builtin_constant_p(size) && size > KMALLOC_MAX_CACHE_SIZE) in kmalloc()
590 return kmalloc_large(size, flags); in kmalloc()
592 return __kmalloc(size, flags); in kmalloc()
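Two kmalloc() bodies appear above because the header builds a different inline depending on whether per-size kmalloc caches are available (historically, SLOB configurations had none). The logic is otherwise the same: when __builtin_constant_p(size) holds, the compiler resolves kmalloc_index() (or the kmalloc_large() branch for sizes above KMALLOC_MAX_CACHE_SIZE) at compile time; runtime sizes fall through to __kmalloc(). A small sketch of both paths; grab() is hypothetical:

    #include <linux/slab.h>

    static void *grab(size_t len)
    {
            /* Constant size: kmalloc_index(64) folds at compile time and
             * this becomes a direct allocation from the 64-byte cache. */
            void *fixed = kmalloc(64, GFP_KERNEL);

            /* Runtime size: takes the out-of-line __kmalloc() path. */
            void *dyn = kmalloc(len, GFP_KERNEL);

            kfree(fixed);
            return dyn;     /* caller kfree()s; may be NULL */
    }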
597 static __always_inline __alloc_size(1) void *kmalloc_node(size_t size, gfp_t flags, int node) in kmalloc_node() argument
599 if (__builtin_constant_p(size) && size) { in kmalloc_node()
602 if (size > KMALLOC_MAX_CACHE_SIZE) in kmalloc_node()
603 return kmalloc_large_node(size, flags, node); in kmalloc_node()
605 index = kmalloc_index(size); in kmalloc_node()
608 flags, node, size); in kmalloc_node()
610 return __kmalloc_node(size, flags, node); in kmalloc_node()
613 static __always_inline __alloc_size(1) void *kmalloc_node(size_t size, gfp_t flags, int node) in kmalloc_node() argument
615 if (__builtin_constant_p(size) && size > KMALLOC_MAX_CACHE_SIZE) in kmalloc_node()
616 return kmalloc_large_node(size, flags, node); in kmalloc_node()
618 return __kmalloc_node(size, flags, node); in kmalloc_node()
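kmalloc_node() repeats the same constant-size dispatch but pins the allocation to a NUMA node; NUMA_NO_NODE means no preference. A hypothetical sketch of placing a buffer near the CPU that will touch it:

    #include <linux/slab.h>
    #include <linux/topology.h>

    /* Hypothetical helper: allocate on the node backing @cpu. */
    static void *alloc_near(int cpu, size_t len)
    {
            return kmalloc_node(len, GFP_KERNEL, cpu_to_node(cpu));
    }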
628 static inline __alloc_size(1, 2) void *kmalloc_array(size_t n, size_t size, gfp_t flags) in kmalloc_array() argument
632 if (unlikely(check_mul_overflow(n, size, &bytes))) in kmalloc_array()
634 if (__builtin_constant_p(n) && __builtin_constant_p(size)) in kmalloc_array()
665 static inline __alloc_size(1, 2) void *kcalloc(size_t n, size_t size, gfp_t flags) in kcalloc() argument
667 return kmalloc_array(n, size, flags | __GFP_ZERO); in kcalloc()
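kmalloc_array() wraps the n * size multiplication in check_mul_overflow() and returns NULL on overflow rather than allocating a too-short buffer; kcalloc() is the same plus __GFP_ZERO. Sketch; make_table() is hypothetical:

    #include <linux/slab.h>
    #include <linux/types.h>

    static u32 *make_table(size_t nr)
    {
            /* Overflow of nr * sizeof(u32) yields NULL, never a short buffer. */
            u32 *tbl = kcalloc(nr, sizeof(*tbl), GFP_KERNEL);

            if (!tbl)
                    return NULL;
            /* tbl[0] .. tbl[nr - 1] arrive zeroed. */
            return tbl;
    }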
670 void *__kmalloc_node_track_caller(size_t size, gfp_t flags, int node,
672 #define kmalloc_node_track_caller(size, flags, node) \ argument
673 __kmalloc_node_track_caller(size, flags, node, \
684 #define kmalloc_track_caller(size, flags) \ argument
685 __kmalloc_node_track_caller(size, flags, \
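The _track_caller variants are macros rather than inlines so that _RET_IP_ is evaluated in the user's own frame: slab debugging and leak reports then attribute the allocation to the wrapper's caller instead of the wrapper itself. The typical consumer is an allocation helper of your own; my_alloc() here is hypothetical:

    #include <linux/slab.h>

    /* Without kmalloc_track_caller(), every allocation made through this
     * helper would show up as coming from my_alloc() itself. */
    static void *my_alloc(size_t len, gfp_t flags)
    {
            return kmalloc_track_caller(len, flags);
    }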
688 static inline __alloc_size(1, 2) void *kmalloc_array_node(size_t n, size_t size, gfp_t flags, in kmalloc_array_node() argument
693 if (unlikely(check_mul_overflow(n, size, &bytes))) in kmalloc_array_node()
695 if (__builtin_constant_p(n) && __builtin_constant_p(size)) in kmalloc_array_node()
700 static inline __alloc_size(1, 2) void *kcalloc_node(size_t n, size_t size, gfp_t flags, int node) in kcalloc_node() argument
702 return kmalloc_array_node(n, size, flags | __GFP_ZERO, node); in kcalloc_node()
718 static inline __alloc_size(1) void *kzalloc(size_t size, gfp_t flags) in kzalloc() argument
720 return kmalloc(size, flags | __GFP_ZERO); in kzalloc()
729 static inline __alloc_size(1) void *kzalloc_node(size_t size, gfp_t flags, int node) in kzalloc_node() argument
731 return kmalloc_node(size, flags | __GFP_ZERO, node); in kzalloc_node()
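kzalloc() and kzalloc_node() just OR in __GFP_ZERO, so the returned object needs no explicit memset(). Sketch with a hypothetical struct:

    #include <linux/slab.h>

    struct conf {                   /* hypothetical */
            int flags;
            char name[16];
    };

    static struct conf *new_conf(void)
    {
            /* Arrives zeroed: flags == 0, name == "". */
            return kzalloc(sizeof(struct conf), GFP_KERNEL);
    }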
734 extern void *kvmalloc_node(size_t size, gfp_t flags, int node) __alloc_size(1);
735 static inline __alloc_size(1) void *kvmalloc(size_t size, gfp_t flags) in kvmalloc() argument
737 return kvmalloc_node(size, flags, NUMA_NO_NODE); in kvmalloc()
739 static inline __alloc_size(1) void *kvzalloc_node(size_t size, gfp_t flags, int node) in kvzalloc_node() argument
741 return kvmalloc_node(size, flags | __GFP_ZERO, node); in kvzalloc_node()
743 static inline __alloc_size(1) void *kvzalloc(size_t size, gfp_t flags) in kvzalloc() argument
745 return kvmalloc(size, flags | __GFP_ZERO); in kvzalloc()
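The kvmalloc() family tries the slab path first and falls back to vmalloc() when the request is too large or memory is too fragmented for physically-contiguous pages, so callers get the cheaper mapping when possible. Either way the result must go back through kvfree(), which picks the matching release path. Hedged sketch; demo() is hypothetical:

    #include <linux/slab.h>
    #include <linux/sizes.h>

    static int demo(void)
    {
            /* May be slab- or vmalloc-backed; free with kvfree(), not kfree(). */
            void *buf = kvzalloc(SZ_4M, GFP_KERNEL);

            if (!buf)
                    return -ENOMEM;
            /* ... */
            kvfree(buf);
            return 0;
    }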
748 static inline __alloc_size(1, 2) void *kvmalloc_array(size_t n, size_t size, gfp_t flags) in kvmalloc_array() argument
752 if (unlikely(check_mul_overflow(n, size, &bytes))) in kvmalloc_array()
758 static inline __alloc_size(1, 2) void *kvcalloc(size_t n, size_t size, gfp_t flags) in kvcalloc() argument
760 return kvmalloc_array(n, size, flags | __GFP_ZERO); in kvcalloc()
784 size_t kmalloc_size_roundup(size_t size);
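kmalloc_size_roundup() reports the bucket size a request would actually consume (the matching kmalloc cache's object size), so callers that track capacity can use the slack instead of wasting it. Sketch; grow_buf() is hypothetical:

    #include <linux/slab.h>

    static void *grow_buf(size_t want, size_t *got)
    {
            /* e.g. want == 100 typically rounds up to 128; record the
             * full bucket as the usable capacity. */
            size_t full = kmalloc_size_roundup(want);
            void *p = kmalloc(full, GFP_KERNEL);

            if (p)
                    *got = full;
            return p;
    }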