Lines matching references to identifier: gfpflags

2121 static inline bool pfmemalloc_match(struct page *page, gfp_t gfpflags);
2127 struct page **ret_page, gfp_t gfpflags) in get_partial_node() argument
2147 if (!pfmemalloc_match(page, gfpflags)) in get_partial_node()
2789 slab_out_of_memory(struct kmem_cache *s, gfp_t gfpflags, int nid) in slab_out_of_memory() argument
2797 if ((gfpflags & __GFP_NOWARN) || !__ratelimit(&slub_oom_rs)) in slab_out_of_memory()
2801 nid, gfpflags, &gfpflags); in slab_out_of_memory()
2825 static inline bool pfmemalloc_match(struct page *page, gfp_t gfpflags) in pfmemalloc_match() argument
2828 return gfp_pfmemalloc_allowed(gfpflags); in pfmemalloc_match()
2838 static inline bool pfmemalloc_match_unsafe(struct page *page, gfp_t gfpflags) in pfmemalloc_match_unsafe() argument
2841 return gfp_pfmemalloc_allowed(gfpflags); in pfmemalloc_match_unsafe()
2899 static void *___slab_alloc(struct kmem_cache *s, gfp_t gfpflags, int node, in ___slab_alloc() argument
2942 if (unlikely(!pfmemalloc_match_unsafe(page, gfpflags))) in ___slab_alloc()
3017 freelist = get_partial(s, gfpflags, node, &page); in ___slab_alloc()
3022 page = new_slab(s, gfpflags, node); in ___slab_alloc()
3026 slab_out_of_memory(s, gfpflags, node); in ___slab_alloc()
3054 if (unlikely(!pfmemalloc_match(page, gfpflags))) in ___slab_alloc()
3095 static void *__slab_alloc(struct kmem_cache *s, gfp_t gfpflags, int node, in __slab_alloc() argument
3109 p = ___slab_alloc(s, gfpflags, node, addr, c); in __slab_alloc()
3139 gfp_t gfpflags, int node, unsigned long addr, size_t orig_size) in slab_alloc_node() argument
3148 s = slab_pre_alloc_hook(s, &objcg, 1, gfpflags); in slab_alloc_node()
3152 object = kfence_alloc(s, orig_size, gfpflags); in slab_alloc_node()
3200 object = __slab_alloc(s, gfpflags, node, addr, c); in slab_alloc_node()
3231 init = slab_want_init_on_alloc(gfpflags, s); in slab_alloc_node()
3234 slab_post_alloc_hook(s, objcg, gfpflags, 1, &object, init); in slab_alloc_node()
3240 gfp_t gfpflags, unsigned long addr, size_t orig_size) in slab_alloc() argument
3242 return slab_alloc_node(s, gfpflags, NUMA_NO_NODE, addr, orig_size); in slab_alloc()
3245 void *kmem_cache_alloc(struct kmem_cache *s, gfp_t gfpflags) in kmem_cache_alloc() argument
3247 void *ret = slab_alloc(s, gfpflags, _RET_IP_, s->object_size); in kmem_cache_alloc()
3250 s->size, gfpflags); in kmem_cache_alloc()
3257 void *kmem_cache_alloc_trace(struct kmem_cache *s, gfp_t gfpflags, size_t size) in kmem_cache_alloc_trace() argument
3259 void *ret = slab_alloc(s, gfpflags, _RET_IP_, size); in kmem_cache_alloc_trace()
3260 trace_kmalloc(_RET_IP_, ret, size, s->size, gfpflags); in kmem_cache_alloc_trace()
3261 ret = kasan_kmalloc(s, ret, size, gfpflags); in kmem_cache_alloc_trace()
3268 void *kmem_cache_alloc_node(struct kmem_cache *s, gfp_t gfpflags, int node) in kmem_cache_alloc_node() argument
3270 void *ret = slab_alloc_node(s, gfpflags, node, _RET_IP_, s->object_size); in kmem_cache_alloc_node()
3273 s->object_size, s->size, gfpflags, node); in kmem_cache_alloc_node()
3281 gfp_t gfpflags, in kmem_cache_alloc_node_trace() argument
3284 void *ret = slab_alloc_node(s, gfpflags, node, _RET_IP_, size); in kmem_cache_alloc_node_trace()
3287 size, s->size, gfpflags, node); in kmem_cache_alloc_node_trace()
3289 ret = kasan_kmalloc(s, ret, size, gfpflags); in kmem_cache_alloc_node_trace()
4912 void *__kmalloc_track_caller(size_t size, gfp_t gfpflags, unsigned long caller) in __kmalloc_track_caller() argument
4918 return kmalloc_large(size, gfpflags); in __kmalloc_track_caller()
4920 s = kmalloc_slab(size, gfpflags); in __kmalloc_track_caller()
4925 ret = slab_alloc(s, gfpflags, caller, size); in __kmalloc_track_caller()
4928 trace_kmalloc(caller, ret, size, s->size, gfpflags); in __kmalloc_track_caller()
4935 void *__kmalloc_node_track_caller(size_t size, gfp_t gfpflags, in __kmalloc_node_track_caller() argument
4942 ret = kmalloc_large_node(size, gfpflags, node); in __kmalloc_node_track_caller()
4946 gfpflags, node); in __kmalloc_node_track_caller()
4951 s = kmalloc_slab(size, gfpflags); in __kmalloc_node_track_caller()
4956 ret = slab_alloc_node(s, gfpflags, node, caller, size); in __kmalloc_node_track_caller()
4959 trace_kmalloc_node(caller, ret, size, s->size, gfpflags, node); in __kmalloc_node_track_caller()