Lines Matching refs:gfpflags (mm/slub.c)

The matches below trace gfpflags from the kmem_cache_alloc*() entry points through slab_alloc_node() and the __slab_alloc()/___slab_alloc() slow path, down to pfmemalloc_match() and slab_out_of_memory().
2249 static inline bool pfmemalloc_match(struct slab *slab, gfp_t gfpflags);
2965 slab_out_of_memory(struct kmem_cache *s, gfp_t gfpflags, int nid) in slab_out_of_memory() argument
2972 if ((gfpflags & __GFP_NOWARN) || !__ratelimit(&slub_oom_rs)) in slab_out_of_memory()
2976 nid, gfpflags, &gfpflags); in slab_out_of_memory()
3000 slab_out_of_memory(struct kmem_cache *s, gfp_t gfpflags, int nid) { } in slab_out_of_memory() argument
3003 static inline bool pfmemalloc_match(struct slab *slab, gfp_t gfpflags) in pfmemalloc_match() argument
3006 return gfp_pfmemalloc_allowed(gfpflags); in pfmemalloc_match()
3065 static void *___slab_alloc(struct kmem_cache *s, gfp_t gfpflags, int node, in ___slab_alloc() argument
3108 if (unlikely(!pfmemalloc_match(slab, gfpflags))) in ___slab_alloc()
3185 pc.flags = gfpflags; in ___slab_alloc()
3193 slab = new_slab(s, gfpflags, node); in ___slab_alloc()
3197 slab_out_of_memory(s, gfpflags, node); in ___slab_alloc()
3240 if (unlikely(!pfmemalloc_match(slab, gfpflags))) { in ___slab_alloc()
3278 static void *__slab_alloc(struct kmem_cache *s, gfp_t gfpflags, int node, in __slab_alloc() argument
3292 p = ___slab_alloc(s, gfpflags, node, addr, c, orig_size); in __slab_alloc()
3300 gfp_t gfpflags, int node, unsigned long addr, size_t orig_size) in __slab_alloc_node() argument
3345 object = __slab_alloc(s, gfpflags, node, addr, c, orig_size); in __slab_alloc_node()
3379 gfp_t gfpflags, int node, unsigned long addr, size_t orig_size) in __slab_alloc_node() argument
3385 pc.flags = gfpflags; in __slab_alloc_node()
3393 slab = new_slab(s, gfpflags, node); in __slab_alloc_node()
3395 slab_out_of_memory(s, gfpflags, node); in __slab_alloc_node()
3428 gfp_t gfpflags, int node, unsigned long addr, size_t orig_size) in slab_alloc_node() argument
3434 s = slab_pre_alloc_hook(s, lru, &objcg, 1, gfpflags); in slab_alloc_node()
3438 object = kfence_alloc(s, orig_size, gfpflags); in slab_alloc_node()
3442 object = __slab_alloc_node(s, gfpflags, node, addr, orig_size); in slab_alloc_node()
3445 init = slab_want_init_on_alloc(gfpflags, s); in slab_alloc_node()
3452 slab_post_alloc_hook(s, objcg, gfpflags, 1, &object, init, orig_size); in slab_alloc_node()
3458 gfp_t gfpflags, unsigned long addr, size_t orig_size) in slab_alloc() argument
3460 return slab_alloc_node(s, lru, gfpflags, NUMA_NO_NODE, addr, orig_size); in slab_alloc()
3465 gfp_t gfpflags) in __kmem_cache_alloc_lru() argument
3467 void *ret = slab_alloc(s, lru, gfpflags, _RET_IP_, s->object_size); in __kmem_cache_alloc_lru()
3469 trace_kmem_cache_alloc(_RET_IP_, ret, s, gfpflags, NUMA_NO_NODE); in __kmem_cache_alloc_lru()
3474 void *kmem_cache_alloc(struct kmem_cache *s, gfp_t gfpflags) in kmem_cache_alloc() argument
3476 return __kmem_cache_alloc_lru(s, NULL, gfpflags); in kmem_cache_alloc()
3481 gfp_t gfpflags) in kmem_cache_alloc_lru() argument
3483 return __kmem_cache_alloc_lru(s, lru, gfpflags); in kmem_cache_alloc_lru()
3487 void *__kmem_cache_alloc_node(struct kmem_cache *s, gfp_t gfpflags, in __kmem_cache_alloc_node() argument
3491 return slab_alloc_node(s, NULL, gfpflags, node, in __kmem_cache_alloc_node()
3495 void *kmem_cache_alloc_node(struct kmem_cache *s, gfp_t gfpflags, int node) in kmem_cache_alloc_node() argument
3497 void *ret = slab_alloc_node(s, NULL, gfpflags, node, _RET_IP_, s->object_size); in kmem_cache_alloc_node()
3499 trace_kmem_cache_alloc(_RET_IP_, ret, s, gfpflags, node); in kmem_cache_alloc_node()
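For orientation, a minimal caller-side sketch (struct my_obj, my_cache and my_obj_example() are hypothetical names) showing where the gfpflags threaded through the functions above originate. GFP_KERNEL may sleep, so new_slab() can reclaim memory before ___slab_alloc() gives up; __GFP_NOWARN suppresses the slab_out_of_memory() report at line 2972.

#include <linux/slab.h>
#include <linux/gfp.h>
#include <linux/topology.h>

struct my_obj {				/* hypothetical object type */
	int id;
};

static struct kmem_cache *my_cache;	/* hypothetical cache */

static int my_obj_example(void)
{
	struct my_obj *obj;

	my_cache = kmem_cache_create("my_obj_cache", sizeof(struct my_obj),
				     0, SLAB_HWCACHE_ALIGN, NULL);
	if (!my_cache)
		return -ENOMEM;

	/*
	 * GFP_KERNEL may sleep and reclaim inside new_slab(); adding
	 * __GFP_NOWARN keeps a failure from triggering the ratelimited
	 * slab_out_of_memory() warning.
	 */
	obj = kmem_cache_alloc(my_cache, GFP_KERNEL | __GFP_NOWARN);
	if (!obj) {
		kmem_cache_destroy(my_cache);
		return -ENOMEM;
	}
	obj->id = 1;
	kmem_cache_free(my_cache, obj);

	/* NUMA-aware variant: same flag semantics, explicit node preference. */
	obj = kmem_cache_alloc_node(my_cache, GFP_ATOMIC, numa_node_id());
	if (obj)
		kmem_cache_free(my_cache, obj);

	kmem_cache_destroy(my_cache);
	return 0;
}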
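kmem_cache_alloc_lru() (line 3481) takes the same gfpflags plus a struct list_lru; callers such as the inode and dentry caches pass their superblock LRUs so memcg-aware list_lru state for the allocating cgroup can be prepared with the same flags. A sketch under the same hypothetical names (my_lru and my_obj_alloc_lru() are made up; list_lru_init_memcg() and kmem_cache_alloc_lru() are existing APIs):

#include <linux/list_lru.h>
#include <linux/slab.h>

static struct list_lru my_lru;		/* hypothetical memcg-aware LRU */

static void *my_obj_alloc_lru(struct kmem_cache *cache)
{
	/* One-time setup elsewhere: list_lru_init_memcg(&my_lru, NULL); */

	/*
	 * Same gfpflags semantics as kmem_cache_alloc(); the extra lru
	 * argument lets the allocator set up per-memcg list_lru state
	 * before the new object is charged to the cgroup.
	 */
	return kmem_cache_alloc_lru(cache, &my_lru, GFP_KERNEL);
}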
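Finally, pfmemalloc_match() (lines 3003-3006) is where gfpflags decide whether a request may be served from a slab that was allocated out of the pfmemalloc reserves; the slow path checks it at lines 3108 and 3240 and deactivates the slab when the flags do not allow it. A sketch of the helper as it reads in recent kernels (the slab_test_pfmemalloc() check sits on lines that do not reference gfpflags and so is absent from the listing above):

static inline bool pfmemalloc_match(struct slab *slab, gfp_t gfpflags)
{
	/*
	 * A slab carved out of the pfmemalloc reserves may only serve
	 * requests that are themselves allowed to dip into reserves.
	 */
	if (unlikely(slab_test_pfmemalloc(slab)))
		return gfp_pfmemalloc_allowed(gfpflags);

	return true;
}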