Lines matching refs: cache. Each entry gives the source line number, the matching code, and the enclosing function in which cache appears as an argument.
156 void __kasan_unpoison_new_object(struct kmem_cache *cache, void *object) in __kasan_unpoison_new_object() argument
158 kasan_unpoison(object, cache->object_size, false); in __kasan_unpoison_new_object()
161 void __kasan_poison_new_object(struct kmem_cache *cache, void *object) in __kasan_poison_new_object() argument
163 kasan_poison(object, round_up(cache->object_size, KASAN_GRANULE_SIZE), in __kasan_poison_new_object()
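
The pairing above is asymmetric on purpose: unpoisoning is passed the exact cache->object_size, while poisoning rounds up to whole KASAN granules, because shadow memory tracks accessibility at granule resolution. A minimal userspace sketch of that rounding, assuming KASAN_GRANULE_SIZE is 8 as in generic KASAN; round_up mirrors the kernel's power-of-two macro, and the sizes are illustrative:

#include <stdio.h>

#define KASAN_GRANULE_SIZE 8
#define round_up(x, y) ((((x) - 1) | ((y) - 1)) + 1)	/* y must be a power of two */

int main(void)
{
	unsigned long object_size = 26;	/* hypothetical cache->object_size */

	/* unpoison covers exactly object_size bytes ... */
	printf("unpoison covers %lu bytes\n", object_size);
	/* ... while poison covers whole granules, as in __kasan_poison_new_object() */
	printf("poison covers   %lu bytes\n",
	       (unsigned long)round_up(object_size, KASAN_GRANULE_SIZE));
	return 0;
}
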
177 static inline u8 assign_tag(struct kmem_cache *cache, in assign_tag() argument
187 if (!cache->ctor && !(cache->flags & SLAB_TYPESAFE_BY_RCU)) in assign_tag()
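
The branch above encodes the tag policy: objects in caches with a constructor or SLAB_TYPESAFE_BY_RCU may legally be reached through stale pointers, so they must keep a stable tag across reuse; all other caches can take a fresh tag on every allocation. A sketch of that policy, where toy_assign_tag and the stable-tag derivation are stand-ins rather than the kernel's implementation:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

static uint8_t toy_assign_tag(bool has_ctor, bool typesafe_by_rcu,
			      uintptr_t object)
{
	if (!has_ctor && !typesafe_by_rcu)
		return (uint8_t)rand();		/* fresh tag per allocation */
	return (uint8_t)(object >> 4);		/* stable tag per object */
}

int main(void)
{
	uintptr_t obj = (uintptr_t)0x12345680;	/* hypothetical object address */

	printf("rcu/ctor cache tag: %#x\n", toy_assign_tag(false, true, obj));
	printf("plain cache tag:    %#x\n", toy_assign_tag(false, false, obj));
	return 0;
}
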
198 void * __must_check __kasan_init_slab_obj(struct kmem_cache *cache, in __kasan_init_slab_obj() argument
203 kasan_init_object_meta(cache, object); in __kasan_init_slab_obj()
206 object = set_tag(object, assign_tag(cache, object, true)); in __kasan_init_slab_obj()
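
set_tag() relies on arm64's Top Byte Ignore: software can store metadata in pointer bits 63:56 without affecting address translation, so tagged and untagged pointers name the same memory. A userspace sketch of the bit manipulation, assuming 64-bit pointers; toy_set_tag is hypothetical and the tagged pointer is only printed, never dereferenced:

#include <stdint.h>
#include <stdio.h>

static void *toy_set_tag(void *ptr, uint8_t tag)
{
	/* assumes a 64-bit build; clear then set bits 63:56 */
	uint64_t p = (uint64_t)(uintptr_t)ptr & ~(0xffULL << 56);

	return (void *)(uintptr_t)(p | ((uint64_t)tag << 56));
}

int main(void)
{
	int x = 0;
	void *tagged = toy_set_tag(&x, 0xab);

	printf("untagged %p -> tagged %p\n", (void *)&x, tagged);
	return 0;
}
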
212 static bool check_slab_allocation(struct kmem_cache *cache, void *object, in check_slab_allocation() argument
219 if (unlikely(nearest_obj(cache, virt_to_slab(object), object) != object)) { in check_slab_allocation()
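
nearest_obj() maps an arbitrary pointer back to the start of the slab object containing it, so the comparison above catches frees of mid-object pointers. A rough model in which nearest_obj is approximated by integer division over a hypothetical slab layout (slab_base and obj_size are invented values):

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

static bool toy_valid_free(uintptr_t slab_base, size_t obj_size, uintptr_t ptr)
{
	uintptr_t nearest = slab_base + (ptr - slab_base) / obj_size * obj_size;

	return nearest == ptr;	/* mismatch triggers an invalid-free report */
}

int main(void)
{
	printf("%d\n", toy_valid_free(0x1000, 64, 0x1040)); /* 1: object start */
	printf("%d\n", toy_valid_free(0x1000, 64, 0x1044)); /* 0: mid-object  */
	return 0;
}
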
232 static inline void poison_slab_object(struct kmem_cache *cache, void *object, in poison_slab_object() argument
239 kasan_poison(object, round_up(cache->object_size, KASAN_GRANULE_SIZE), in poison_slab_object()
243 kasan_save_free_info(cache, tagged_object); in poison_slab_object()
246 bool __kasan_slab_pre_free(struct kmem_cache *cache, void *object, in __kasan_slab_pre_free() argument
251 return check_slab_allocation(cache, object, ip); in __kasan_slab_pre_free()
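
Taken together, the free path first runs the sanity check (via __kasan_slab_pre_free()), then poison_slab_object() marks the whole rounded-up object as freed and records a free stack. A toy shadow-memory model of that poisoning, using 0xFB in the spirit of generic KASAN's KASAN_SLAB_FREE marker; the array size and offsets are illustrative:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define KASAN_GRANULE_SIZE 8
#define KASAN_SLAB_FREE 0xFB
#define round_up(x, y) ((((x) - 1) | ((y) - 1)) + 1)

static uint8_t toy_shadow[64];	/* one shadow byte per 8-byte granule */

static void toy_poison_slab_object(size_t obj_off, size_t object_size)
{
	size_t bytes = round_up(object_size, KASAN_GRANULE_SIZE);

	/* mark every granule of the object as freed memory */
	memset(&toy_shadow[obj_off / KASAN_GRANULE_SIZE], KASAN_SLAB_FREE,
	       bytes / KASAN_GRANULE_SIZE);
	/* kasan_save_free_info() would record the freeing stack here */
}

int main(void)
{
	toy_poison_slab_object(0, 26);	/* 26 bytes -> 4 poisoned granules */
	printf("%#x %#x %#x %#x\n", toy_shadow[0], toy_shadow[1],
	       toy_shadow[2], toy_shadow[3]);
	return 0;
}
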
254 bool __kasan_slab_free(struct kmem_cache *cache, void *object, bool init, in __kasan_slab_free() argument
275 poison_slab_object(cache, object, init); in __kasan_slab_free()
282 if (kasan_quarantine_put(cache, object)) in __kasan_slab_free()
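
kasan_quarantine_put() is what delays reuse: on generic KASAN a freed object is parked in a quarantine instead of going straight back to the allocator, so use-after-free accesses land on still-poisoned memory. A fixed-size FIFO ring is a reasonable mental model; the slot count and names below are invented, not the kernel's batched implementation:

#include <stdbool.h>
#include <stddef.h>
#include <stdlib.h>

#define TOY_QUARANTINE_SLOTS 128

static void *toy_ring[TOY_QUARANTINE_SLOTS];
static size_t toy_head;

/* Absorb the object instead of freeing it, like kasan_quarantine_put(). */
static bool toy_quarantine_put(void *object, void (*reclaim)(void *))
{
	void *evicted = toy_ring[toy_head];

	toy_ring[toy_head] = object;
	toy_head = (toy_head + 1) % TOY_QUARANTINE_SLOTS;
	if (evicted)
		reclaim(evicted);	/* the oldest entry is finally released */
	return true;
}

int main(void)
{
	for (int i = 0; i < 300; i++)
		toy_quarantine_put(malloc(32), free);
	return 0;
}
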
319 static inline void unpoison_slab_object(struct kmem_cache *cache, void *object, in unpoison_slab_object() argument
326 kasan_unpoison(object, cache->object_size, init); in unpoison_slab_object()
329 if (kasan_stack_collection_enabled() && !is_kmalloc_cache(cache)) in unpoison_slab_object()
330 kasan_save_alloc_info(cache, object, flags); in unpoison_slab_object()
333 void * __must_check __kasan_slab_alloc(struct kmem_cache *cache, in __kasan_slab_alloc() argument
352 tag = assign_tag(cache, object, false); in __kasan_slab_alloc()
356 unpoison_slab_object(cache, tagged_object, flags, init); in __kasan_slab_alloc()
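
On allocation the flow reverses: __kasan_slab_alloc() assigns a tag and unpoison_slab_object() marks exactly cache->object_size bytes accessible, recording an alloc stack for non-kmalloc caches. In generic KASAN's shadow encoding, a shadow byte of 0 means a fully accessible granule and a small positive value gives the number of valid bytes in a trailing partial granule; a sketch of that encoding, with toy_shadow and the offsets purely illustrative:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define KASAN_GRANULE_SIZE 8

static uint8_t toy_shadow[64];

static void toy_unpoison(size_t obj_off, size_t object_size)
{
	size_t idx = obj_off / KASAN_GRANULE_SIZE;
	size_t full = object_size / KASAN_GRANULE_SIZE;

	memset(&toy_shadow[idx], 0, full);	/* 0 = fully accessible granule */
	if (object_size % KASAN_GRANULE_SIZE)
		toy_shadow[idx + full] = object_size % KASAN_GRANULE_SIZE;
	/* kasan_save_alloc_info() would record the stack for non-kmalloc caches */
}

int main(void)
{
	memset(toy_shadow, 0xFB, sizeof(toy_shadow));	/* start fully poisoned */
	toy_unpoison(0, 26);	/* 3 full granules, 2 valid bytes in the 4th */
	printf("%u %u %u %u %#x\n", toy_shadow[0], toy_shadow[1],
	       toy_shadow[2], toy_shadow[3], toy_shadow[4]);
	return 0;
}
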
361 static inline void poison_kmalloc_redzone(struct kmem_cache *cache, in poison_kmalloc_redzone() argument
378 redzone_end = round_up((unsigned long)(object + cache->object_size), in poison_kmalloc_redzone()
387 if (kasan_stack_collection_enabled() && is_kmalloc_cache(cache)) in poison_kmalloc_redzone()
388 kasan_save_alloc_info(cache, (void *)object, flags); in poison_kmalloc_redzone()
392 void * __must_check __kasan_kmalloc(struct kmem_cache *cache, const void *object, in __kasan_kmalloc() argument
405 poison_kmalloc_redzone(cache, object, size, flags); in __kasan_kmalloc()
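
Finally, the redzone math: kmalloc(size) may land in a cache whose object_size is larger than the request, and poison_kmalloc_redzone() poisons the slack between the granule-rounded requested size and the granule-rounded object_size as a right redzone, so out-of-bounds writes past the requested size are caught. A worked example with hypothetical values (the address and cache size are invented):

#include <stdio.h>

#define KASAN_GRANULE_SIZE 8
#define round_up(x, y) ((((x) - 1) | ((y) - 1)) + 1)

int main(void)
{
	unsigned long object = 0x1000;		/* hypothetical object address */
	unsigned long size = 100;		/* kmalloc(100) */
	unsigned long object_size = 128;	/* served from a kmalloc-128 cache */
	unsigned long redzone_start = round_up(object + size, KASAN_GRANULE_SIZE);
	unsigned long redzone_end = round_up(object + object_size, KASAN_GRANULE_SIZE);

	/* prints: redzone: [0x1068, 0x1080) = 24 bytes */
	printf("redzone: [%#lx, %#lx) = %lu bytes\n",
	       redzone_start, redzone_end, redzone_end - redzone_start);
	return 0;
}
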