Lines matching refs: object (identifier cross-reference; the hits below are from the Linux SLUB allocator, mm/slub.c)

247 	void *object;  member
515 static inline void *get_freepointer(struct kmem_cache *s, void *object) in get_freepointer() argument
520 object = kasan_reset_tag(object); in get_freepointer()
521 ptr_addr = (unsigned long)object + s->offset; in get_freepointer()
527 static void prefetch_freepointer(const struct kmem_cache *s, void *object) in prefetch_freepointer() argument
529 prefetchw(object + s->offset); in prefetch_freepointer()
544 static inline void *get_freepointer_safe(struct kmem_cache *s, void *object) in get_freepointer_safe() argument
550 return get_freepointer(s, object); in get_freepointer_safe()
552 object = kasan_reset_tag(object); in get_freepointer_safe()
553 freepointer_addr = (unsigned long)object + s->offset; in get_freepointer_safe()
558 static inline void set_freepointer(struct kmem_cache *s, void *object, void *fp) in set_freepointer() argument
560 unsigned long freeptr_addr = (unsigned long)object + s->offset; in set_freepointer()
563 BUG_ON(object == fp); /* naive detection of double free or corruption */ in set_freepointer()
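
The cluster above is SLUB's inline freelist: a free object stores the address of the next free object inside itself, at byte offset s->offset, and set_freepointer()'s BUG_ON at line 563 is a naive guard against an object pointing at itself after a double free. A minimal user-space sketch of the same layout; all names here are invented for illustration, not kernel API:

    #include <assert.h>
    #include <stddef.h>
    #include <stdio.h>

    /* Toy model: the next-free pointer lives `offset` bytes into each object. */
    struct mini_cache { size_t size; size_t offset; };

    static void *get_fp(struct mini_cache *s, void *object)
    {
            return *(void **)((char *)object + s->offset);
    }

    static void set_fp(struct mini_cache *s, void *object, void *fp)
    {
            assert(object != fp);   /* naive double-free detection, as at line 563 */
            *(void **)((char *)object + s->offset) = fp;
    }

    int main(void)
    {
            struct mini_cache s = { .size = 64, .offset = 0 };
            char slab[3 * 64];
            void *head = NULL;

            for (int i = 2; i >= 0; i--) {  /* thread three objects onto a freelist */
                    set_fp(&s, slab + i * s.size, head);
                    head = slab + i * s.size;
            }
            while (head) {                  /* pop objects the way the alloc path does */
                    printf("alloc %p\n", head);
                    head = get_fp(&s, head);
            }
            return 0;
    }
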
795 void *object, unsigned int orig_size) in set_orig_size() argument
797 void *p = kasan_reset_tag(object); in set_orig_size()
808 static inline unsigned int get_orig_size(struct kmem_cache *s, void *object) in get_orig_size() argument
810 void *p = kasan_reset_tag(object); in get_orig_size()
812 if (is_kfence_address(object)) in get_orig_size()
813 return kfence_ksize(object); in get_orig_size()
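
Lines 795-813 record the size originally requested from kmalloc in the object's debug metadata, so the slack between the request and the bucket size can be checked like a redzone; get_orig_size() short-circuits KFENCE-backed addresses, whose size comes from kfence_ksize() instead. A hedged sketch of the metadata accessors (the memcpy mirrors the unaligned-safe access the metadata area needs; names are invented):

    #include <string.h>

    /* Toy model: the requested size, stored as an unsigned int in the
     * object's metadata area. */
    static void set_orig_size_model(unsigned char *metadata, unsigned int orig_size)
    {
            memcpy(metadata, &orig_size, sizeof(orig_size));
    }

    static unsigned int get_orig_size_model(const unsigned char *metadata)
    {
            unsigned int orig_size;

            memcpy(&orig_size, metadata, sizeof(orig_size));
            return orig_size;
    }
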
927 struct slab *slab, void *object) in check_valid_pointer() argument
931 if (!object) in check_valid_pointer()
935 object = kasan_reset_tag(object); in check_valid_pointer()
936 object = restore_red_left(s, object); in check_valid_pointer()
937 if (object < base || object >= base + slab->objects * s->size || in check_valid_pointer()
938 (object - base) % s->size) { in check_valid_pointer()
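
check_valid_pointer() (lines 927-938) accepts an object pointer only if, after resetting the KASAN tag and backing off the left redzone, it falls inside the slab's payload and sits exactly on an object boundary. The test reduces to one range check plus one modulo; a self-contained sketch with model parameters:

    #include <stdbool.h>
    #include <stddef.h>

    /* base points at the first object; nr_objects objects of `size` bytes follow. */
    static bool valid_object_pointer(const char *base, size_t nr_objects,
                                     size_t size, const char *object)
    {
            if (object < base || object >= base + nr_objects * size)
                    return false;                         /* outside the slab */
            return (size_t)(object - base) % size == 0;   /* on an object boundary */
    }
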
954 static struct track *get_track(struct kmem_cache *s, void *object, in get_track() argument
959 p = object + get_info_end(s); in get_track()
983 static void set_track_update(struct kmem_cache *s, void *object, in set_track_update() argument
987 struct track *p = get_track(s, object, alloc); in set_track_update()
998 static __always_inline void set_track(struct kmem_cache *s, void *object, in set_track() argument
1003 set_track_update(s, object, alloc, addr, handle); in set_track()
1006 static void init_tracking(struct kmem_cache *s, void *object) in init_tracking() argument
1013 p = get_track(s, object, TRACK_ALLOC); in init_tracking()
1035 void print_tracking(struct kmem_cache *s, void *object) in print_tracking() argument
1041 print_track("Allocated", get_track(s, object, TRACK_ALLOC), pr_time); in print_tracking()
1042 print_track("Freed", get_track(s, object, TRACK_FREE), pr_time); in print_tracking()
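
The track helpers (lines 954-1042) keep two records per object, one for the last allocation and one for the last free, in the debug metadata that follows the object proper (get_info_end()); print_tracking() simply prints both. A simplified layout model, loosely patterned on the kernel's struct track (field set abridged; the real structure can also hold a stack depot handle):

    enum track_item { TRACK_ALLOC, TRACK_FREE };

    struct track_model {
            unsigned long addr;     /* caller address */
            int cpu;
            int pid;
            unsigned long when;     /* timestamp of the event */
    };

    /* Both records sit back to back after the object's metadata, so the
     * enum value doubles as an array index, as in get_track(). */
    static struct track_model *get_track_model(void *info_end, enum track_item alloc)
    {
            return (struct track_model *)info_end + alloc;
    }
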
1052 void skip_orig_size_check(struct kmem_cache *s, const void *object) in skip_orig_size_check() argument
1054 set_orig_size(s, (void *)object, s->object_size); in skip_orig_size_check()
1137 u8 *object, const char *reason) in object_err() argument
1143 print_trailer(s, slab, object); in object_err()
1189 static void init_object(struct kmem_cache *s, void *object, u8 val) in init_object() argument
1191 u8 *p = kasan_reset_tag(object); in init_object()
1209 poison_size = get_orig_size(s, object); in init_object()
1238 u8 *object, const char *what, u8 *start, unsigned int value, in check_bytes_and_report() argument
1262 object_err(s, slab, object, "Object corrupt"); in check_bytes_and_report()
1368 void *object, u8 val) in check_object() argument
1370 u8 *p = object; in check_object()
1371 u8 *endobject = object + s->object_size; in check_object()
1376 if (!check_bytes_and_report(s, slab, object, "Left Redzone", in check_object()
1377 object - s->red_left_pad, val, s->red_left_pad, ret)) in check_object()
1380 if (!check_bytes_and_report(s, slab, object, "Right Redzone", in check_object()
1385 orig_size = get_orig_size(s, object); in check_object()
1388 !check_bytes_and_report(s, slab, object, in check_object()
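
init_object() and check_object() (lines 1189-1388) poison an object's payload and then verify the redzones on both sides of it, with check_bytes_and_report() locating the first byte that deviates from the expected pattern (the kernel uses memchr_inv() for this). A compact user-space model of the scan; the poison values match include/linux/poison.h, the helper names are invented:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    #define POISON_FREE 0x6b        /* payload bytes of a free object */
    #define POISON_END  0xa5        /* final byte of a free object */

    /* Return the first byte that differs from `value`, like memchr_inv(). */
    static const unsigned char *first_mismatch(const unsigned char *start,
                                               size_t bytes, unsigned char value)
    {
            for (size_t i = 0; i < bytes; i++)
                    if (start[i] != value)
                            return start + i;
            return NULL;
    }

    static bool check_bytes_model(const unsigned char *start, size_t bytes,
                                  unsigned char value, const char *what)
    {
            const unsigned char *fault = first_mismatch(start, bytes, value);

            if (!fault)
                    return true;
            fprintf(stderr, "%s overwritten: 0x%02x instead of 0x%02x at offset %zu\n",
                    what, *fault, value, (size_t)(fault - start));
            return false;
    }
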
1485 void *object = NULL; in on_freelist() local
1493 if (object) { in on_freelist()
1494 object_err(s, slab, object, in on_freelist()
1496 set_freepointer(s, object, NULL); in on_freelist()
1506 object = fp; in on_freelist()
1507 fp = get_freepointer(s, object); in on_freelist()
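
on_freelist() (lines 1485-1507) walks the slab's freelist, and when it hits an invalid pointer it reports the corruption and repairs it by terminating the list at the last good object (the set_freepointer(s, object, NULL) at line 1496). A sketch of the bounded walk; the cap on iterations is what keeps a corrupted, cyclic list from hanging the checker (model types, not kernel ones):

    #include <stdbool.h>
    #include <stddef.h>

    struct node { struct node *next; };

    /* Count freelist entries, truncating the list after the last sane node. */
    static size_t count_freelist(struct node *fp, size_t max_objects,
                                 bool (*valid)(struct node *))
    {
            struct node *object = NULL;
            size_t nr = 0;

            while (fp && nr <= max_objects) {
                    if (!valid(fp)) {
                            if (object)
                                    object->next = NULL;  /* cut after last good entry */
                            break;
                    }
                    object = fp;
                    fp = object->next;
                    nr++;
            }
            return nr;
    }
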
1538 static void trace(struct kmem_cache *s, struct slab *slab, void *object, in trace() argument
1545 object, slab->inuse, in trace()
1549 print_section(KERN_INFO, "Object ", (void *)object, in trace()
1599 static void setup_object_debug(struct kmem_cache *s, void *object) in setup_object_debug() argument
1604 init_object(s, object, SLUB_RED_INACTIVE); in setup_object_debug()
1605 init_tracking(s, object); in setup_object_debug()
1620 struct slab *slab, void *object) in alloc_consistency_checks() argument
1625 if (!check_valid_pointer(s, slab, object)) { in alloc_consistency_checks()
1626 object_err(s, slab, object, "Freelist Pointer check fails"); in alloc_consistency_checks()
1630 if (!check_object(s, slab, object, SLUB_RED_INACTIVE)) in alloc_consistency_checks()
1637 struct slab *slab, void *object, int orig_size) in alloc_debug_processing() argument
1640 if (!alloc_consistency_checks(s, slab, object)) in alloc_debug_processing()
1645 trace(s, slab, object, 1); in alloc_debug_processing()
1646 set_orig_size(s, object, orig_size); in alloc_debug_processing()
1647 init_object(s, object, SLUB_RED_ACTIVE); in alloc_debug_processing()
1666 struct slab *slab, void *object, unsigned long addr) in free_consistency_checks() argument
1668 if (!check_valid_pointer(s, slab, object)) { in free_consistency_checks()
1669 slab_err(s, slab, "Invalid object pointer 0x%p", object); in free_consistency_checks()
1673 if (on_freelist(s, slab, object)) { in free_consistency_checks()
1674 object_err(s, slab, object, "Object already free"); in free_consistency_checks()
1678 if (!check_object(s, slab, object, SLUB_RED_ACTIVE)) in free_consistency_checks()
1684 object); in free_consistency_checks()
1687 object); in free_consistency_checks()
1689 object_err(s, slab, object, in free_consistency_checks()
1910 static inline void setup_object_debug(struct kmem_cache *s, void *object) {} in setup_object_debug() argument
1915 struct slab *slab, void *object, int orig_size) { return true; } in alloc_debug_processing() argument
1923 void *object, u8 val) { return 1; } in check_object() argument
1925 static inline void set_track(struct kmem_cache *s, void *object, in set_track() argument
2134 __alloc_tagging_slab_alloc_hook(struct kmem_cache *s, void *object, gfp_t flags) in __alloc_tagging_slab_alloc_hook() argument
2138 obj_exts = prepare_slab_obj_exts_hook(s, flags, object); in __alloc_tagging_slab_alloc_hook()
2149 alloc_tagging_slab_alloc_hook(struct kmem_cache *s, void *object, gfp_t flags) in alloc_tagging_slab_alloc_hook() argument
2152 __alloc_tagging_slab_alloc_hook(s, object, flags); in alloc_tagging_slab_alloc_hook()
2189 alloc_tagging_slab_alloc_hook(struct kmem_cache *s, void *object, gfp_t flags) in alloc_tagging_slab_alloc_hook() argument
2204 static void memcg_alloc_abort_single(struct kmem_cache *s, void *object);
2323 void *object; member
2380 delayed_free->object = x; in slab_free_hook()
2425 void *object; in slab_free_freelist_hook() local
2442 object = next; in slab_free_freelist_hook()
2443 next = get_freepointer(s, object); in slab_free_freelist_hook()
2446 if (likely(slab_free_hook(s, object, init, false))) { in slab_free_freelist_hook()
2448 set_freepointer(s, object, *head); in slab_free_freelist_hook()
2449 *head = object; in slab_free_freelist_hook()
2451 *tail = object; in slab_free_freelist_hook()
2459 } while (object != old_tail); in slab_free_freelist_hook()
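
slab_free_freelist_hook() (lines 2425-2459) walks the caller's chain of objects being bulk-freed and relinks only those that slab_free_hook() says can be freed now (KASAN may quarantine some) into a fresh head/tail chain for the actual free. A sketch of that filtering relink over a model singly linked list:

    #include <stdbool.h>
    #include <stddef.h>

    struct obj { struct obj *next; };

    /* Rebuild the list keeping only objects keep() approves; returns the
     * new head and stores the new tail, like the *head/*tail out-params. */
    static struct obj *filter_freelist(struct obj *head_in, struct obj **tail_out,
                                       bool (*keep)(struct obj *))
    {
            struct obj *head = NULL, *tail = NULL;
            struct obj *object = head_in;

            while (object) {
                    struct obj *next = object->next;   /* read before relinking */

                    if (keep(object)) {
                            object->next = head;       /* push onto the kept chain */
                            head = object;
                            if (!tail)
                                    tail = object;
                    }
                    object = next;
            }
            *tail_out = tail;
            return head;
    }
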
2464 static void *setup_object(struct kmem_cache *s, void *object) in setup_object() argument
2466 setup_object_debug(s, object); in setup_object()
2467 object = kasan_init_slab_obj(s, object); in setup_object()
2469 kasan_unpoison_new_object(s, object); in setup_object()
2470 s->ctor(object); in setup_object()
2471 kasan_poison_new_object(s, object); in setup_object()
2473 return object; in setup_object()
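
setup_object() (lines 2464-2473) wires up debug state, lets KASAN initialize its per-object metadata, and runs the cache's constructor with the object temporarily unpoisoned so the constructor's writes are not reported as invalid accesses. The shape of that bracket, reduced to a model with injected callbacks:

    /* The object is made accessible, constructed, then hidden again until
     * the allocator actually hands it out. */
    static void *setup_object_model(void *object, void (*ctor)(void *),
                                    void (*unpoison)(void *),
                                    void (*poison)(void *))
    {
            if (ctor) {
                    unpoison(object);
                    ctor(object);
                    poison(object);
            }
            return object;
    }
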
2810 void *object; in alloc_single_from_partial() local
2814 object = slab->freelist; in alloc_single_from_partial()
2815 slab->freelist = get_freepointer(s, object); in alloc_single_from_partial()
2818 if (!alloc_debug_processing(s, slab, object, orig_size)) { in alloc_single_from_partial()
2829 return object; in alloc_single_from_partial()
2843 void *object; in alloc_single_from_new_slab() local
2846 object = slab->freelist; in alloc_single_from_new_slab()
2847 slab->freelist = get_freepointer(s, object); in alloc_single_from_new_slab()
2850 if (!alloc_debug_processing(s, slab, object, orig_size)) in alloc_single_from_new_slab()
2868 return object; in alloc_single_from_new_slab()
2905 void *object = alloc_single_from_partial(s, n, slab, in get_partial_node() local
2907 if (object) { in get_partial_node()
2909 pc->object = object; in get_partial_node()
3474 void *object = head; in free_debug_processing() local
3494 if (!free_consistency_checks(s, slab, object, addr)) in free_debug_processing()
3499 set_track_update(s, object, TRACK_FREE, addr, handle); in free_debug_processing()
3500 trace(s, slab, object, 0); in free_debug_processing()
3502 init_object(s, object, SLUB_RED_INACTIVE); in free_debug_processing()
3505 if (object != tail) { in free_debug_processing()
3506 object = get_freepointer(s, object); in free_debug_processing()
3521 slab_fix(s, "Object at 0x%p not freed", object); in free_debug_processing()
3874 freelist = pc.object; in ___slab_alloc()
3994 void *object; in __slab_alloc_node() local
4029 object = c->freelist; in __slab_alloc_node()
4055 unlikely(!object || !slab || !node_match(slab, node))) { in __slab_alloc_node()
4056 object = __slab_alloc(s, gfpflags, node, addr, c, orig_size); in __slab_alloc_node()
4058 void *next_object = get_freepointer_safe(s, object); in __slab_alloc_node()
4074 if (unlikely(!__update_cpu_freelist_fast(s, object, next_object, tid))) { in __slab_alloc_node()
4082 return object; in __slab_alloc_node()
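
The fast path in __slab_alloc_node() (lines 3994-4082) samples the per-CPU freelist and transaction id, prefetches the next object, and commits with __update_cpu_freelist_fast(), a tid-checked compare-and-exchange; if another task on the same CPU raced in between, the tid no longer matches and the loop retries. A rough user-space analogue using a C11 double-word CAS over a (freelist, tid) pair; all names are invented, the kernel uses per-CPU operations rather than a shared atomic, and a 16-byte CAS may need libatomic or -mcx16:

    #include <stdatomic.h>
    #include <stddef.h>

    struct pcpu_slot { void *freelist; unsigned long tid; };

    /* Pop one object; succeeds only if neither the freelist head nor the
     * transaction id changed since they were sampled. */
    static void *pop_fast(_Atomic struct pcpu_slot *slot, void *(*next_of)(void *))
    {
            struct pcpu_slot old, new;

            do {
                    old = atomic_load(slot);
                    if (!old.freelist)
                            return NULL;    /* the slow path would refill here */
                    new.freelist = next_of(old.freelist);
                    new.tid = old.tid + 1;  /* kernel bumps tid each transaction */
            } while (!atomic_compare_exchange_weak(slot, &old, new));

            return old.freelist;
    }
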
4090 void *object; in __slab_alloc_node() local
4097 return pc.object; in __slab_alloc_node()
4105 object = alloc_single_from_new_slab(s, slab, orig_size); in __slab_alloc_node()
4107 return object; in __slab_alloc_node()
4206 void *object; in slab_alloc_node() local
4213 object = kfence_alloc(s, orig_size, gfpflags); in slab_alloc_node()
4214 if (unlikely(object)) in slab_alloc_node()
4217 object = __slab_alloc_node(s, gfpflags, node, addr, orig_size); in slab_alloc_node()
4219 maybe_wipe_obj_freeptr(s, object); in slab_alloc_node()
4229 slab_post_alloc_hook(s, lru, gfpflags, 1, &object, init, orig_size); in slab_alloc_node()
4231 return object; in slab_alloc_node()
4674 void slab_free(struct kmem_cache *s, struct slab *slab, void *object, in slab_free() argument
4677 memcg_slab_free_hook(s, slab, &object, 1); in slab_free()
4678 alloc_tagging_slab_free_hook(s, slab, &object, 1); in slab_free()
4680 if (likely(slab_free_hook(s, object, slab_want_init_on_free(s), false))) in slab_free()
4681 do_slab_free(s, slab, object, object, 1, addr); in slab_free()
4687 void memcg_alloc_abort_single(struct kmem_cache *s, void *object) in memcg_alloc_abort_single() argument
4689 if (likely(slab_free_hook(s, object, slab_want_init_on_free(s), false))) in memcg_alloc_abort_single()
4690 do_slab_free(s, virt_to_slab(object), object, object, 1, _RET_IP_); in memcg_alloc_abort_single()
4713 void *object = delayed_free->object; in slab_free_after_rcu_debug() local
4714 struct slab *slab = virt_to_slab(object); in slab_free_after_rcu_debug()
4719 if (WARN_ON(is_kfence_address(object))) in slab_free_after_rcu_debug()
4730 if (slab_free_hook(s, object, slab_want_init_on_free(s), true)) in slab_free_after_rcu_debug()
4731 do_slab_free(s, slab, object, object, 1, _THIS_IP_); in slab_free_after_rcu_debug()
4786 static void free_large_kmalloc(struct folio *folio, void *object) in free_large_kmalloc() argument
4796 pr_warn_once("object pointer: 0x%p\n", object); in free_large_kmalloc()
4798 kmemleak_free(object); in free_large_kmalloc()
4799 kasan_kfree_large(object); in free_large_kmalloc()
4800 kmsan_kfree_large(object); in free_large_kmalloc()
4859 void kfree(const void *object) in kfree() argument
4864 void *x = (void *)object; in kfree()
4866 trace_kfree(_RET_IP_, object); in kfree()
4868 if (unlikely(ZERO_OR_NULL_PTR(object))) in kfree()
4871 folio = virt_to_folio(object); in kfree()
4873 free_large_kmalloc(folio, (void *)object); in kfree()
5192 void *object; in build_detached_freelist() local
5196 object = p[--size]; in build_detached_freelist()
5197 folio = virt_to_folio(object); in build_detached_freelist()
5201 free_large_kmalloc(folio, object); in build_detached_freelist()
5210 df->s = cache_from_obj(s, object); /* Support for memcg */ in build_detached_freelist()
5214 df->tail = object; in build_detached_freelist()
5215 df->freelist = object; in build_detached_freelist()
5218 if (is_kfence_address(object)) in build_detached_freelist()
5221 set_freepointer(df->s, object, NULL); in build_detached_freelist()
5225 object = p[--size]; in build_detached_freelist()
5227 if (df->slab == virt_to_slab(object)) { in build_detached_freelist()
5229 set_freepointer(df->s, object, df->freelist); in build_detached_freelist()
5230 df->freelist = object; in build_detached_freelist()
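
build_detached_freelist() (lines 5192-5230) consumes the caller's pointer array from the tail, uses the last object's slab as the anchor, and threads every subsequent same-slab object onto a local freelist so one slow-path call can free the whole group; large kmalloc folios and KFENCE objects are diverted before any linking. A simplified model of the grouping pass (the real code keeps scanning past foreign objects with a lookahead window; this sketch stops at the first one):

    #include <stddef.h>

    struct dobj { struct dobj *next; int slab_id; };

    struct detached {
            int slab_id;
            struct dobj *freelist;
            struct dobj *tail;
            size_t cnt;
    };

    /* Consume entries from the tail of p[] (size must be >= 1); returns
     * how many same-slab objects were threaded onto df->freelist. */
    static size_t build_detached(struct dobj **p, size_t size, struct detached *df)
    {
            struct dobj *object = p[--size];

            df->slab_id = object->slab_id;
            object->next = NULL;            /* tail of the detached list */
            df->freelist = object;
            df->tail = object;
            df->cnt = 1;

            while (size) {
                    object = p[--size];
                    if (object->slab_id != df->slab_id)
                            break;
                    object->next = df->freelist;   /* same slab: push to front */
                    df->freelist = object;
                    df->cnt++;
            }
            return df->cnt;
    }
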
5307 void *object = kfence_alloc(s, s->object_size, flags); in __kmem_cache_alloc_bulk() local
5309 if (unlikely(object)) { in __kmem_cache_alloc_bulk()
5310 p[i] = object; in __kmem_cache_alloc_bulk()
5314 object = c->freelist; in __kmem_cache_alloc_bulk()
5315 if (unlikely(!object)) { in __kmem_cache_alloc_bulk()
5343 c->freelist = get_freepointer(s, object); in __kmem_cache_alloc_bulk()
5344 p[i] = object; in __kmem_cache_alloc_bulk()
5367 void *object = kfence_alloc(s, s->object_size, flags); in __kmem_cache_alloc_bulk() local
5369 if (unlikely(object)) { in __kmem_cache_alloc_bulk()
5370 p[i] = object; in __kmem_cache_alloc_bulk()
5942 void __kmem_obj_info(struct kmem_obj_info *kpp, void *object, struct slab *slab) in __kmem_obj_info() argument
5952 kpp->kp_ptr = object; in __kmem_obj_info()
5956 objp0 = kasan_reset_tag(object); in __kmem_obj_info()