Lines Matching refs:objects (identifier references in mm/slub.c)

834 	bitmap_zero(obj_map, slab->objects);  in __fill_map()
937 if (object < base || object >= base + slab->objects * s->size || in check_valid_pointer()
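
The test at line 937 is SLUB's core pointer-validity check: an object pointer must fall inside [base, base + objects * size) and, in the full kernel version, also sit on an object-size boundary. A minimal user-space sketch of that arithmetic, with struct fields pared down to what the check needs (names modeled loosely on slub.c):

    #include <stdbool.h>
    #include <stddef.h>

    /* Simplified stand-ins; the real struct slab and struct kmem_cache
     * carry far more state than the two fields this check reads. */
    struct slab       { unsigned int objects; };
    struct kmem_cache { unsigned int size; };

    /* Mirrors the range-and-alignment logic of check_valid_pointer():
     * reject pointers outside the payload, or not on an object boundary. */
    static bool valid_object_pointer(const struct kmem_cache *s,
                                     const struct slab *slab,
                                     const char *object, const char *base)
    {
        if (object < base ||
            object >= base + (size_t)slab->objects * s->size)
            return false;
        return (size_t)(object - base) % s->size == 0;
    }
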
1048 slab, slab->objects, slab->inuse, slab->freelist, in print_slab_info()
1457 if (slab->objects > maxobj) { in check_slab()
1459 slab->objects, maxobj); in check_slab()
1462 if (slab->inuse > slab->objects) { in check_slab()
1464 slab->inuse, slab->objects); in check_slab()
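
check_slab() (lines 1457-1464) enforces two counter invariants: a slab cannot claim more objects than its page order allows (maxobj), and inuse can never exceed objects. Expressed as plain assertions in a sketch; the kernel reports the corruption and attempts repair rather than aborting:

    #include <assert.h>

    static void check_slab_counters(unsigned int objects,
                                    unsigned int inuse,
                                    unsigned int maxobj)
    {
        assert(objects <= maxobj); /* capacity bounded by the page order */
        assert(inuse <= objects);  /* can't have more allocated than exist */
    }
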
1489 while (fp && nr <= slab->objects) { in on_freelist()
1501 slab->inuse = slab->objects; in on_freelist()
1511 if (nr > slab->objects) { in on_freelist()
1514 slab->inuse = slab->objects; in on_freelist()
1523 if (slab->objects != max_objects) { in on_freelist()
1525 slab->objects, max_objects); in on_freelist()
1526 slab->objects = max_objects; in on_freelist()
1529 if (slab->inuse != slab->objects - nr) { in on_freelist()
1531 slab->inuse, slab->objects - nr); in on_freelist()
1532 slab->inuse = slab->objects - nr; in on_freelist()
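
The on_freelist() cluster (lines 1489-1532) walks the freelist and counts free objects, refusing to follow more links than the slab can hold so that a corrupted, cyclic list cannot loop forever; when the walk disagrees with the stored counters, the counters are rewritten (lines 1501, 1514, 1526, 1532). A sketch of the counting step, returning the inuse value the kernel would restore:

    struct free_obj { struct free_obj *next; };

    static unsigned int expected_inuse(const struct free_obj *fp,
                                       unsigned int objects)
    {
        unsigned int nr = 0;

        /* Stop once we have seen more nodes than could possibly exist. */
        while (fp && nr <= objects) {
            fp = fp->next;
            nr++;
        }
        if (nr > objects)       /* corrupt list: treat slab as fully used */
            return objects;
        return objects - nr;    /* inuse must equal capacity minus free */
    }
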
1583 static inline void inc_slabs_node(struct kmem_cache *s, int node, int objects) in inc_slabs_node() argument
1588 atomic_long_add(objects, &n->total_objects); in inc_slabs_node()
1590 static inline void dec_slabs_node(struct kmem_cache *s, int node, int objects) in dec_slabs_node() argument
1595 atomic_long_sub(objects, &n->total_objects); in dec_slabs_node()
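
inc_slabs_node() and dec_slabs_node() (lines 1583-1595) maintain per-NUMA-node totals: one slab added or removed, plus objects worth of capacity. Lines 1942 and 1944 look like the empty stubs used when this accounting is configured out. A user-space sketch with C11 atomics standing in for atomic_long_add()/atomic_long_sub(); the struct name and fields are illustrative:

    #include <stdatomic.h>

    struct node_stats {
        atomic_long nr_slabs;      /* slabs on this node */
        atomic_long total_objects; /* object capacity on this node */
    };

    static void inc_slabs_node(struct node_stats *n, long objects)
    {
        atomic_fetch_add(&n->nr_slabs, 1);
        atomic_fetch_add(&n->total_objects, objects);
    }

    static void dec_slabs_node(struct node_stats *n, long objects)
    {
        atomic_fetch_sub(&n->nr_slabs, 1);
        atomic_fetch_sub(&n->total_objects, objects);
    }
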
1658 slab->inuse = slab->objects; in alloc_debug_processing()
1942 int objects) {} in inc_slabs_node() argument
1944 int objects) {} in dec_slabs_node() argument
1980 struct slabobj_ext *vec, unsigned int objects) in handle_failed_objexts_alloc() argument
1990 for (i = 0; i < objects; i++) in handle_failed_objexts_alloc()
2000 struct slabobj_ext *vec, unsigned int objects) {} in handle_failed_objexts_alloc() argument
2020 unsigned int objects = objs_per_slab(s, slab); in alloc_slab_obj_exts() local
2028 vec = kcalloc_node(objects, sizeof(struct slabobj_ext), gfp, in alloc_slab_obj_exts()
2043 handle_failed_objexts_alloc(old_exts, vec, objects); in alloc_slab_obj_exts()
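
alloc_slab_obj_exts() (lines 2020-2043) sizes a per-object extension vector from objs_per_slab() and allocates it zeroed via kcalloc_node(); on a racing failure, handle_failed_objexts_alloc() iterates over all objects entries. The shape of that allocation sketched in user space, with calloc() standing in for kcalloc_node() and a placeholder payload:

    #include <stdlib.h>

    struct slabobj_ext { unsigned long ext; }; /* placeholder payload */

    /* One zeroed extension slot per object in the slab; the kernel
     * additionally places the vector on the slab's NUMA node. */
    static struct slabobj_ext *alloc_obj_ext_vector(unsigned int objects)
    {
        return calloc(objects, sizeof(struct slabobj_ext));
    }
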
2158 int objects) in __alloc_tagging_slab_free_hook() argument
2171 for (i = 0; i < objects; i++) { in __alloc_tagging_slab_free_hook()
2180 int objects) in alloc_tagging_slab_free_hook() argument
2183 __alloc_tagging_slab_free_hook(s, slab, p, objects); in alloc_tagging_slab_free_hook()
2195 int objects) in alloc_tagging_slab_free_hook() argument
2231 int objects) in memcg_slab_free_hook() argument
2242 __memcg_slab_free_hook(s, slab, p, objects, obj_exts); in memcg_slab_free_hook()
2308 void **p, int objects) in memcg_slab_free_hook() argument
2573 if (slab->objects < 2 || !s->random_seq) in shuffle_freelist()
2579 page_limit = slab->objects * s->size; in shuffle_freelist()
2587 for (idx = 1; idx < slab->objects; idx++) { in shuffle_freelist()
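
shuffle_freelist() (lines 2573-2587) bails out for slabs with fewer than two objects or without a precomputed random sequence, then threads the initial freelist through the objects in randomized order. A sketch of the idea, substituting a Fisher-Yates shuffle of object indices for the kernel's s->random_seq:

    #include <stdlib.h>

    static void shuffled_object_order(unsigned int *idx, unsigned int objects)
    {
        unsigned int i, j, tmp;

        for (i = 0; i < objects; i++)
            idx[i] = i;
        if (objects < 2)  /* nothing to shuffle, same early-out as the kernel */
            return;
        for (i = objects - 1; i > 0; i--) {
            j = (unsigned int)rand() % (i + 1);
            tmp = idx[i];
            idx[i] = idx[j];
            idx[j] = tmp;
        }
    }
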
2669 slab->objects = oo_objects(oo); in allocate_slab()
2690 for (idx = 0, p = start; idx < slab->objects - 1; idx++) { in allocate_slab()
2740 for_each_object(p, s, slab_address(slab), slab->objects) in free_slab()
2752 dec_slabs_node(s, slab_nid(slab), slab->objects); in discard_slab()
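
allocate_slab() (lines 2669-2690) takes the capacity from the order pair via oo_objects() and then links every object to its neighbor to form the initial freelist; discard_slab() (line 2752) reverses the node accounting shown above. The linking loop sketched with the free pointer stored at the start of each object (the kernel stores it at an offset and may mangle it; assumes at least one pointer-aligned object):

    #include <stddef.h>

    static void init_freelist(void *start, unsigned int objects, size_t size)
    {
        char *p = start;
        unsigned int idx;

        for (idx = 0; idx < objects - 1; idx++, p += size)
            *(void **)p = p + size;  /* each object points at its neighbor */
        *(void **)p = NULL;          /* last object terminates the list */
    }
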
2824 if (slab->inuse == slab->objects) { in alloc_single_from_partial()
2860 if (slab->inuse == slab->objects) in alloc_single_from_new_slab()
2865 inc_slabs_node(s, nid, slab->objects); in alloc_single_from_new_slab()
3460 return slab->objects - slab->inuse; in count_free()
3555 x += slab->objects - slab->inuse; in count_partial_free_approx()
3565 x += slab->objects - slab->inuse; in count_partial_free_approx()
3570 x += slab->objects - slab->inuse; in count_partial_free_approx()
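
count_free() (line 3460) and count_partial_free_approx() (lines 3555-3570) rest on one identity: a slab's free count is its capacity minus its allocated count, accumulated across the partial list. Sketched over a plain array standing in for the kernel's slab list:

    struct slab_counts { unsigned int objects, inuse; };

    static unsigned long count_partial_free(const struct slab_counts *slabs,
                                            unsigned long nr_slabs)
    {
        unsigned long i, x = 0;

        for (i = 0; i < nr_slabs; i++)
            x += slabs[i].objects - slabs[i].inuse; /* free = capacity - used */
        return x;
    }
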
3664 new.inuse = slab->objects; in get_freelist()
3691 new.inuse = slab->objects; in freeze_slab()
3924 slab->inuse = slab->objects; in ___slab_alloc()
3927 inc_slabs_node(s, slab_nid(slab), slab->objects); in ___slab_alloc()
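
get_freelist(), freeze_slab() and ___slab_alloc() (lines 3664-3927) share a convention: once a CPU takes over a slab's freelist, the slab is marked fully in use (inuse = objects), because its free objects are now tracked by the per-CPU freelist rather than by the slab itself. A sketch of that hand-off over a simplified model struct:

    struct slab_model {
        unsigned int objects;
        unsigned int inuse;
        void *freelist;
    };

    static void *take_freelist(struct slab_model *slab)
    {
        void *list = slab->freelist;

        slab->freelist = NULL;
        slab->inuse = slab->objects; /* CPU now owns every free object */
        return list;
    }
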
4463 dec_slabs_node(s, slab_nid(slab_free), slab_free->objects); in free_to_partial_list()
5629 inc_slabs_node(kmem_cache_node, node, slab->objects); in early_kmem_cache_node_alloc()
5871 for_each_object(p, s, addr, slab->objects) { in list_slab_objects()
5966 if (WARN_ON_ONCE(objp < base || objp >= base + slab->objects * s->size in __kmem_obj_info()
6146 int free = slab->objects - slab->inuse; in __kmem_cache_do_shrink()
6154 if (free == slab->objects) { in __kmem_cache_do_shrink()
6158 dec_slabs_node(s, node, slab->objects); in __kmem_cache_do_shrink()
6491 return slab->objects; in count_total()
6507 for_each_object(p, s, addr, slab->objects) { in validate_slab()
6732 for_each_object(p, s, addr, slab->objects) in process_slab()
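
validate_slab() (line 6507) and process_slab() (line 6732), like free_slab() and list_slab_objects() earlier in this list, visit every slot with for_each_object(), which strides from the slab base in s->size steps for exactly objects slots. A simplified model of that macro (the kernel version also fixes up left red-zone padding):

    #include <stddef.h>

    /* p walks addr, addr + size, ... for exactly `objects` slots. */
    #define for_each_object(p, size, addr, objects)              \
        for ((p) = (char *)(addr);                               \
             (p) < (char *)(addr) + (size_t)(objects) * (size);  \
             (p) += (size))
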
6784 x = slab->objects; in show_slab_objects()
6946 unsigned int objects; in cpu_partial_store() local
6949 err = kstrtouint(buf, 10, &objects); in cpu_partial_store()
6952 if (objects && !kmem_cache_has_cpu_partial(s)) in cpu_partial_store()
6955 slub_set_cpu_partial(s, objects); in cpu_partial_store()
6995 int objects = 0; in slabs_cpu_partial_show() local
7012 objects = (slabs * oo_objects(s->oo)) / 2; in slabs_cpu_partial_show()
7013 len += sysfs_emit_at(buf, len, "%d(%d)", objects, slabs); in slabs_cpu_partial_show()
7022 objects = (slabs * oo_objects(s->oo)) / 2; in slabs_cpu_partial_show()
7024 cpu, objects, slabs); in slabs_cpu_partial_show()
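
cpu_partial_store() (lines 6946-6955) parses the sysfs write with kstrtouint() and rejects a nonzero value when the cache lacks per-CPU partial support; slabs_cpu_partial_show() (lines 7012-7022) cannot cheaply count objects sitting on per-CPU partial slabs, so it reports half of their total capacity as an estimate. Both steps sketched in user space, with strtoul() standing in for kstrtouint() and illustrative names:

    #include <errno.h>
    #include <limits.h>
    #include <stdlib.h>

    static int parse_objects(const char *buf, unsigned int *objects)
    {
        char *end;
        unsigned long v = strtoul(buf, &end, 10);

        if (end == buf || v > UINT_MAX)
            return -EINVAL;
        *objects = (unsigned int)v;
        return 0;
    }

    /* Matches (slabs * oo_objects(s->oo)) / 2: partial slabs are
     * assumed to be, on average, half full. */
    static unsigned int approx_partial_objects(unsigned int slabs,
                                               unsigned int objs_per_slab)
    {
        return slabs * objs_per_slab / 2;
    }
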
7085 SLAB_ATTR_RO(objects);