Lines matching refs:z (the local zone pointer z, of type struct rt_slab_zone *, inside RT-Thread's slab allocator: rt_slab_alloc(), rt_slab_realloc(), rt_slab_free())
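
For orientation, here is a minimal sketch of the two bookkeeping structures these references imply. The field names come straight from the lines below; the types and ordering are assumptions for illustration, not the authoritative RT-Thread declarations.

    /* Sketch only: reconstructed from the field references in this listing,
     * not the exact RT-Thread definitions. */
    struct rt_slab_chunk
    {
        struct rt_slab_chunk *c_next;      /* next free chunk on the zone's free list */
    };

    struct rt_slab_zone
    {
        rt_uint32_t           z_magic;     /* ZALLOC_SLAB_MAGIC while the zone is live */
        rt_uint32_t           z_nfree;     /* chunks still available in this zone */
        struct rt_slab_zone  *z_next;      /* next zone serving the same size class */
        rt_uint8_t           *z_baseptr;   /* start of the chunk area in the zone's pages */
        rt_uint32_t           z_uindex;    /* high-water index of never-allocated chunks */
        rt_uint32_t           z_chunksize; /* size of every chunk in this zone */
        rt_uint32_t           z_zoneindex; /* index into slab->zone_array[] */
        rt_uint32_t           z_nmax;      /* total chunks the zone can hold */
        struct rt_slab_chunk *z_freechunk; /* free list of chunks returned by rt_slab_free() */
    };
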
492 struct rt_slab_zone *z; in rt_slab_alloc() local
543 if ((z = slab->zone_array[zi]) != RT_NULL) in rt_slab_alloc()
545 RT_ASSERT(z->z_nfree > 0); in rt_slab_alloc()
548 if (--z->z_nfree == 0) in rt_slab_alloc()
550 slab->zone_array[zi] = z->z_next; in rt_slab_alloc()
551 z->z_next = RT_NULL; in rt_slab_alloc()
560 if (z->z_uindex + 1 != z->z_nmax) in rt_slab_alloc()
562 z->z_uindex = z->z_uindex + 1; in rt_slab_alloc()
563 chunk = (struct rt_slab_chunk *)(z->z_baseptr + z->z_uindex * size); in rt_slab_alloc()
568 chunk = z->z_freechunk; in rt_slab_alloc()
571 z->z_freechunk = z->z_freechunk->c_next; in rt_slab_alloc()
574 slab->parent.used += z->z_chunksize; in rt_slab_alloc()
592 if ((z = slab->zone_free) != RT_NULL) in rt_slab_alloc()
595 slab->zone_free = z->z_next; in rt_slab_alloc()
601 z = rt_slab_page_alloc(m, slab->zone_size / RT_MM_PAGE_SIZE); in rt_slab_alloc()
602 if (z == RT_NULL) in rt_slab_alloc()
608 (rt_uintptr_t)z); in rt_slab_alloc()
611 for (off = 0, kup = btokup(z); off < slab->zone_page_cnt; off ++) in rt_slab_alloc()
621 rt_memset(z, 0, sizeof(struct rt_slab_zone)); in rt_slab_alloc()
635 z->z_magic = ZALLOC_SLAB_MAGIC; in rt_slab_alloc()
636 z->z_zoneindex = zi; in rt_slab_alloc()
637 z->z_nmax = (slab->zone_size - off) / size; in rt_slab_alloc()
638 z->z_nfree = z->z_nmax - 1; in rt_slab_alloc()
639 z->z_baseptr = (rt_uint8_t *)z + off; in rt_slab_alloc()
640 z->z_uindex = 0; in rt_slab_alloc()
641 z->z_chunksize = size; in rt_slab_alloc()
643 chunk = (struct rt_slab_chunk *)(z->z_baseptr + z->z_uindex * size); in rt_slab_alloc()
646 z->z_next = slab->zone_array[zi]; in rt_slab_alloc()
647 slab->zone_array[zi] = z; in rt_slab_alloc()
649 slab->parent.used += z->z_chunksize; in rt_slab_alloc()
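
Read in order, the rt_slab_alloc() references above trace a two-step path: serve the request from an existing zone of the right size class when one is cached in zone_array[zi]; otherwise take a zone off the zone_free cache or page-allocate a fresh one, initialize its header, and hand out its first chunk. The sketch below condenses that flow. It is illustrative only: zoneindex() (assumed to round the request up to its size class), the first parameter of rt_slab_page_alloc(), and the header-offset computation are simplified, and the per-page kup/btokup bookkeeping is reduced to a comment.

    /* Condensed sketch of the allocation path implied by the lines above. */
    static void *slab_alloc_sketch(struct rt_slab *slab, rt_size_t size)
    {
        rt_size_t zi = zoneindex(&size);             /* size class; size rounded up to it */
        struct rt_slab_zone *z;
        struct rt_slab_chunk *chunk;
        rt_size_t off;

        if ((z = slab->zone_array[zi]) != RT_NULL)   /* fast path: zone with free chunks */
        {
            RT_ASSERT(z->z_nfree > 0);
            if (--z->z_nfree == 0)                   /* last free chunk: unlink the zone */
            {
                slab->zone_array[zi] = z->z_next;
                z->z_next = RT_NULL;
            }
            if (z->z_uindex + 1 != z->z_nmax)        /* untouched chunks remain */
            {
                z->z_uindex = z->z_uindex + 1;
                chunk = (struct rt_slab_chunk *)(z->z_baseptr + z->z_uindex * size);
            }
            else                                     /* reuse a previously freed chunk */
            {
                chunk = z->z_freechunk;
                z->z_freechunk = z->z_freechunk->c_next;
            }
            slab->parent.used += z->z_chunksize;
            return chunk;
        }

        /* slow path: reuse a cached free zone or page-allocate a new one */
        if ((z = slab->zone_free) != RT_NULL)
        {
            slab->zone_free = z->z_next;
        }
        else
        {
            z = rt_slab_page_alloc(slab, slab->zone_size / RT_MM_PAGE_SIZE);
            if (z == RT_NULL)
                return RT_NULL;
            /* the real code records the new zone in the per-page kup table here (btokup) */
        }

        off = sizeof(struct rt_slab_zone);           /* real code aligns this to the chunk size */
        rt_memset(z, 0, sizeof(struct rt_slab_zone));
        z->z_magic     = ZALLOC_SLAB_MAGIC;
        z->z_zoneindex = zi;
        z->z_nmax      = (slab->zone_size - off) / size;
        z->z_nfree     = z->z_nmax - 1;              /* the first chunk is handed out now */
        z->z_baseptr   = (rt_uint8_t *)z + off;
        z->z_uindex    = 0;
        z->z_chunksize = size;

        chunk = (struct rt_slab_chunk *)(z->z_baseptr + z->z_uindex * size);
        z->z_next = slab->zone_array[zi];            /* publish the zone for this class */
        slab->zone_array[zi] = z;
        slab->parent.used += z->z_chunksize;
        return chunk;
    }
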
672 struct rt_slab_zone *z; in rt_slab_realloc() local
704 z = (struct rt_slab_zone *)(((rt_uintptr_t)ptr & ~RT_MM_PAGE_MASK) - in rt_slab_realloc()
706 RT_ASSERT(z->z_magic == ZALLOC_SLAB_MAGIC); in rt_slab_realloc()
709 if (z->z_chunksize == size) in rt_slab_realloc()
720 rt_memcpy(nptr, ptr, size > z->z_chunksize ? z->z_chunksize : size); in rt_slab_realloc()
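
The rt_slab_realloc() references show how the allocator recovers the owning zone from a raw pointer: mask the pointer down to its page base, step back to the zone header recorded for that page, and verify z_magic. If the chunk size already matches the (class-rounded) request, the pointer is returned unchanged; otherwise a new block is allocated, the smaller of the old and new sizes is copied, and the old chunk is freed. A sketch under the same caveats as above; zone_of() is a hypothetical stand-in for the page-mask plus kup lookup in the real code.

    /* Sketch of the resize logic implied by the rt_slab_realloc() references.
     * zone_of() is hypothetical shorthand for the page-mask + kup lookup. */
    static void slab_free_sketch(struct rt_slab *slab, void *ptr);   /* defined in the free-path sketch below */

    static void *slab_realloc_sketch(struct rt_slab *slab, void *ptr, rt_size_t size)
    {
        struct rt_slab_zone *z = zone_of(ptr);       /* zone header owning ptr */
        void *nptr;

        RT_ASSERT(z->z_magic == ZALLOC_SLAB_MAGIC);

        if (z->z_chunksize == size)                  /* already in the right size class */
            return ptr;                              /* (real code rounds size via zoneindex() first) */

        nptr = slab_alloc_sketch(slab, size);        /* allocate, copy, release the old chunk */
        if (nptr == RT_NULL)
            return RT_NULL;

        rt_memcpy(nptr, ptr, size > z->z_chunksize ? z->z_chunksize : size);
        slab_free_sketch(slab, ptr);
        return nptr;
    }
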
740 struct rt_slab_zone *z; in rt_slab_free() local
782 z = (struct rt_slab_zone *)(((rt_uintptr_t)ptr & ~RT_MM_PAGE_MASK) - in rt_slab_free()
784 RT_ASSERT(z->z_magic == ZALLOC_SLAB_MAGIC); in rt_slab_free()
787 chunk->c_next = z->z_freechunk; in rt_slab_free()
788 z->z_freechunk = chunk; in rt_slab_free()
790 slab->parent.used -= z->z_chunksize; in rt_slab_free()
796 if (z->z_nfree++ == 0) in rt_slab_free()
798 z->z_next = slab->zone_array[z->z_zoneindex]; in rt_slab_free()
799 slab->zone_array[z->z_zoneindex] = z; in rt_slab_free()
808 if (z->z_nfree == z->z_nmax && in rt_slab_free()
809 (z->z_next || slab->zone_array[z->z_zoneindex] != z)) in rt_slab_free()
814 (rt_uintptr_t)z, z->z_zoneindex); in rt_slab_free()
817 for (pz = &slab->zone_array[z->z_zoneindex]; z != *pz; pz = &(*pz)->z_next) in rt_slab_free()
819 *pz = z->z_next; in rt_slab_free()
822 z->z_magic = RT_UINT32_MAX; in rt_slab_free()
825 z->z_next = slab->zone_free; in rt_slab_free()
826 slab->zone_free = z; in rt_slab_free()
835 z = slab->zone_free; in rt_slab_free()
836 slab->zone_free = z->z_next; in rt_slab_free()
840 for (i = 0, kup = btokup(z); i < slab->zone_page_cnt; i ++) in rt_slab_free()
848 rt_slab_page_free(m, z, slab->zone_size / RT_MM_PAGE_SIZE); in rt_slab_free()
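
The rt_slab_free() references cover the reverse path: the chunk is pushed onto its zone's free list, a previously exhausted zone is re-linked into zone_array, and a zone that becomes completely free (and is not the last one for its size class) is unlinked, its magic poisoned, and parked on slab->zone_free; once too many free zones accumulate, one is handed back to the page allocator. The sketch below keeps the same simplifications: zone_of() is hypothetical, the kup bookkeeping is omitted, and the free-zone counter and ZONE_RELEASE_THRESH names are assumptions, not taken from the listing.

    /* Sketch of the free path implied by the rt_slab_free() references.
     * zone_free_cnt and ZONE_RELEASE_THRESH stand in for the real cache limit. */
    static void slab_free_sketch(struct rt_slab *slab, void *ptr)
    {
        struct rt_slab_zone *z = zone_of(ptr);
        struct rt_slab_chunk *chunk = (struct rt_slab_chunk *)ptr;

        RT_ASSERT(z->z_magic == ZALLOC_SLAB_MAGIC);

        chunk->c_next = z->z_freechunk;              /* push the chunk on the zone free list */
        z->z_freechunk = chunk;
        slab->parent.used -= z->z_chunksize;

        if (z->z_nfree++ == 0)                       /* zone was exhausted: make it visible */
        {                                            /* for its size class again */
            z->z_next = slab->zone_array[z->z_zoneindex];
            slab->zone_array[z->z_zoneindex] = z;
        }

        /* zone fully free and not the only zone of its class: retire it */
        if (z->z_nfree == z->z_nmax &&
            (z->z_next || slab->zone_array[z->z_zoneindex] != z))
        {
            struct rt_slab_zone **pz;

            /* unlink z from its size-class list */
            for (pz = &slab->zone_array[z->z_zoneindex]; z != *pz; pz = &(*pz)->z_next)
                ;
            *pz = z->z_next;

            z->z_magic = RT_UINT32_MAX;              /* poison the header */
            z->z_next = slab->zone_free;             /* park it on the free-zone cache */
            slab->zone_free = z;

            if (++slab->zone_free_cnt > ZONE_RELEASE_THRESH)
            {
                z = slab->zone_free;                 /* hand one cached zone back */
                slab->zone_free = z->z_next;
                slab->zone_free_cnt--;
                /* the real code also clears the per-page kup entries here */
                rt_slab_page_free(slab, z, slab->zone_size / RT_MM_PAGE_SIZE);
            }
        }
    }
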