Lines matching refs: cache (a short allocator sketch follows the listing)
76 uint8_t bch_inc_gen(struct cache *ca, struct bucket *b) in bch_inc_gen()
88 struct cache *ca; in bch_rescale_priorities()
90 unsigned long next = c->nbuckets * c->cache->sb.bucket_size / 1024; in bch_rescale_priorities()
106 ca = c->cache; in bch_rescale_priorities()
130 bool bch_can_invalidate_bucket(struct cache *ca, struct bucket *b) in bch_can_invalidate_bucket()
137 void __bch_invalidate_one_bucket(struct cache *ca, struct bucket *b) in __bch_invalidate_one_bucket()
151 static void bch_invalidate_one_bucket(struct cache *ca, struct bucket *b) in bch_invalidate_one_bucket()
177 static void invalidate_buckets_lru(struct cache *ca) in invalidate_buckets_lru()
214 static void invalidate_buckets_fifo(struct cache *ca) in invalidate_buckets_fifo()
237 static void invalidate_buckets_random(struct cache *ca) in invalidate_buckets_random()
263 static void invalidate_buckets(struct cache *ca) in invalidate_buckets()
300 static int bch_allocator_push(struct cache *ca, long bucket) in bch_allocator_push()
317 struct cache *ca = arg; in bch_allocator_thread()
389 long bch_bucket_alloc(struct cache *ca, unsigned int reserve, bool wait) in bch_bucket_alloc()
466 void __bch_bucket_free(struct cache *ca, struct bucket *b) in __bch_bucket_free()
482 __bch_bucket_free(c->cache, PTR_BUCKET(c, k, i)); in bch_bucket_free()
488 struct cache *ca; in __bch_bucket_alloc_set()
499 ca = c->cache; in __bch_bucket_alloc_set()
578 ret->sectors_free = c->cache->sb.bucket_size; in pick_data_bucket()
669 &c->cache->sectors_written); in bch_alloc_sectors()
672 if (b->sectors_free < c->cache->sb.block_size) in bch_alloc_sectors()
720 int bch_cache_allocator_start(struct cache *ca) in bch_cache_allocator_start()
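The matches above appear to come from bcache's bucket allocator (the bch_* and invalidate_buckets_* functions, likely drivers/md/bcache/alloc.c): an allocator thread invalidates buckets, picking victims by LRU, FIFO, or random policy, and pushes the reclaimed bucket indices onto free lists via bch_allocator_push(), while bch_bucket_alloc() pops indices back off for writers. The user-space sketch below only models that producer/consumer free list; struct bucket_fifo and these fifo_push()/fifo_pop() functions are simplified stand-ins written for illustration, not the kernel's own FIFO helpers or allocator code.

#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

/* Toy model of one free-bucket list, standing in for a per-reserve FIFO.
 * Illustrative only: the kernel uses its own FIFO helpers and sizes the
 * lists from the superblock, not this struct. */
struct bucket_fifo {
	long *data;
	size_t size;   /* capacity */
	size_t front;  /* next slot to pop */
	size_t back;   /* next slot to push */
	size_t used;
};

static bool fifo_push(struct bucket_fifo *f, long bucket)
{
	if (f->used == f->size)
		return false;          /* full: the allocator thread would back off */
	f->data[f->back] = bucket;
	f->back = (f->back + 1) % f->size;
	f->used++;
	return true;
}

static bool fifo_pop(struct bucket_fifo *f, long *bucket)
{
	if (!f->used)
		return false;          /* empty: an allocation would have to wait */
	*bucket = f->data[f->front];
	f->front = (f->front + 1) % f->size;
	f->used--;
	return true;
}

int main(void)
{
	struct bucket_fifo free_list = {
		.data = malloc(8 * sizeof(long)),
		.size = 8,
	};
	long b;

	/* "Allocator thread" side: invalidated buckets 3, 7, 11 become reusable. */
	fifo_push(&free_list, 3);
	fifo_push(&free_list, 7);
	fifo_push(&free_list, 11);

	/* "bch_bucket_alloc()" side: consumers pop buckets in FIFO order. */
	while (fifo_pop(&free_list, &b))
		printf("allocated bucket %ld\n", b);

	free(free_list.data);
	return 0;
}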