Lines matching refs: list, in the bcachefs btree node cache (fs/bcachefs/btree_cache.c)
47 static inline size_t btree_cache_can_free(struct btree_cache_list *list) in btree_cache_can_free() argument
49 struct btree_cache *bc = container_of(list, struct btree_cache, live[list->idx]); in btree_cache_can_free()
51 size_t can_free = list->nr; in btree_cache_can_free()
52 if (!list->idx) in btree_cache_can_free()
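
Lines 47-52 are btree_cache_can_free(): given a pointer to one of the cache's embedded struct btree_cache_list members, it recovers the enclosing struct btree_cache with container_of(), using the list's own idx field to name the right live[] slot. A minimal sketch of that pattern, assuming kernel headers; any field not visible in the references above (nr_reserve, the reserve arithmetic) is a guess.

#include <linux/container_of.h>
#include <linux/list.h>
#include <linux/types.h>

/* Trimmed-down stand-ins; only the fields referenced above are taken from the listing. */
struct btree_cache_list {
	unsigned		idx;
	struct list_head	list;
	size_t			nr;
};

struct btree_cache {
	/* ... other members elided ... */
	struct btree_cache_list	live[2];
	size_t			nr_reserve;	/* assumed: nodes held back from the shrinker */
};

static inline size_t btree_cache_can_free(struct btree_cache_list *list)
{
	/* Walk back from &bc->live[list->idx] to the enclosing btree_cache. */
	struct btree_cache *bc =
		container_of(list, struct btree_cache, live[list->idx]);

	size_t can_free = list->nr;

	/* Assumed from line 52: only list 0 keeps a reserve back. */
	if (!list->idx)
		can_free = can_free > bc->nr_reserve ? can_free - bc->nr_reserve : 0;
	return can_free;
}

Storing idx inside the list structure is what lets container_of() be used with a variable array index here.
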
59 BUG_ON(!list_empty(&b->list)); in btree_node_to_freedlist()
62 list_add(&b->list, &bc->freed_pcpu); in btree_node_to_freedlist()
64 list_add(&b->list, &bc->freed_nonpcpu); in btree_node_to_freedlist()
69 BUG_ON(!list_empty(&b->list)); in __bch2_btree_node_to_freelist()
73 list_add(&b->list, &bc->freeable); in __bch2_btree_node_to_freelist()
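
Lines 59-73 are btree_node_to_freedlist() and __bch2_btree_node_to_freelist(): a node is asserted to be off every list before it is re-added to bc->freed_pcpu, bc->freed_nonpcpu or bc->freeable. A rough sketch of the freed-list half; the condition choosing between the pcpu and non-pcpu lists is not visible in the listing, so the pcpu_read_locks field below is a hypothetical stand-in.

#include <linux/bug.h>
#include <linux/list.h>

struct btree {
	struct list_head list;
	bool		 pcpu_read_locks;	/* hypothetical stand-in for the real test */
};

struct btree_cache {
	struct list_head freed_pcpu;
	struct list_head freed_nonpcpu;
};

static void btree_node_to_freedlist(struct btree_cache *bc, struct btree *b)
{
	/* The caller must already have detached b from its old list. */
	BUG_ON(!list_empty(&b->list));

	if (b->pcpu_read_locks)
		list_add(&b->list, &bc->freed_pcpu);
	else
		list_add(&b->list, &bc->freed_nonpcpu);
}
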
90 BUG_ON(!list_empty(&b->list)); in __btree_node_data_free()
119 BUG_ON(list_empty(&b->list)); in btree_node_data_free()
120 list_del_init(&b->list); in btree_node_data_free()
181 INIT_LIST_HEAD(&b->list); in __btree_node_mem_alloc()
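
Lines 90-181 show the other side of those assertions: nodes are set up with INIT_LIST_HEAD() and removed with list_del_init(), never plain list_del(), while they can still be re-added. A small sketch of why that matters:

#include <linux/list.h>

struct btree { struct list_head list; };

static void take_off_list(struct btree *b)
{
	/*
	 * list_del() poisons the entry's pointers, so a later list_empty()
	 * check on it would report "still on a list".  list_del_init()
	 * relinks the entry to itself instead, which is the same state
	 * INIT_LIST_HEAD() establishes at allocation time, so the
	 * BUG_ON(!list_empty(&b->list)) assertions elsewhere stay meaningful.
	 */
	list_del_init(&b->list);
}
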
220 list_move(&b->list, &bc->live[1].list); in bch2_node_pin()
236 list_for_each_entry_safe(b, n, &bc->live[1].list, list) { in bch2_btree_cache_unpin()
238 list_move(&b->list, &bc->live[0].list); in bch2_btree_cache_unpin()
261 list_del_init(&b->list); in __bch2_btree_node_hash_remove()
272 BUG_ON(!list_empty(&b->list)); in __bch2_btree_node_hash_insert()
287 list_add_tail(&b->list, &bc->live[p].list); in __bch2_btree_node_hash_insert()
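
Lines 220-287 cover pinning and hash insert/removal: bch2_node_pin() moves a node onto live[1].list, bch2_btree_cache_unpin() walks live[1] with the _safe iterator and moves everything back onto live[0], and __bch2_btree_node_hash_insert() appends new nodes with list_add_tail(). A sketch of the unpin walk; the counter updates are assumed rather than taken from the listing.

#include <linux/list.h>
#include <linux/types.h>

struct btree { struct list_head list; };
struct btree_cache_list { struct list_head list; size_t nr; };
struct btree_cache { struct btree_cache_list live[2]; };

/* Move every node on the pinned list (live[1]) back onto the default list (live[0]). */
static void btree_cache_unpin_all(struct btree_cache *bc)
{
	struct btree *b, *n;

	/*
	 * The _safe iterator caches the next entry in @n, so list_move()
	 * may unlink @b without breaking the walk.
	 */
	list_for_each_entry_safe(b, n, &bc->live[1].list, list) {
		list_move(&b->list, &bc->live[0].list);
		bc->live[1].nr--;	/* counter bookkeeping is an assumption */
		bc->live[0].nr++;
	}
}
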
460 struct btree_cache_list *list = shrink->private_data; in bch2_btree_cache_scan() local
461 struct btree_cache *bc = container_of(list, struct btree_cache, live[list->idx]); in bch2_btree_cache_scan()
470 bool trigger_writes = atomic_long_read(&bc->nr_dirty) + nr >= list->nr * 3 / 4; in bch2_btree_cache_scan()
485 can_free = btree_cache_can_free(list); in bch2_btree_cache_scan()
492 list_for_each_entry_safe(b, t, &bc->freeable, list) { in bch2_btree_cache_scan()
514 list_for_each_entry_safe(b, t, &list->list, list) { in bch2_btree_cache_scan()
539 list_move(&list->list, &b->list); in bch2_btree_cache_scan()
553 if (&t->list != &list->list) in bch2_btree_cache_scan()
554 list_move_tail(&list->list, &t->list); in bch2_btree_cache_scan()
567 struct btree_cache_list *list = shrink->private_data; in bch2_btree_cache_count() local
572 return btree_cache_can_free(list); in bch2_btree_cache_count()
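
Lines 460-572 are the shrinker callbacks. The interesting list trick is at lines 539 and 553-554: instead of keeping a separate cursor, the scan rotates the list head itself with list_move()/list_move_tail() so the next scan resumes where this one stopped. A standalone sketch of that idiom:

#include <linux/list.h>

struct btree { struct list_head list; };

/*
 * After a partial scan that stopped at @b, re-insert the list head right
 * after @b.  The entries already visited now sit behind the head, so the
 * next forward walk starts at the first entry that was not reached.
 */
static void remember_scan_position(struct list_head *lru, struct btree *b)
{
	list_move(lru, &b->list);
}

list_move_tail(&list->list, &t->list) at line 554 is the same idea when the loop exits with the lookahead cursor @t pointing at the next unvisited entry; the check at line 553 skips the rotation when the scan ran all the way around.
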
589 list_move(&c->verify_data->list, &bc->live[0].list); in bch2_fs_btree_cache_exit()
597 list_add(&r->b->list, &bc->live[0].list); in bch2_fs_btree_cache_exit()
600 list_for_each_entry_safe(b, t, &bc->live[1].list, list) in bch2_fs_btree_cache_exit()
602 list_for_each_entry_safe(b, t, &bc->live[0].list, list) in bch2_fs_btree_cache_exit()
605 list_for_each_entry_safe(b, t, &bc->freeable, list) { in bch2_fs_btree_cache_exit()
618 list_for_each_entry_safe(b, t, &bc->freed_nonpcpu, list) { in bch2_fs_btree_cache_exit()
619 list_del(&b->list); in bch2_fs_btree_cache_exit()
659 list_splice_init(&bc->live[0].list, &bc->freeable); in bch2_fs_btree_cache_init()
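
Lines 589-619 drain every list at shutdown (live[1], live[0], freeable, freed_nonpcpu), and line 659 hands nodes preallocated onto live[0] to the freeable pool with a single list_splice_init(). Two tiny sketches; kfree() stands in for whatever per-node teardown the real code does.

#include <linux/list.h>
#include <linux/slab.h>

struct btree { struct list_head list; };

/* Teardown sketch: walk a list with the _safe iterator and free each node. */
static void drain_and_free(struct list_head *head)
{
	struct btree *b, *t;

	list_for_each_entry_safe(b, t, head, list) {
		list_del(&b->list);
		kfree(b);
	}
}

/*
 * Init sketch (cf. line 659): everything preallocated onto live[0] moves to
 * the freeable pool in one O(1) splice, and list_splice_init() leaves the
 * source head empty and reusable.
 */
static void move_prealloc_to_freeable(struct list_head *live0, struct list_head *freeable)
{
	list_splice_init(live0, freeable);
}
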
693 INIT_LIST_HEAD(&bc->live[i].list); in bch2_fs_btree_cache_init_early()
757 list_for_each_entry_reverse(b, &bc->live[i].list, list) in btree_node_cannibalize()
763 list_for_each_entry_reverse(b, &bc->live[i].list, list) in btree_node_cannibalize()
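
Lines 757-763, in btree_node_cannibalize(), walk both live lists with list_for_each_entry_reverse(), i.e. starting from the tail that __bch2_btree_node_hash_insert() appends to (line 287), the opposite end from where the shrinker scan begins. A sketch with a hypothetical reclaim predicate:

#include <linux/kernel.h>
#include <linux/list.h>

struct btree { struct list_head list; };
struct btree_cache_list { struct list_head list; };
struct btree_cache { struct btree_cache_list live[2]; };

/* Hypothetical predicate standing in for the real "can we steal this node?" check. */
static bool node_is_reclaimable(struct btree *b)
{
	return true;	/* placeholder */
}

/* Walk both live lists from the tail and return the first node we may take over. */
static struct btree *cannibalize_sketch(struct btree_cache *bc)
{
	struct btree *b;

	for (unsigned i = 0; i < ARRAY_SIZE(bc->live); i++)
		list_for_each_entry_reverse(b, &bc->live[i].list, list)
			if (node_is_reclaimable(b))
				return b;
	return NULL;
}
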
792 list_for_each_entry(b, freed, list) in bch2_btree_node_mem_alloc()
794 list_del_init(&b->list); in bch2_btree_node_mem_alloc()
819 list_for_each_entry(b2, &bc->freeable, list) in bch2_btree_node_mem_alloc()
824 list_del_init(&b2->list); in bch2_btree_node_mem_alloc()
843 BUG_ON(!list_empty(&b->list)); in bch2_btree_node_mem_alloc()
885 BUG_ON(!list_empty(&b->list)); in bch2_btree_node_mem_alloc()
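
Lines 792-885, in bch2_btree_node_mem_alloc(), first try to reuse a node from the freed list and then one from bc->freeable, detaching whichever match is found with list_del_init() so the later BUG_ON(!list_empty(&b->list)) checks hold. A sketch of that pop-first-match pattern; can_take() is hypothetical.

#include <linux/list.h>

struct btree { struct list_head list; };

/* Hypothetical predicate standing in for the reclaim check. */
static bool can_take(struct btree *b)
{
	return true;	/* placeholder */
}

/*
 * Pop the first usable node off @head.  A plain list_for_each_entry() is
 * enough (no _safe variant) because the walk stops at the entry being
 * removed; list_del_init() keeps the detached node's list head "empty" for
 * the assertions later in the allocation path.
 */
static struct btree *pop_usable_node(struct list_head *head)
{
	struct btree *b;

	list_for_each_entry(b, head, list)
		if (can_take(b)) {
			list_del_init(&b->list);
			return b;
		}
	return NULL;
}
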