Lines Matching refs: t (drivers/md/bcache/bset.c, Linux kernel)
294 struct bset_tree *t = b->set; in bch_btree_keys_free() local
297 kfree(t->prev); in bch_btree_keys_free()
299 free_pages((unsigned long) t->prev, in bch_btree_keys_free()
303 kfree(t->tree); in bch_btree_keys_free()
305 free_pages((unsigned long) t->tree, in bch_btree_keys_free()
308 free_pages((unsigned long) t->data, b->page_order); in bch_btree_keys_free()
310 t->prev = NULL; in bch_btree_keys_free()
311 t->tree = NULL; in bch_btree_keys_free()
312 t->data = NULL; in bch_btree_keys_free()
319 struct bset_tree *t = b->set; in bch_btree_keys_alloc() local
321 BUG_ON(t->data); in bch_btree_keys_alloc()
325 t->data = (void *) __get_free_pages(__GFP_COMP|gfp, b->page_order); in bch_btree_keys_alloc()
326 if (!t->data) in bch_btree_keys_alloc()
329 t->tree = bset_tree_bytes(b) < PAGE_SIZE in bch_btree_keys_alloc()
332 if (!t->tree) in bch_btree_keys_alloc()
335 t->prev = bset_prev_bytes(b) < PAGE_SIZE in bch_btree_keys_alloc()
338 if (!t->prev) in bch_btree_keys_alloc()
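The two functions above are mirror images. bch_btree_keys_alloc() draws each of the three per-set arrays (data, tree, prev) either from kmalloc() or from whole pages, depending on whether the array fits in a page, and bch_btree_keys_free() must release each one through the matching branch before clearing the pointers (the alloc error path calls the free routine, so the NULLs matter). A minimal userspace sketch of the pattern; the fake_* helpers and all sizes are hypothetical stand-ins, not bcache's bset_tree_bytes()/bset_prev_bytes() math:

#include <stdlib.h>

#define PAGE_SIZE 4096u

/* Hypothetical stand-ins so the branch structure is visible in userspace. */
static void *fake_kmalloc(size_t n) { return malloc(n); }
static void fake_kfree(void *p) { free(p); }

static void *fake_get_free_pages(size_t n)
{
        /* aligned_alloc() wants a size that is a multiple of the alignment */
        return aligned_alloc(PAGE_SIZE,
                             (n + PAGE_SIZE - 1) & ~(size_t)(PAGE_SIZE - 1));
}

static void fake_free_pages(void *p) { free(p); }

struct toy_keys {
        void *prev, *tree, *data;
        size_t prev_bytes, tree_bytes, data_bytes;
};

/* Like bch_btree_keys_alloc(): data always comes from whole pages; the two
 * auxiliary arrays use the small-object allocator only when they fit in a
 * page. On failure the caller runs toy_free(), which copes with the
 * partially allocated state. */
static int toy_alloc(struct toy_keys *b)
{
        b->data = fake_get_free_pages(b->data_bytes);
        b->tree = b->tree_bytes < PAGE_SIZE
                ? fake_kmalloc(b->tree_bytes)
                : fake_get_free_pages(b->tree_bytes);
        b->prev = b->prev_bytes < PAGE_SIZE
                ? fake_kmalloc(b->prev_bytes)
                : fake_get_free_pages(b->prev_bytes);
        return (b->data && b->tree && b->prev) ? 0 : -1;
}

/* Like bch_btree_keys_free(): every free takes the branch matching its
 * allocation, and the pointers are cleared so a repeat call is harmless. */
static void toy_free(struct toy_keys *b)
{
        if (b->prev_bytes < PAGE_SIZE)
                fake_kfree(b->prev);
        else
                fake_free_pages(b->prev);

        if (b->tree_bytes < PAGE_SIZE)
                fake_kfree(b->tree);
        else
                fake_free_pages(b->tree);

        fake_free_pages(b->data);
        b->prev = b->tree = b->data = NULL;
}

int main(void)
{
        struct toy_keys b = { .prev_bytes = 256,
                              .tree_bytes = 8192,
                              .data_bytes = 16384 };

        if (toy_alloc(&b) == 0) {
                /* ... use the three arrays ... */
        }
        toy_free(&b);        /* also the error path, as in the kernel code */
        return 0;
}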
437 static unsigned int to_inorder(unsigned int j, struct bset_tree *t) in to_inorder() argument
439 return __to_inorder(j, t->size, t->extra); in to_inorder()
463 static unsigned int inorder_to_tree(unsigned int j, struct bset_tree *t) in inorder_to_tree() argument
465 return __inorder_to_tree(j, t->size, t->extra); in inorder_to_tree()
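to_inorder() and inorder_to_tree() convert between a node's position in the breadth-first (eytzinger) array behind t->tree[] and its in-order rank, i.e. which cacheline the node describes; t->extra (computed at line 709) corrects for a partially full bottom row. The program below is my reading of the kernel's __to_inorder()/__inorder_to_tree() bit tricks plus a brute-force check that they are inverses; treat it as a sketch rather than a copy of the source:

/* Round-trip check of the eytzinger <-> in-order index maps.
 * Build: cc -O2 inorder.c && ./a.out */
#include <assert.h>
#include <stdio.h>

static unsigned fls_(unsigned x) { return 32 - (unsigned)__builtin_clz(x); } /* x != 0 */

/* t->extra as computed at line 709:
 * (size - rounddown_pow_of_two(size - 1)) << 1 */
static unsigned tree_extra(unsigned size)
{
        return (size - (1u << (fls_(size - 1) - 1))) << 1;
}

static unsigned to_inorder(unsigned j, unsigned size, unsigned extra)
{
        unsigned b = fls_(j);
        unsigned shift = fls_(size - 1) - b;

        j ^= 1u << (b - 1);        /* strip the top bit: rank within the row */
        j <<= 1;
        j |= 1;                    /* bottom-row slots get odd in-order ranks */
        j <<= shift;

        if (j > extra)             /* squeeze out the missing bottom-row slots */
                j -= (j - extra) >> 1;
        return j;
}

static unsigned inorder_to_tree(unsigned j, unsigned size, unsigned extra)
{
        unsigned shift;

        if (j > extra)             /* undo the bottom-row squeeze */
                j += j - extra;

        shift = (unsigned)__builtin_ffs((int)j);
        j >>= shift;
        j |= (1u << fls_(size - 1)) >> shift;  /* roundup_pow_of_two(size) >> shift */
        return j;
}

int main(void)
{
        for (unsigned size = 2; size <= 1024; size++) {
                unsigned extra = tree_extra(size);

                for (unsigned j = 1; j < size; j++)
                        assert(inorder_to_tree(to_inorder(j, size, extra),
                                               size, extra) == j);
        }
        printf("eytzinger <-> inorder maps are inverses\n");
        return 0;
}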
525 static struct bkey *cacheline_to_bkey(struct bset_tree *t, in cacheline_to_bkey() argument
529 return ((void *) t->data) + cacheline * BSET_CACHELINE + offset * 8; in cacheline_to_bkey()
532 static unsigned int bkey_to_cacheline(struct bset_tree *t, struct bkey *k) in bkey_to_cacheline() argument
534 return ((void *) k - (void *) t->data) / BSET_CACHELINE; in bkey_to_cacheline()
537 static unsigned int bkey_to_cacheline_offset(struct bset_tree *t, in bkey_to_cacheline_offset() argument
541 return (u64 *) k - (u64 *) cacheline_to_bkey(t, cacheline, 0); in bkey_to_cacheline_offset()
544 static struct bkey *tree_to_bkey(struct bset_tree *t, unsigned int j) in tree_to_bkey() argument
546 return cacheline_to_bkey(t, to_inorder(j, t), t->tree[j].m); in tree_to_bkey()
549 static struct bkey *tree_to_prev_bkey(struct bset_tree *t, unsigned int j) in tree_to_prev_bkey() argument
551 return (void *) (((uint64_t *) tree_to_bkey(t, j)) - t->prev[j]); in tree_to_prev_bkey()
558 static struct bkey *table_to_bkey(struct bset_tree *t, unsigned int cacheline) in table_to_bkey() argument
560 return cacheline_to_bkey(t, cacheline, t->prev[cacheline]); in table_to_bkey()
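The five helpers above are one addressing scheme: a key is located by a (cacheline index, 64-bit word offset) pair, so each lookup-table entry only needs a few bits. A toy round trip; BSET_CACHELINE 128 is an assumption here (the arithmetic works for any power of two), and the names are mine:

/* (cacheline, word offset) round trip over a flat key area. */
#include <assert.h>
#include <stdint.h>

#define BSET_CACHELINE 128        /* assumed block size */

static uint64_t data[512];        /* stands in for t->data->start */

static uint64_t *cacheline_to_key(unsigned cacheline, unsigned offset)
{
        /* t->data + cacheline * BSET_CACHELINE + offset * 8 */
        return (uint64_t *)((char *)data + cacheline * BSET_CACHELINE) + offset;
}

static unsigned key_to_cacheline(const uint64_t *k)
{
        return (unsigned)(((const char *)k - (const char *)data) / BSET_CACHELINE);
}

static unsigned key_to_cacheline_offset(unsigned cacheline, const uint64_t *k)
{
        /* word distance from the cacheline boundary: a small number, which
         * is why it fits in the few bits the tree entries have spare */
        return (unsigned)(k - cacheline_to_key(cacheline, 0));
}

int main(void)
{
        for (unsigned w = 0; w < 512; w++) {   /* every word could start a key */
                uint64_t *k = data + w;
                unsigned c = key_to_cacheline(k);

                assert(cacheline_to_key(c, key_to_cacheline_offset(c, k)) == k);
        }
        return 0;
}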
592 static void make_bfloat(struct bset_tree *t, unsigned int j) in make_bfloat() argument
594 struct bkey_float *f = &t->tree[j]; in make_bfloat()
595 struct bkey *m = tree_to_bkey(t, j); in make_bfloat()
596 struct bkey *p = tree_to_prev_bkey(t, j); in make_bfloat()
599 ? t->data->start in make_bfloat()
600 : tree_to_prev_bkey(t, j >> ffs(j)); in make_bfloat()
603 ? bset_bkey_idx(t->data, t->data->keys - bkey_u64s(&t->end)) in make_bfloat()
604 : tree_to_bkey(t, j >> (ffz(j) + 1)); in make_bfloat()
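make_bfloat() needs, for node j, the key just before its subtree and the key just after it. In the eytzinger layout both belong to ancestors, and both fall out of bit arithmetic: j >> ffs(j) strips the lowest set bit and its trailing zeros (the ancestor entered from a right child), while j >> (ffz(j) + 1) strips the trailing ones and one more bit (the ancestor entered from a left child). A brute-force check of the two identities; helper names are mine:

/* Brute-force check of make_bfloat()'s ancestor arithmetic. */
#include <assert.h>
#include <stdio.h>

static unsigned ffs_(unsigned x) { return (unsigned)__builtin_ffs((int)x); }
static unsigned ffz_(unsigned x) { return (unsigned)__builtin_ctz(~x); } /* first zero bit, 0-based */

/* The key just before node j's subtree belongs to the first ancestor we
 * reach from a right child; climb while we are a left child. */
static unsigned pred_ancestor(unsigned j)
{
        while (!(j & 1))
                j >>= 1;
        return j >> 1;        /* 0: j was leftmost, no predecessor exists */
}

/* Symmetrically, the key just after the subtree belongs to the first
 * ancestor reached from a left child. */
static unsigned succ_ancestor(unsigned j)
{
        while (j & 1)
                j >>= 1;
        return j >> 1;        /* 0: j was rightmost, no successor exists */
}

int main(void)
{
        for (unsigned j = 1; j < 4096; j++) {
                assert(pred_ancestor(j) == j >> ffs_(j));
                assert(succ_ancestor(j) == j >> (ffz_(j) + 1));
        }
        printf("ancestor identities hold\n");
        return 0;
}

A result of 0 means no such ancestor exists, which is exactly when the conditionals at lines 599-604 fall back to t->data->start and the cached t->end.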
637 static void bset_alloc_tree(struct btree_keys *b, struct bset_tree *t) in bset_alloc_tree() argument
639 if (t != b->set) { in bset_alloc_tree()
640 unsigned int j = roundup(t[-1].size, in bset_alloc_tree()
643 t->tree = t[-1].tree + j; in bset_alloc_tree()
644 t->prev = t[-1].prev + j; in bset_alloc_tree()
647 while (t < b->set + MAX_BSETS) in bset_alloc_tree()
648 t++->size = 0; in bset_alloc_tree()
653 struct bset_tree *t = bset_tree_last(b); in bch_bset_build_unwritten_tree() local
658 bset_alloc_tree(b, t); in bch_bset_build_unwritten_tree()
660 if (t->tree != b->set->tree + btree_keys_cachelines(b)) { in bch_bset_build_unwritten_tree()
661 t->prev[0] = bkey_to_cacheline_offset(t, 0, t->data->start); in bch_bset_build_unwritten_tree()
662 t->size = 1; in bch_bset_build_unwritten_tree()
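bset_alloc_tree() allocates nothing: every bset in the node shares the single tree[] and prev[] arrays, and each set's slice begins where the previous one ended, rounded up to a whole cacheline of bkey_float entries; bch_bset_build_unwritten_tree() then gives a still-growing set its degenerate one-entry lookup table, provided the shared arrays have room left (the check at line 660). A sketch of the slice carving, with a uint32_t standing in for the 32-bit struct bkey_float and toy names throughout:

#include <stdint.h>
#include <stdio.h>

#define MAX_BSETS 4
#define PER_CACHELINE (64 / sizeof(uint32_t))   /* floats per 64-byte line */

struct toy_tree {
        uint32_t *tree;        /* this set's slice of the shared array */
        uint8_t *prev;
        unsigned size;
};

static void toy_alloc_tree(struct toy_tree set[], unsigned i)
{
        if (i) {
                /* roundup(t[-1].size, 64 / sizeof(struct bkey_float)) */
                unsigned j = (unsigned)(((set[i - 1].size + PER_CACHELINE - 1)
                                         / PER_CACHELINE) * PER_CACHELINE);

                set[i].tree = set[i - 1].tree + j;
                set[i].prev = set[i - 1].prev + j;
        }

        /* "t++->size = 0" for this and every later set */
        for (unsigned k = i; k < MAX_BSETS; k++)
                set[k].size = 0;
}

int main(void)
{
        static uint32_t shared_tree[1024];
        static uint8_t shared_prev[1024];
        struct toy_tree set[MAX_BSETS] = { { shared_tree, shared_prev, 0 } };

        toy_alloc_tree(set, 0);
        set[0].size = 37;        /* pretend set 0 built a 37-entry tree */
        toy_alloc_tree(set, 1);

        /* set 1 begins at the next cacheline boundary: 48 entries in */
        printf("set 1 starts %u entries in\n",
               (unsigned)(set[1].tree - set[0].tree));
        return 0;
}

Rounding each slice up keeps every set's tree cacheline-aligned, which is the point of the packed layout.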
692 struct bset_tree *t = bset_tree_last(b); in bch_bset_build_written_tree() local
693 struct bkey *prev = NULL, *k = t->data->start; in bch_bset_build_written_tree()
698 bset_alloc_tree(b, t); in bch_bset_build_written_tree()
700 t->size = min_t(unsigned int, in bch_bset_build_written_tree()
701 bkey_to_cacheline(t, bset_bkey_last(t->data)), in bch_bset_build_written_tree()
702 b->set->tree + btree_keys_cachelines(b) - t->tree); in bch_bset_build_written_tree()
704 if (t->size < 2) { in bch_bset_build_written_tree()
705 t->size = 0; in bch_bset_build_written_tree()
709 t->extra = (t->size - rounddown_pow_of_two(t->size - 1)) << 1; in bch_bset_build_written_tree()
712 for (j = inorder_next(0, t->size); in bch_bset_build_written_tree()
714 j = inorder_next(j, t->size)) { in bch_bset_build_written_tree()
715 while (bkey_to_cacheline(t, k) < cacheline) { in bch_bset_build_written_tree()
720 t->prev[j] = bkey_u64s(prev); in bch_bset_build_written_tree()
721 t->tree[j].m = bkey_to_cacheline_offset(t, cacheline++, k); in bch_bset_build_written_tree()
724 while (bkey_next(k) != bset_bkey_last(t->data)) in bch_bset_build_written_tree()
727 t->end = *k; in bch_bset_build_written_tree()
730 for (j = inorder_next(0, t->size); in bch_bset_build_written_tree()
732 j = inorder_next(j, t->size)) in bch_bset_build_written_tree()
733 make_bfloat(t, j); in bch_bset_build_written_tree()
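Once a bset is written it is immutable, and bch_bset_build_written_tree() makes two in-order passes over it: the first records, per cacheline, the word offset of the first key starting in it (t->tree[j].m) together with the size of the key before it (t->prev[j], which is what lets tree_to_prev_bkey() step backwards), and caches the last key in t->end; the second pass computes the bkey_floats. A linear toy model of the first pass; the real loop visits entries in eytzinger order via inorder_next() but stores the same values, and all sizes here are invented:

#include <assert.h>
#include <stdio.h>

#define CL 16         /* 64-bit words per cacheline (BSET_CACHELINE / 8) */
#define LINES 10

int main(void)
{
        /* toy key stream: key i starts at word start[i], len[i] words long */
        unsigned start[128], len[128], n = 0, w = 0;

        while (w < LINES * CL) {
                start[n] = w;
                w += len[n] = 2 + n % 3;        /* variable-size keys */
                n++;
        }

        /* pass 1: for each cacheline, find the first key starting at or
         * past its boundary; record its word offset and the size of the
         * key before it */
        unsigned m[LINES], prev[LINES], i = 0;

        for (unsigned c = 1; c < LINES; c++) {
                while (start[i] < c * CL)
                        i++;
                m[c] = start[i] - c * CL;
                prev[c] = len[i - 1];
        }

        /* stepping back by prev[] must land on the key straddling the
         * boundary, i.e. the one tree_to_prev_bkey() would return */
        for (unsigned c = 1; c < LINES; c++)
                assert(c * CL + m[c] - prev[c] < c * CL);

        printf("%u lookup entries over %u keys\n", LINES - 1, n);
        return 0;
}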
740 struct bset_tree *t; in bch_bset_fix_invalidated_key() local
743 for (t = b->set; t <= bset_tree_last(b); t++) in bch_bset_fix_invalidated_key()
744 if (k < bset_bkey_last(t->data)) in bch_bset_fix_invalidated_key()
749 if (!t->size || !bset_written(b, t)) in bch_bset_fix_invalidated_key()
752 inorder = bkey_to_cacheline(t, k); in bch_bset_fix_invalidated_key()
754 if (k == t->data->start) in bch_bset_fix_invalidated_key()
757 if (bkey_next(k) == bset_bkey_last(t->data)) { in bch_bset_fix_invalidated_key()
758 t->end = *k; in bch_bset_fix_invalidated_key()
762 j = inorder_to_tree(inorder, t); in bch_bset_fix_invalidated_key()
765 j < t->size && in bch_bset_fix_invalidated_key()
766 k == tree_to_bkey(t, j)) in bch_bset_fix_invalidated_key()
768 make_bfloat(t, j); in bch_bset_fix_invalidated_key()
770 } while (j < t->size); in bch_bset_fix_invalidated_key()
772 j = inorder_to_tree(inorder + 1, t); in bch_bset_fix_invalidated_key()
775 j < t->size && in bch_bset_fix_invalidated_key()
776 k == tree_to_prev_bkey(t, j)) in bch_bset_fix_invalidated_key()
778 make_bfloat(t, j); in bch_bset_fix_invalidated_key()
780 } while (j < t->size); in bch_bset_fix_invalidated_key()
784 struct bset_tree *t, in bch_bset_fix_lookup_table() argument
788 unsigned int j = bkey_to_cacheline(t, k); in bch_bset_fix_lookup_table()
791 if (!t->size) in bch_bset_fix_lookup_table()
799 while (j < t->size && in bch_bset_fix_lookup_table()
800 table_to_bkey(t, j) <= k) in bch_bset_fix_lookup_table()
807 for (; j < t->size; j++) { in bch_bset_fix_lookup_table()
808 t->prev[j] += shift; in bch_bset_fix_lookup_table()
810 if (t->prev[j] > 7) { in bch_bset_fix_lookup_table()
811 k = table_to_bkey(t, j - 1); in bch_bset_fix_lookup_table()
813 while (k < cacheline_to_bkey(t, j, 0)) in bch_bset_fix_lookup_table()
816 t->prev[j] = bkey_to_cacheline_offset(t, j, k); in bch_bset_fix_lookup_table()
820 if (t->size == b->set->tree + btree_keys_cachelines(b) - t->tree) in bch_bset_fix_lookup_table()
825 for (k = table_to_bkey(t, t->size - 1); in bch_bset_fix_lookup_table()
826 k != bset_bkey_last(t->data); in bch_bset_fix_lookup_table()
828 if (t->size == bkey_to_cacheline(t, k)) { in bch_bset_fix_lookup_table()
829 t->prev[t->size] = in bch_bset_fix_lookup_table()
830 bkey_to_cacheline_offset(t, t->size, k); in bch_bset_fix_lookup_table()
831 t->size++; in bch_bset_fix_lookup_table()
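bch_bset_fix_lookup_table() keeps that table valid across an insertion: every entry pointing past the insertion point slides by the inserted key's size, an entry whose offset overflows is re-derived by walking keys from its left neighbour (the t->prev[j] > 7 test at line 810), and new entries are appended if the keys now spill into a fresh cacheline. A toy model of the slide and re-derive steps; field widths, the oversized key, and all names are invented:

#include <assert.h>
#include <stdio.h>

#define CL 16
#define LINES 8

static unsigned start[128], len[128], n;

static int is_key_start(unsigned w)
{
        for (unsigned i = 0; i < n; i++)
                if (start[i] == w)
                        return 1;
        return 0;
}

int main(void)
{
        unsigned m[LINES] = { 0 }, i = 0, w = 0;

        for (n = 0; w < LINES * CL; n++) {        /* contiguous toy keys */
                start[n] = w;
                w += len[n] = 2 + n % 3;
        }

        for (unsigned c = 1; c < LINES; c++) {    /* initial table */
                while (start[i] < c * CL)
                        i++;
                m[c] = start[i] - c * CL;
        }

        /* splice a 14-word key in before key 10 (oversized on purpose, to
         * force the overflow path below); all later keys slide by 14 */
        unsigned shift = 14, at = start[10];

        for (unsigned k = n; k > 10; k--) {
                start[k] = start[k - 1] + shift;
                len[k] = len[k - 1];
        }
        len[10] = shift;
        n++;

        /* slide every entry that pointed past the insertion point; when an
         * offset escapes its cacheline, re-derive it by walking keys
         * forward from the previous entry */
        for (unsigned c = 1; c < LINES; c++) {
                if (c * CL + m[c] <= at)
                        continue;
                m[c] += shift;
                if (m[c] >= CL) {
                        unsigned j = 0;

                        while (start[j] != (c - 1) * CL + m[c - 1])
                                j++;
                        while (start[j] < c * CL)
                                j++;
                        m[c] = start[j] - c * CL;
                }
        }

        /* entries must still point at real key starts, in increasing order */
        for (unsigned c = 1; c < LINES; c++) {
                assert(is_key_start(c * CL + m[c]));
                if (c > 1)
                        assert(c * CL + m[c] > (c - 1) * CL + m[c - 1]);
        }
        printf("lookup table consistent after insert\n");
        return 0;
}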
860 struct bset_tree *t = bset_tree_last(b); in bch_bset_insert() local
863 BUG_ON(bset_byte_offset(b, t->data) + in bch_bset_insert()
864 __set_bytes(t->data, t->data->keys + bkey_u64s(insert)) > in bch_bset_insert()
869 (void *) bset_bkey_last(t->data) - (void *) where); in bch_bset_insert()
871 t->data->keys += bkey_u64s(insert); in bch_bset_insert()
873 bch_bset_fix_lookup_table(b, t, where); in bch_bset_insert()
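After that machinery, bch_bset_insert() itself is short: a room check (the BUG_ON() at lines 863-864), a memmove() to open a hole at where, a bump of t->data->keys, and the lookup-table fixup above. The same shape over a bare word array, with no bkey structure:

#include <assert.h>
#include <stdint.h>
#include <string.h>

#define CAP 64

static uint64_t area[CAP];        /* stands in for the bset's key words */
static unsigned used;

/* Open an n-word hole at `at` and copy the new key in; the caller must
 * have room, which is what the BUG_ON() at lines 863-864 asserts. */
static void toy_insert(unsigned at, const uint64_t *key, unsigned n)
{
        assert(used + n <= CAP);
        memmove(area + at + n, area + at, (used - at) * sizeof(uint64_t));
        memcpy(area + at, key, n * sizeof(uint64_t));
        used += n;        /* t->data->keys += bkey_u64s(insert) */
}

int main(void)
{
        const uint64_t key[2] = { 42, 43 };

        used = 4;                 /* four words of pre-existing keys */
        toy_insert(2, key, 2);    /* splice into the middle */
        assert(area[2] == 42 && area[3] == 43 && used == 6);
        return 0;
}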
939 static struct bset_search_iter bset_search_write_set(struct bset_tree *t, in bset_search_write_set() argument
942 unsigned int li = 0, ri = t->size; in bset_search_write_set()
947 if (bkey_cmp(table_to_bkey(t, m), search) > 0) in bset_search_write_set()
954 table_to_bkey(t, li), in bset_search_write_set()
955 ri < t->size ? table_to_bkey(t, ri) : bset_bkey_last(t->data) in bset_search_write_set()
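bset_search_write_set() serves sets that are still being appended to: a binary search over the per-cacheline table brackets the target between two entries, and the caller finishes with a short linear scan. A toy version over a sorted int array with one entry per STRIDE elements; names and sizes are mine:

#include <assert.h>
#include <stdio.h>

#define N 1000
#define STRIDE 16        /* one table entry per "cacheline" of elements */

static int v[N];

/* Mirrors bset_search_write_set(): binary-search the table so that entry
 * lo is <= search and entry hi is the first one greater (or the end). */
static void bracket(int search, unsigned *lo, unsigned *hi)
{
        unsigned li = 0, ri = (N + STRIDE - 1) / STRIDE;

        while (li + 1 != ri) {
                unsigned mid = (li + ri) >> 1;

                if (v[mid * STRIDE] > search)
                        ri = mid;
                else
                        li = mid;
        }

        *lo = li * STRIDE;
        *hi = ri * STRIDE < N ? ri * STRIDE : N;   /* bset_bkey_last() case */
}

int main(void)
{
        for (int i = 0; i < N; i++)
                v[i] = 2 * i;

        for (int s = -1; s <= 2 * N; s++) {
                unsigned lo, hi;

                bracket(s, &lo, &hi);
                assert(lo == 0 || v[lo] <= s);    /* left bracket holds */
                assert(hi == N || v[hi] > s);     /* right bracket holds */
        }
        printf("bracket search ok\n");
        return 0;
}

The {lo, hi} pair plays the role of the bset_search_iter the real function returns, including the bset_bkey_last() fallback when ri runs off the table.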
959 static struct bset_search_iter bset_search_tree(struct bset_tree *t, in bset_search_tree() argument
969 if (p < t->size) in bset_search_tree()
970 prefetch(&t->tree[p]); in bset_search_tree()
973 f = &t->tree[j]; in bset_search_tree()
981 if (bkey_cmp(tree_to_bkey(t, j), search) > 0) in bset_search_tree()
986 } while (n < t->size); in bset_search_tree()
988 inorder = to_inorder(j, t); in bset_search_tree()
995 l = cacheline_to_bkey(t, inorder, f->m); in bset_search_tree()
997 if (++inorder != t->size) { in bset_search_tree()
998 f = &t->tree[inorder_next(j, t->size)]; in bset_search_tree()
999 r = cacheline_to_bkey(t, inorder, f->m); in bset_search_tree()
1001 r = bset_bkey_last(t->data); in bset_search_tree()
1003 r = cacheline_to_bkey(t, inorder, f->m); in bset_search_tree()
1006 f = &t->tree[inorder_prev(j, t->size)]; in bset_search_tree()
1007 l = cacheline_to_bkey(t, inorder, f->m); in bset_search_tree()
1009 l = t->data->start; in bset_search_tree()
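bset_search_tree() is the hot path for written sets. Because the tree is stored in eytzinger layout, the descent is pure arithmetic (children of j are 2j and 2j+1), and the node four levels down, index n << 4, can be prefetched before it is needed. The sketch keeps the layout, the descent, and the prefetch, but compares plain ints instead of using the bkey_float exponent/mantissa shortcut, and recovers the answer with the standard n >> ffs(~n) idiom where the kernel converts back through to_inorder():

#include <assert.h>
#include <stdio.h>

#define SIZE 1024        /* nodes 1..SIZE-1 in eytzinger layout */

static int node[SIZE];
static int next_val;

/* Fill so that an in-order walk of the implicit tree is sorted; bcache
 * gets the same effect from the to_inorder() machinery above. */
static void fill(unsigned j)
{
        if (j >= SIZE)
                return;
        fill(2 * j);
        node[j] = next_val++;
        fill(2 * j + 1);
}

/* Returns the node holding the first value greater than key, 0 if none.
 * Same loop shape as bset_search_tree(), including the prefetch of the
 * node four levels down, whose index is known in advance. */
static unsigned search(int key)
{
        unsigned n = 1;

        do {
                if ((n << 4) < SIZE)
                        __builtin_prefetch(&node[n << 4]);

                n = n * 2 + (node[n] <= key);   /* go right if node <= key */
        } while (n < SIZE);

        /* back up to the last left turn; a standard eytzinger idiom */
        return n >> __builtin_ffs((int)~n);
}

int main(void)
{
        fill(1);        /* node values are 0 .. SIZE-2 */

        for (int key = -2; key <= SIZE; key++) {
                unsigned j = search(key);

                if (key >= SIZE - 2)
                        assert(j == 0);                 /* nothing greater */
                else
                        assert(node[j] == (key < 0 ? 0 : key + 1));
        }
        printf("eytzinger search ok\n");
        return 0;
}

This is why the structure beats an ordinary sorted array: each level touches one predictable cacheline, and the next few levels can be fetched while the current comparison is still in flight.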
1015 struct bkey *__bch_bset_search(struct btree_keys *b, struct bset_tree *t, in __bch_bset_search() argument
1035 if (unlikely(!t->size)) { in __bch_bset_search()
1036 i.l = t->data->start; in __bch_bset_search()
1037 i.r = bset_bkey_last(t->data); in __bch_bset_search()
1038 } else if (bset_written(b, t)) { in __bch_bset_search()
1046 if (unlikely(bkey_cmp(search, &t->end) >= 0)) in __bch_bset_search()
1047 return bset_bkey_last(t->data); in __bch_bset_search()
1049 if (unlikely(bkey_cmp(search, t->data->start) < 0)) in __bch_bset_search()
1050 return t->data->start; in __bch_bset_search()
1052 i = bset_search_tree(t, search); in __bch_bset_search()
1055 t->size < bkey_to_cacheline(t, bset_bkey_last(t->data))); in __bch_bset_search()
1057 i = bset_search_write_set(t, search); in __bch_bset_search()
1061 BUG_ON(bset_written(b, t) && in __bch_bset_search()
1062 i.l != t->data->start && in __bch_bset_search()
1063 bkey_cmp(tree_to_prev_bkey(t, in __bch_bset_search()
1064 inorder_to_tree(bkey_to_cacheline(t, i.l), t)), in __bch_bset_search()
1067 BUG_ON(i.r != bset_bkey_last(t->data) && in __bch_bset_search()
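__bch_bset_search() only dispatches: a set with no lookup table (t->size == 0) is scanned whole, a written set first tries the two endpoint shortcuts at lines 1046-1050 and then walks the bfloat tree, and anything else takes the write-set bracket search; the BUG_ON()s afterwards re-check the result against the neighbouring keys. A toy dispatcher with the same shape, using linear scans as stand-ins for both real searches; the strictly-greater semantics follow, as I read it, from the t->end shortcut:

#include <assert.h>
#include <stdio.h>

/* Toy bset: a sorted int array. "Search" returns the index of the first
 * element strictly greater than key. */
struct toy_bset {
        const int *v;
        unsigned n;
        int has_table;        /* t->size != 0       */
        int written;          /* bset_written(b, t) */
};

static unsigned scan(const struct toy_bset *t, int key)
{
        unsigned i = 0;

        while (i < t->n && t->v[i] <= key)
                i++;
        return i;
}

static unsigned toy_search(const struct toy_bset *t, int key)
{
        if (!t->has_table)                      /* no table: scan it all */
                return scan(t, key);

        if (t->written) {
                /* endpoint shortcuts run before the tree is touched */
                if (key >= t->v[t->n - 1])      /* cmp(search, &t->end) >= 0 */
                        return t->n;
                if (key < t->v[0])              /* cmp(search, start) < 0 */
                        return 0;
                return scan(t, key);    /* stand-in for bset_search_tree() */
        }

        return scan(t, key);    /* stand-in for bset_search_write_set() */
}

int main(void)
{
        static const int v[] = { 2, 4, 4, 8, 16 };
        struct toy_bset t = { v, 5, 1, 1 };

        for (int key = 0; key <= 20; key++) {
                unsigned want = scan(&t, key);

                t.has_table = 0;
                assert(toy_search(&t, key) == want);
                t.has_table = 1;
                t.written = 0;
                assert(toy_search(&t, key) == want);
                t.written = 1;
                assert(toy_search(&t, key) == want);
        }
        printf("all three paths agree\n");
        return 0;
}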
1372 struct bset_tree *t = &b->set[i]; in bch_btree_keys_stats() local
1373 size_t bytes = t->data->keys * sizeof(uint64_t); in bch_btree_keys_stats()
1376 if (bset_written(b, t)) { in bch_btree_keys_stats()
1380 stats->floats += t->size - 1; in bch_btree_keys_stats()
1382 for (j = 1; j < t->size; j++) in bch_btree_keys_stats()
1383 if (t->tree[j].exponent == 127) in bch_btree_keys_stats()