Lines matching refs:tree (references to the tree back-pointer of struct ma_state in the maple tree implementation, lib/maple_tree.c). Each hit shows the source line number, the matched code, and the enclosing function; a short usage sketch follows the listing.

95 #define mas_tree_parent(x) ((unsigned long)(x->tree) | MA_ROOT_PARENT)
226 return mt_height(mas->tree); in mas_mt_height()
481 if (mt_is_alloc(mas->tree)) in mas_parent_type()
819 return mt_slot_locked(mas->tree, slots, offset); in mas_slot_locked()
833 return mt_slot(mas->tree, slots, offset); in mas_slot()
844 return rcu_dereference_check(mas->tree->ma_root, mt_locked(mas->tree)); in mas_root()
860 return mt_root_locked(mas->tree); in mas_root_locked()
999 bool in_rcu = mt_in_rcu(mas->tree); in mas_mat_destroy()
1004 mt_destroy_walk(mat->head, mas->tree, !in_rcu); in mas_mat_destroy()
1318 if (mt_in_rcu(mas->tree)) in mas_free()
1677 if (!mt_is_alloc(mas->tree)) in mas_update_gap()
1725 __must_hold(mas->tree->ma_lock) in mas_put_in_tree()
1732 rcu_assign_pointer(mas->tree->ma_root, mte_mk_root(mas->node)); in mas_put_in_tree()
1733 mt_set_height(mas->tree, new_height); in mas_put_in_tree()
1755 __must_hold(mas->tree->ma_lock) in mas_replace_node()
1767 __must_hold(mas->tree->ma_lock) in mas_find_child()
1966 if (!ma_is_leaf(mt) && mt_is_alloc(mas->tree)) { in mas_mab_cp()
2023 if (likely(!ma_is_leaf(mt) && mt_is_alloc(mas->tree))) { in mab_mas_cp()
2166 MA_STATE(parent, mas->tree, mas->index, mas->last); in mas_next_sibling()
2412 if (mt_is_alloc(mas->tree)) in mab_set_b_end()
2554 MA_TOPIARY(subtrees, mas->tree); in mas_topiary_replace()
2600 in_rcu = mt_in_rcu(mas->tree); in mas_topiary_replace()
2797 next = mt_slot(mas->tree, slots, offset); in mtree_range_walk()
2841 MA_STATE(l_mas, mas->tree, mas->index, mas->index); in mas_spanning_rebalance()
2842 MA_STATE(r_mas, mas->tree, mas->index, mas->last); in mas_spanning_rebalance()
2843 MA_STATE(m_mas, mas->tree, mas->index, mas->index); in mas_spanning_rebalance()
2976 MA_STATE(l_mas, mas->tree, mas->index, mas->last); in mas_rebalance()
2977 MA_STATE(r_mas, mas->tree, mas->index, mas->last); in mas_rebalance()
3031 bool in_rcu = mt_in_rcu(mas->tree); in mas_destroy_rebalance()
3034 MA_STATE(l_mas, mas->tree, mas->index, mas->last); in mas_destroy_rebalance()
3145 if (mt_is_alloc(mas->tree)) in mas_split_final_node()
3249 MA_STATE(tmp_mas, mas->tree, mas->index, mas->last); in mas_push_data()
3335 MA_STATE(l_mas, mas->tree, mas->index, mas->last); in mas_split()
3336 MA_STATE(r_mas, mas->tree, mas->index, mas->last); in mas_split()
3337 MA_STATE(prev_l_mas, mas->tree, mas->index, mas->last); in mas_split()
3338 MA_STATE(prev_r_mas, mas->tree, mas->index, mas->last); in mas_split()
3449 mt_set_height(mas->tree, 1); in mas_root_expand()
3452 rcu_assign_pointer(mas->tree->ma_root, mte_mk_root(mas->node)); in mas_root_expand()
3468 rcu_assign_pointer(mas->tree->ma_root, NULL); in mas_store_root()
3474 rcu_assign_pointer(mas->tree->ma_root, entry); in mas_store_root()
3671 next = mt_slot(mas->tree, slots, offset); in mtree_lookup_walk()
3703 mt_set_height(mas->tree, 0); in mas_new_root()
3704 rcu_assign_pointer(mas->tree->ma_root, entry); in mas_new_root()
3717 mt_set_height(mas->tree, 1); in mas_new_root()
3718 rcu_assign_pointer(mas->tree->ma_root, mte_mk_root(mas->node)); in mas_new_root()
3722 mte_destroy_walk(root, mas->tree); in mas_new_root()
3835 bool in_rcu = mt_in_rcu(mas->tree); in mas_wr_node_store()
3914 gap |= !mt_slot_locked(mas->tree, slots, offset); in mas_wr_slot_store()
3915 gap |= !mt_slot_locked(mas->tree, slots, offset + 1); in mas_wr_slot_store()
3929 WARN_ON_ONCE(mt_in_rcu(mas->tree)); in mas_wr_slot_store()
3934 gap |= !mt_slot_locked(mas->tree, slots, offset + 2); in mas_wr_slot_store()
4120 MT_BUG_ON(mas->tree, 1); in mas_wr_store_entry()
4206 ret = mt_in_rcu(mas->tree) ? 1 : 0; in mas_prealloc_calc()
4266 if (!mt_in_rcu(mas->tree) && (mas->offset == mas->end)) in mas_wr_store_type()
4269 if ((new_end == mas->end) && (!mt_in_rcu(mas->tree) || in mas_wr_store_type()
4374 if ((mas->tree->ma_flags & MT_FLAGS_ALLOC_WRAPPED) && ret == 0) { in mas_alloc_cyclic()
4375 mas->tree->ma_flags &= ~MT_FLAGS_ALLOC_WRAPPED; in mas_alloc_cyclic()
4396 mas->tree->ma_flags |= MT_FLAGS_ALLOC_WRAPPED; in mas_alloc_cyclic()
4746 entry = mt_slot(mas->tree, slots, mas->offset); in mas_next_slot()
5641 if (!mt_is_alloc(mas->tree)) in mas_expected_entries()
6283 __must_hold(mas->tree->ma_lock) in mas_nomem()
6288 if (gfpflags_allow_blocking(gfp) && !mt_external_lock(mas->tree)) { in mas_nomem()
6289 mtree_unlock(mas->tree); in mas_nomem()
6291 mtree_lock(mas->tree); in mas_nomem()
6700 val = (unsigned long)mt_slot_locked(mas->tree, slots, i); in mas_dup_alloc()
6727 if (unlikely(mt_attr(mas->tree) != mt_attr(new_mas->tree)) || in mas_dup_build()
6728 unlikely(!mtree_empty(new_mas->tree))) { in mas_dup_build()
6787 new_mas->tree->ma_flags = mas->tree->ma_flags; in mas_dup_build()
6788 rcu_assign_pointer(new_mas->tree->ma_root, root); in mas_dup_build()
7342 MT_BUG_ON(mas->tree, !entry); in mas_validate_gaps()
7348 MT_BUG_ON(mas->tree, gap > p_end - p_start + 1); in mas_validate_gaps()
7362 MT_BUG_ON(mas->tree, !gaps); in mas_validate_gaps()
7366 MT_BUG_ON(mas->tree, 1); in mas_validate_gaps()
7372 MT_BUG_ON(mas->tree, 1); in mas_validate_gaps()
7379 MT_BUG_ON(mas->tree, 1); in mas_validate_gaps()
7389 MT_BUG_ON(mas->tree, max_gap > mas->max); in mas_validate_gaps()
7392 mt_dump(mas->tree, mt_dump_hex); in mas_validate_gaps()
7393 MT_BUG_ON(mas->tree, 1); in mas_validate_gaps()
7413 MT_BUG_ON(mas->tree, mas_mn(mas) == parent); in mas_validate_parent_slot()
7423 MT_BUG_ON(mas->tree, node != mas->node); in mas_validate_parent_slot()
7427 MT_BUG_ON(mas->tree, node == mas->node); in mas_validate_parent_slot()
7449 MT_BUG_ON(mas->tree, 1); in mas_validate_child_slot()
7456 MT_BUG_ON(mas->tree, 1); in mas_validate_child_slot()
7463 MT_BUG_ON(mas->tree, 1); in mas_validate_child_slot()
7519 MT_BUG_ON(mas->tree, 1); in mas_validate_limits()
7528 MT_BUG_ON(mas->tree, entry != NULL); in mas_validate_limits()
7587 __must_hold(mas->tree->ma_lock) in mt_validate()
7622 mas->tree, mas->node); in mas_dump()
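
The hits above are the internal side of one pattern: MA_STATE() stores a back-pointer to the struct maple_tree in mas->tree, and the implementation then uses that pointer for lock annotations (__must_hold on ma_lock), RCU-mode checks (mt_in_rcu()), root updates (rcu_assign_pointer() on ma_root), and the lock drop/retake in mas_nomem() at 6288-6291. The sketch below is a hypothetical external-API illustration of the same machinery, assuming a caller-defined tree named example_mt and caller-defined helpers; it is not code from lib/maple_tree.c.

	/* Illustration only: external user of the maple tree API. */
	#include <linux/maple_tree.h>
	#include <linux/rcupdate.h>
	#include <linux/gfp.h>

	static struct maple_tree example_mt;	/* hypothetical tree */

	static void example_init(void)
	{
		/* MT_FLAGS_USE_RCU is what mt_in_rcu(mas->tree) tests above. */
		mt_init_flags(&example_mt, MT_FLAGS_USE_RCU);
	}

	/* Writer: bind an ma_state to the tree, take ma_lock, store a range. */
	static int example_store(unsigned long first, unsigned long last, void *ptr)
	{
		MA_STATE(mas, &example_mt, first, last);	/* mas.tree = &example_mt */
		int ret;

		mas_lock(&mas);			/* spin_lock(&mas.tree->ma_lock) */
		ret = mas_store_gfp(&mas, ptr, GFP_KERNEL);
		mas_unlock(&mas);
		return ret;
	}

	/* Reader: with MT_FLAGS_USE_RCU set, lookups may run under RCU. */
	static void *example_lookup(unsigned long index)
	{
		MA_STATE(mas, &example_mt, index, index);
		void *entry;

		rcu_read_lock();
		entry = mas_walk(&mas);		/* dereferences mas.tree->ma_root */
		rcu_read_unlock();
		return entry;
	}

GFP_KERNEL is usable under mas_lock() here because mas_nomem() (hits at 6283-6291) drops mtree_lock(mas->tree) around blocking allocations and retakes it before retrying, provided the tree is not externally locked.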