Lines matching refs:path (cross-reference hits for the identifier "path" in the bcachefs btree locking code)
25 struct btree_path *path; in bch2_btree_node_lock_counts() local
34 trans_for_each_path(trans, path, i) in bch2_btree_node_lock_counts()
35 if (path != skip && &path->l[level].b->c == b) { in bch2_btree_node_lock_counts()
36 int t = btree_node_locked_type(path, level); in bch2_btree_node_lock_counts()
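
The hits above are the tallying loop in bch2_btree_node_lock_counts(): every path in the transaction is walked, and the lock type each one holds on the node of interest is counted, optionally skipping one path. Below is a minimal userspace sketch of that pattern; the struct layout and lock-state enum are simplified stand-ins, not the kernel definitions.

    #include <stdio.h>

    /* Simplified stand-ins for the kernel's lock states (not the real enums). */
    enum lock_type { UNLOCKED = -1, READ_LOCKED, INTENT_LOCKED, WRITE_LOCKED };

    struct toy_path {
        const void *node[4];            /* btree node pointer per level */
        enum lock_type held[4];         /* lock type held per level */
    };

    /* Tally how many paths hold each lock type on @b at @level, skipping
     * @skip -- the same shape as the loop in bch2_btree_node_lock_counts(). */
    static void node_lock_counts(struct toy_path *paths, int nr,
                                 struct toy_path *skip, const void *b,
                                 int level, int counts[3])
    {
        for (int i = 0; i < nr; i++) {
            struct toy_path *path = &paths[i];

            if (path != skip && path->node[level] == b) {
                int t = path->held[level];

                if (t != UNLOCKED)
                    counts[t]++;
            }
        }
    }

    int main(void)
    {
        static int node;                /* dummy node identity */
        struct toy_path paths[2] = {
            { .node = { &node }, .held = { READ_LOCKED   } },
            { .node = { &node }, .held = { INTENT_LOCKED } },
        };
        int counts[3] = { 0 };

        node_lock_counts(paths, 2, NULL, &node, 0, counts);
        printf("read=%d intent=%d write=%d\n",
               counts[READ_LOCKED], counts[INTENT_LOCKED], counts[WRITE_LOCKED]);
        return 0;
    }
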
48 struct btree_path *path, struct btree *b) in bch2_btree_node_unlock_write() argument
50 bch2_btree_node_unlock_write_inlined(trans, path, b); in bch2_btree_node_unlock_write()
331 struct btree_path *path = paths + path_idx; in bch2_check_for_deadlock() local
332 if (!path->nodes_locked) in bch2_check_for_deadlock()
344 int lock_held = btree_node_locked_type(path, top->level); in bch2_check_for_deadlock()
349 b = &READ_ONCE(path->l[top->level].b)->c; in bch2_check_for_deadlock()
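
bch2_check_for_deadlock(), matched above, walks the locks held by every path to build a waits-for graph between transactions and looks for a cycle: if the transaction about to block is already reachable from the lock holder, granting the wait would deadlock. A self-contained toy of that cycle check follows (an adjacency matrix plus DFS; the kernel's lock_graph machinery is considerably more involved).

    #include <stdbool.h>
    #include <stdio.h>

    #define NR_TRANS 3

    /* waits_for[a][b]: transaction a is blocked on a lock held by b. */
    static bool waits_for[NR_TRANS][NR_TRANS];

    /* DFS over the waits-for graph: can @from reach @target? */
    static bool reaches(int from, int target, bool seen[NR_TRANS])
    {
        if (from == target)
            return true;
        if (seen[from])
            return false;
        seen[from] = true;

        for (int next = 0; next < NR_TRANS; next++)
            if (waits_for[from][next] && reaches(next, target, seen))
                return true;
        return false;
    }

    /* Adding waiter->holder closes a cycle iff holder already reaches
     * waiter -- the condition bch2_check_for_deadlock() is probing for. */
    static bool would_deadlock(int waiter, int holder)
    {
        bool seen[NR_TRANS] = { false };

        return reaches(holder, waiter, seen);
    }

    int main(void)
    {
        waits_for[0][1] = true;         /* trans 0 waits on trans 1 */
        waits_for[1][2] = true;         /* trans 1 waits on trans 2 */
        printf("trans 2 blocking on trans 0 deadlocks: %d\n",
               would_deadlock(2, 0));
        return 0;
    }
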
422 int __bch2_btree_node_lock_write(struct btree_trans *trans, struct btree_path *path, in __bch2_btree_node_lock_write() argument
441 mark_btree_node_locked_noreset(path, b->level, BTREE_NODE_INTENT_LOCKED); in __bch2_btree_node_lock_write()
447 struct btree_path *path, in bch2_btree_node_lock_write_nofail() argument
450 int ret = __btree_node_lock_write(trans, path, b, true); in bch2_btree_node_lock_write_nofail()
457 struct btree_path *path, in btree_path_get_locks() argument
462 unsigned l = path->level; in btree_path_get_locks()
465 if (!btree_path_node(path, l)) in btree_path_get_locks()
469 ? bch2_btree_node_upgrade(trans, path, l) in btree_path_get_locks()
470 : bch2_btree_node_relock(trans, path, l))) in btree_path_get_locks()
474 } while (l < path->locks_want); in btree_path_get_locks()
476 if (path->uptodate == BTREE_ITER_NEED_RELOCK) in btree_path_get_locks()
477 path->uptodate = BTREE_ITER_UPTODATE; in btree_path_get_locks()
479 return path->uptodate < BTREE_ITER_NEED_RELOCK ? 0 : -1; in btree_path_get_locks()
483 f->b = path->l[l].b; in btree_path_get_locks()
492 } else if (path->should_be_locked && !trans->restarted) { in btree_path_get_locks()
494 path->locks_want = l; in btree_path_get_locks()
498 __bch2_btree_path_unlock(trans, path); in btree_path_get_locks()
499 btree_path_set_dirty(trans, path, BTREE_ITER_NEED_TRAVERSE); in btree_path_get_locks()
507 path->l[l].b = upgrade in btree_path_get_locks()
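
The btree_path_get_locks() hits trace its central loop: starting at the path's current level, relock (or upgrade, when upgrading) each node up to locks_want; on any failure, drop every lock on the path and mark it BTREE_ITER_NEED_TRAVERSE. Here is a compact model of that control flow, with toy types and a stub in place of bch2_btree_node_relock().

    #include <stdbool.h>
    #include <stdio.h>

    #define MAX_LEVELS 4

    struct toy_path {
        int level;
        unsigned locks_want;
        bool relocks_ok[MAX_LEVELS];    /* would this level relock? */
        bool locked[MAX_LEVELS];
        bool uptodate;
    };

    /* Stub standing in for bch2_btree_node_relock()/bch2_btree_node_upgrade(). */
    static bool try_lock_level(struct toy_path *path, unsigned l)
    {
        if (!path->relocks_ok[l])
            return false;
        path->locked[l] = true;
        return true;
    }

    /* Mirror of the btree_path_get_locks() loop: take locks bottom-up from
     * path->level to locks_want; on failure drop everything and flag the
     * path for re-traversal (BTREE_ITER_NEED_TRAVERSE). */
    static int path_get_locks(struct toy_path *path)
    {
        unsigned l = path->level;

        do {
            if (!try_lock_level(path, l))
                goto fail;
            l++;
        } while (l < path->locks_want);

        path->uptodate = true;
        return 0;
    fail:
        for (l = 0; l < MAX_LEVELS; l++)
            path->locked[l] = false;    /* __bch2_btree_path_unlock() */
        path->uptodate = false;
        return -1;
    }

    int main(void)
    {
        struct toy_path p = {
            .level = 0, .locks_want = 2,
            .relocks_ok = { true, false },      /* level 1 will fail */
        };

        printf("get_locks: %d, uptodate: %d\n", path_get_locks(&p), p.uptodate);
        return 0;
    }
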
516 struct btree_path *path, unsigned level, in __bch2_btree_node_relock() argument
519 struct btree *b = btree_path_node(path, level); in __bch2_btree_node_relock()
520 int want = __btree_lock_want(path, level); in __bch2_btree_node_relock()
525 if (six_relock_type(&b->c.lock, want, path->l[level].lock_seq) || in __bch2_btree_node_relock()
526 (btree_node_lock_seq_matches(path, b, level) && in __bch2_btree_node_relock()
528 mark_btree_node_locked(trans, path, level, want); in __bch2_btree_node_relock()
533 trace_and_count(trans->c, btree_path_relock_fail, trans, _RET_IP_, path, level); in __bch2_btree_node_relock()
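
__bch2_btree_node_relock() only re-takes a lock if the node's six-lock sequence number still equals the value saved in path->l[level].lock_seq, so a node that was written to (or freed and reused) since the lock was dropped is refused. Below is a toy version of that seq-validated relock; the counter protocol is a loose model of six_relock_type(), not its real implementation.

    #include <stdbool.h>
    #include <stdio.h>

    /* Toy lock whose sequence number bumps whenever a writer touches the
     * node -- loosely modeled on the six-lock seq that six_relock_type()
     * and path->l[level].lock_seq compare against. */
    struct toy_lock {
        unsigned seq;
        bool held;
    };

    /* Relock succeeds only if nothing modified the node since @saved_seq
     * was recorded. */
    static bool relock(struct toy_lock *l, unsigned saved_seq)
    {
        if (l->held || l->seq != saved_seq)
            return false;
        l->held = true;
        return true;
    }

    int main(void)
    {
        struct toy_lock lock = { .seq = 42 };
        unsigned saved = lock.seq;

        printf("untouched node relocks: %d\n", relock(&lock, saved));
        lock.held = false;
        lock.seq++;             /* a writer modified the node meanwhile */
        printf("modified node relocks:  %d\n", relock(&lock, saved));
        return 0;
    }
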
540 struct btree_path *path, unsigned level) in bch2_btree_node_upgrade() argument
542 struct btree *b = path->l[level].b; in bch2_btree_node_upgrade()
544 if (!is_btree_node(path, level)) in bch2_btree_node_upgrade()
547 switch (btree_lock_want(path, level)) { in bch2_btree_node_upgrade()
549 BUG_ON(btree_node_locked(path, level)); in bch2_btree_node_upgrade()
552 BUG_ON(btree_node_intent_locked(path, level)); in bch2_btree_node_upgrade()
553 return bch2_btree_node_relock(trans, path, level); in bch2_btree_node_upgrade()
560 if (btree_node_intent_locked(path, level)) in bch2_btree_node_upgrade()
566 if (btree_node_locked(path, level) in bch2_btree_node_upgrade()
568 : six_relock_type(&b->c.lock, SIX_LOCK_intent, path->l[level].lock_seq)) in bch2_btree_node_upgrade()
571 if (btree_node_lock_seq_matches(path, b, level) && in bch2_btree_node_upgrade()
573 btree_node_unlock(trans, path, level); in bch2_btree_node_upgrade()
577 trace_and_count(trans->c, btree_path_upgrade_fail, trans, _RET_IP_, path, level); in bch2_btree_node_upgrade()
580 mark_btree_node_locked_noreset(path, level, BTREE_NODE_INTENT_LOCKED); in bch2_btree_node_upgrade()
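
bch2_btree_node_upgrade(), matched above, promotes a read lock to an intent lock: first an in-place trylock upgrade (which only works while no other reader holds the node), then a seq-checked relock, and otherwise the node is unlocked and the failure traced so the transaction can restart. A condensed sketch of that decision ladder with simplified state (the field and helper names here are stand-ins):

    #include <stdbool.h>
    #include <stdio.h>

    enum lock_type { UNLOCKED, READ_LOCKED, INTENT_LOCKED };

    struct toy_node {
        enum lock_type ours;    /* what we hold on this node */
        int other_readers;      /* other read holders block an upgrade */
        unsigned seq;
    };

    /* Roughly the ladder in bch2_btree_node_upgrade(): try an in-place
     * trylock upgrade first, then a seq-validated intent relock, else fail
     * so the caller can restart the transaction. */
    static bool node_upgrade(struct toy_node *b, unsigned saved_seq)
    {
        if (b->ours == READ_LOCKED && b->other_readers == 0) {
            b->ours = INTENT_LOCKED;        /* six_lock_tryupgrade() */
            return true;
        }
        if (b->ours == UNLOCKED && b->seq == saved_seq) {
            b->ours = INTENT_LOCKED;        /* six_relock_type() */
            return true;
        }
        return false;
    }

    int main(void)
    {
        struct toy_node b = { .ours = READ_LOCKED, .other_readers = 1, .seq = 7 };

        printf("upgrade with another reader: %d\n", node_upgrade(&b, 7));
        b.other_readers = 0;
        printf("upgrade as sole reader:      %d\n", node_upgrade(&b, 7));
        return 0;
    }
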
590 struct btree_path *path) in bch2_btree_path_relock_intent() argument
594 for (l = path->level; in bch2_btree_path_relock_intent()
595 l < path->locks_want && btree_path_node(path, l); in bch2_btree_path_relock_intent()
597 if (!bch2_btree_node_relock(trans, path, l)) { in bch2_btree_path_relock_intent()
598 __bch2_btree_path_unlock(trans, path); in bch2_btree_path_relock_intent()
599 btree_path_set_dirty(trans, path, BTREE_ITER_NEED_TRAVERSE); in bch2_btree_path_relock_intent()
600 trace_and_count(trans->c, trans_restart_relock_path_intent, trans, _RET_IP_, path); in bch2_btree_path_relock_intent()
609 bool bch2_btree_path_relock_norestart(struct btree_trans *trans, struct btree_path *path) in bch2_btree_path_relock_norestart() argument
611 bool ret = !btree_path_get_locks(trans, path, false, NULL, 0); in bch2_btree_path_relock_norestart()
617 struct btree_path *path, unsigned long trace_ip) in __bch2_btree_path_relock() argument
619 if (!bch2_btree_path_relock_norestart(trans, path)) { in __bch2_btree_path_relock()
620 trace_and_count(trans->c, trans_restart_relock_path, trans, trace_ip, path); in __bch2_btree_path_relock()
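
bch2_btree_path_relock_norestart() and __bch2_btree_path_relock() show a recurring bcachefs split: a _norestart variant that merely reports success, and a wrapper that converts failure into a transaction-restart error for the caller to propagate. A minimal sketch of the pattern follows; the error constant is illustrative, not the kernel's restart errno.

    #include <stdbool.h>
    #include <stdio.h>

    /* Illustrative restart error, not the kernel's actual errno. */
    #define ERR_TRANSACTION_RESTART 1

    struct toy_trans {
        bool restarted;
    };

    /* The _norestart flavor just reports success or failure. */
    static bool path_relock_norestart(struct toy_trans *trans)
    {
        (void) trans;           /* a real version would walk trans->paths */
        return false;           /* pretend some node failed to relock */
    }

    /* Wrapper in the style of __bch2_btree_path_relock(): convert failure
     * into a restarted transaction plus an error the caller propagates. */
    static int path_relock(struct toy_trans *trans)
    {
        if (!path_relock_norestart(trans)) {
            trans->restarted = true;
            return -ERR_TRANSACTION_RESTART;
        }
        return 0;
    }

    int main(void)
    {
        struct toy_trans trans = { 0 };

        printf("relock: %d, restarted: %d\n", path_relock(&trans), trans.restarted);
        return 0;
    }
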
628 struct btree_path *path, in __bch2_btree_path_upgrade_norestart() argument
631 path->locks_want = new_locks_want; in __bch2_btree_path_upgrade_norestart()
638 bool ret = !btree_path_get_locks(trans, path, true, NULL, 0) || in __bch2_btree_path_upgrade_norestart()
639 !path->should_be_locked; in __bch2_btree_path_upgrade_norestart()
641 bch2_btree_path_verify_locks(trans, path); in __bch2_btree_path_upgrade_norestart()
646 struct btree_path *path, in __bch2_btree_path_upgrade() argument
649 unsigned old_locks = path->nodes_locked; in __bch2_btree_path_upgrade()
650 unsigned old_locks_want = path->locks_want; in __bch2_btree_path_upgrade()
652 path->locks_want = max_t(unsigned, path->locks_want, new_locks_want); in __bch2_btree_path_upgrade()
655 int ret = btree_path_get_locks(trans, path, true, &f, in __bch2_btree_path_upgrade()
679 if (!path->cached && !trans->in_traverse_all) { in __bch2_btree_path_upgrade()
684 if (linked != path && in __bch2_btree_path_upgrade()
685 linked->cached == path->cached && in __bch2_btree_path_upgrade()
686 linked->btree_id == path->btree_id && in __bch2_btree_path_upgrade()
698 prt_printf(&buf, "btree %s pos\n", bch2_btree_id_str(path->btree_id)); in __bch2_btree_path_upgrade()
699 bch2_bpos_to_text(&buf, path->pos); in __bch2_btree_path_upgrade()
703 old_locks, path->nodes_locked); in __bch2_btree_path_upgrade()
708 path->l[f.l].lock_seq); in __bch2_btree_path_upgrade()
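
The linked-path loop inside __bch2_btree_path_upgrade() (the hits at 679-686) is the subtle part: after raising locks_want on one path, other paths into the same btree get their locks_want raised too, so a later traversal doesn't deadlock the transaction against its own iterators. A toy rendition of that propagation:

    #include <stdio.h>

    #define NR_PATHS 3

    struct toy_path {
        int btree_id;
        unsigned locks_want;
    };

    /* After raising locks_want on @path, raise it on every other path into
     * the same btree too -- the idea behind the linked-path loop in
     * __bch2_btree_path_upgrade(). */
    static void propagate_locks_want(struct toy_path *paths, int nr,
                                     struct toy_path *path, unsigned want)
    {
        for (int i = 0; i < nr; i++) {
            struct toy_path *linked = &paths[i];

            if (linked != path &&
                linked->btree_id == path->btree_id &&
                linked->locks_want < want)
                linked->locks_want = want;
        }
    }

    int main(void)
    {
        struct toy_path paths[NR_PATHS] = {
            { .btree_id = 1, .locks_want = 1 },
            { .btree_id = 1, .locks_want = 1 },
            { .btree_id = 2, .locks_want = 1 },
        };

        paths[0].locks_want = 3;
        propagate_locks_want(paths, NR_PATHS, &paths[0], 3);
        printf("same btree: %u, other btree: %u\n",
               paths[1].locks_want, paths[2].locks_want);
        return 0;
    }
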
719 struct btree_path *path, in __bch2_btree_path_downgrade() argument
722 unsigned l, old_locks_want = path->locks_want; in __bch2_btree_path_downgrade()
727 EBUG_ON(path->locks_want < new_locks_want); in __bch2_btree_path_downgrade()
729 path->locks_want = new_locks_want; in __bch2_btree_path_downgrade()
731 while (path->nodes_locked && in __bch2_btree_path_downgrade()
732 (l = btree_path_highest_level_locked(path)) >= path->locks_want) { in __bch2_btree_path_downgrade()
733 if (l > path->level) { in __bch2_btree_path_downgrade()
734 btree_node_unlock(trans, path, l); in __bch2_btree_path_downgrade()
736 if (btree_node_intent_locked(path, l)) { in __bch2_btree_path_downgrade()
737 six_lock_downgrade(&path->l[l].b->c.lock); in __bch2_btree_path_downgrade()
738 mark_btree_node_locked_noreset(path, l, BTREE_NODE_READ_LOCKED); in __bch2_btree_path_downgrade()
744 bch2_btree_path_verify_locks(trans, path); in __bch2_btree_path_downgrade()
746 trace_path_downgrade(trans, _RET_IP_, path, old_locks_want); in __bch2_btree_path_downgrade()
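
__bch2_btree_path_downgrade() sheds excess locks from the top down: a locked level above the path's own level is unlocked outright, while an intent lock at a level the path still uses is demoted to a read lock via six_lock_downgrade(). A small model of that walk, with the downgrade represented as a simple state change:

    #include <stdio.h>

    #define MAX_LEVELS 4

    enum lock_type { UNLOCKED, READ_LOCKED, INTENT_LOCKED };

    struct toy_path {
        int level;
        unsigned locks_want;
        enum lock_type held[MAX_LEVELS];
    };

    /* Shed locks above the new locks_want, top down: unlock levels the
     * path doesn't sit on, demote an intent lock on its own level --
     * the shape of __bch2_btree_path_downgrade(). */
    static void path_downgrade(struct toy_path *path, unsigned new_want)
    {
        path->locks_want = new_want;

        for (int l = MAX_LEVELS - 1; l >= (int) new_want; l--) {
            if (path->held[l] == UNLOCKED)
                continue;
            if (l > path->level)
                path->held[l] = UNLOCKED;       /* btree_node_unlock() */
            else if (path->held[l] == INTENT_LOCKED)
                path->held[l] = READ_LOCKED;    /* six_lock_downgrade() */
        }
    }

    int main(void)
    {
        struct toy_path p = {
            .level = 1, .locks_want = 3,
            .held = { UNLOCKED, INTENT_LOCKED, INTENT_LOCKED },
        };

        path_downgrade(&p, 1);
        printf("level 1: %d, level 2: %d\n", p.held[1], p.held[2]);
        return 0;
    }
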
753 struct btree_path *path; in bch2_trans_downgrade() local
759 trans_for_each_path(trans, path, i) in bch2_trans_downgrade()
760 if (path->ref) in bch2_trans_downgrade()
761 bch2_btree_path_downgrade(trans, path); in bch2_trans_downgrade()
766 struct btree_path *path; in __bch2_trans_unlock() local
769 trans_for_each_path(trans, path, i) in __bch2_trans_unlock()
770 __bch2_btree_path_unlock(trans, path); in __bch2_trans_unlock()
773 static noinline __cold void bch2_trans_relock_fail(struct btree_trans *trans, struct btree_path *path, in bch2_trans_relock_fail() argument
782 bch2_bpos_to_text(&buf, path->pos); in bch2_trans_relock_fail()
784 bch2_btree_id_str(path->btree_id), in bch2_trans_relock_fail()
785 f->l, path->l[f->l].lock_seq); in bch2_trans_relock_fail()
818 struct btree_path *path; in __bch2_trans_relock() local
821 trans_for_each_path(trans, path, i) { in __bch2_trans_relock()
825 if (path->should_be_locked && in __bch2_trans_relock()
826 (ret = btree_path_get_locks(trans, path, false, &f, in __bch2_trans_relock()
828 bch2_trans_relock_fail(trans, path, &f, trace, ip); in __bch2_trans_relock()
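
__bch2_trans_relock() is the transaction-wide form: every path flagged should_be_locked must relock successfully, and the first failure is reported through bch2_trans_relock_fail() before the transaction restarts. A compact model of that loop, with a stderr message standing in for the kernel's trace-and-printbuf diagnostics:

    #include <stdbool.h>
    #include <stdio.h>

    #define NR_PATHS 2

    struct toy_path {
        bool should_be_locked;
        bool relocks_ok;        /* would btree_path_get_locks() succeed? */
    };

    /* Relock every path the transaction promised to keep locked; the first
     * failure fails the whole transaction -- as in __bch2_trans_relock(). */
    static int trans_relock(struct toy_path *paths, int nr)
    {
        for (int i = 0; i < nr; i++) {
            struct toy_path *path = &paths[i];

            if (path->should_be_locked && !path->relocks_ok) {
                fprintf(stderr, "relock failed on path %d\n", i);
                return -1;      /* caller restarts the transaction */
            }
        }
        return 0;
    }

    int main(void)
    {
        struct toy_path paths[NR_PATHS] = {
            { .should_be_locked = true, .relocks_ok = true  },
            { .should_be_locked = true, .relocks_ok = false },
        };

        printf("trans_relock: %d\n", trans_relock(paths, NR_PATHS));
        return 0;
    }
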
864 struct btree_path *path; in bch2_trans_unlock_write() local
867 trans_for_each_path(trans, path, i) in bch2_trans_unlock_write()
869 if (btree_node_write_locked(path, l)) in bch2_trans_unlock_write()
870 bch2_btree_node_unlock_write(trans, path, path->l[l].b); in bch2_trans_unlock_write()
885 void __bch2_btree_path_verify_locks(struct btree_trans *trans, struct btree_path *path) in __bch2_btree_path_verify_locks() argument
887 if (!path->nodes_locked && btree_path_node(path, path->level)) { in __bch2_btree_path_verify_locks()
893 BUG_ON(path->uptodate == BTREE_ITER_UPTODATE); in __bch2_btree_path_verify_locks()
894 BUG_ON(path->should_be_locked && trans->locked && !trans->restarted); in __bch2_btree_path_verify_locks()
897 if (!path->nodes_locked) in __bch2_btree_path_verify_locks()
901 int want = btree_lock_want(path, l); in __bch2_btree_path_verify_locks()
902 int have = btree_node_locked_type_nowrite(path, l); in __bch2_btree_path_verify_locks()
904 BUG_ON(!is_btree_node(path, l) && have != BTREE_NODE_UNLOCKED); in __bch2_btree_path_verify_locks()
906 BUG_ON(is_btree_node(path, l) && want != have); in __bch2_btree_path_verify_locks()
908 BUG_ON(btree_node_locked(path, l) && in __bch2_btree_path_verify_locks()
909 path->l[l].lock_seq != six_lock_seq(&path->l[l].b->c.lock)); in __bch2_btree_path_verify_locks()
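
__bch2_btree_path_verify_locks() cross-checks three per-level invariants visible in the matches: a level without a btree node must be unlocked, a level with one must be locked exactly as btree_lock_want() requests, and any held lock's saved lock_seq must still match the six lock's current sequence. A hedged sketch of the same checks using assert():

    #include <assert.h>
    #include <stdio.h>

    enum lock_type { UNLOCKED, READ_LOCKED, INTENT_LOCKED };

    struct toy_level {
        int exists;                     /* is_btree_node() */
        enum lock_type want;            /* btree_lock_want() */
        enum lock_type have;            /* btree_node_locked_type() */
        unsigned saved_seq, lock_seq;   /* path seq vs. six lock seq */
    };

    /* Per-level invariants, in the spirit of __bch2_btree_path_verify_locks(). */
    static void verify_level(const struct toy_level *l)
    {
        /* no btree node at this level -> must be unlocked */
        assert(l->exists || l->have == UNLOCKED);
        /* a real node must be locked exactly as wanted */
        assert(!l->exists || l->want == l->have);
        /* a held lock's saved sequence number must still match */
        assert(l->have == UNLOCKED || l->saved_seq == l->lock_seq);
    }

    int main(void)
    {
        struct toy_level l = {
            .exists = 1,
            .want = INTENT_LOCKED, .have = INTENT_LOCKED,
            .saved_seq = 9, .lock_seq = 9,
        };

        verify_level(&l);
        printf("lock invariants hold\n");
        return 0;
    }
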
915 struct btree_path *path; in bch2_trans_locked() local
918 trans_for_each_path(trans, path, i) in bch2_trans_locked()
919 if (path->nodes_locked) in bch2_trans_locked()
931 struct btree_path *path; in __bch2_trans_verify_locks() local
934 trans_for_each_path(trans, path, i) in __bch2_trans_verify_locks()
935 __bch2_btree_path_verify_locks(trans, path); in __bch2_trans_verify_locks()