Home
last modified time | relevance | path

Searched refs:paths (Results 1 – 14 of 14) sorted by relevance

/fs/bcachefs/
A D btree_iter.c 1183 path = &trans->paths[path_idx]; in bch2_btree_path_traverse_one()
1263 btree_path_copy(trans, trans->paths + new, trans->paths + src); in btree_path_clone()
1678 struct btree_path *paths = p; in btree_paths_realloc() local
1679 *trans_paths_nr(paths) = nr; in btree_paths_realloc()
1680 memcpy(paths, trans->paths, trans->nr_paths * sizeof(struct btree_path)); in btree_paths_realloc()
1693 rcu_assign_pointer(trans->paths, paths); in btree_paths_realloc()
1776 path = trans->paths + path_idx; in bch2_path_get()
3479 trans->paths = trans->_paths; in __bch2_trans_get()
3593 trans->paths = NULL; in bch2_trans_put()
3673 struct btree_path *paths = rcu_dereference(trans->paths); in bch2_btree_trans_to_text() local
[all …]
A D btree_iter.h 23 unsigned idx = path - trans->paths; in __btree_path_get()
39 EBUG_ON(path - trans->paths >= trans->nr_paths); in __btree_path_put()
89 return &container_of(paths, struct btree_trans_paths, paths[0])->nr_paths; in trans_paths_nr()
94 unsigned long *v = trans_paths_nr(paths); in trans_paths_allocated()
116 return trans->paths + *idx; in __trans_next_path()
146 ? trans->paths + trans->sorted[idx] in next_btree_path()
155 ? trans->paths + trans->sorted[idx - 1] in prev_btree_path()
215 if (trans->paths[path].ref > 1 || in bch2_btree_path_make_mut()
216 trans->paths[path].preserve) in bch2_btree_path_make_mut()
218 trans->paths[path].should_be_locked = false; in bch2_btree_path_make_mut()
[all …]
A D btree_update_interior.c 773 struct btree_path *path = trans->paths + path_idx; in btree_update_nodes_written()
1615 struct btree_path *path = trans->paths + path_idx; in btree_split_insert_keys()
1701 trans->paths[path2].locks_want++; in btree_split()
1916 struct btree *b = path_l(trans->paths + path)->b; in bch2_btree_split_leaf()
1922 trans->paths[path].level, in bch2_btree_split_leaf()
1935 for (l = trans->paths[path].level + 1; in bch2_btree_split_leaf()
1947 struct btree_path *path = trans->paths + path_idx; in __btree_increase_depth()
2023 BUG_ON(!trans->paths[path].should_be_locked); in __bch2_foreground_maybe_merge()
2042 b = trans->paths[path].l[level].b; in __bch2_foreground_maybe_merge()
2062 m = trans->paths[sib_path].l[level].b; in __bch2_foreground_maybe_merge()
[all …]
A D btree_trans_commit.c 51 struct bkey_s_c k = bch2_btree_path_peek_slot_exact(trans->paths + i->path, &u); in verify_update_old_key()
71 return (trans->paths + i->path)->l + i->level; in insert_l()
112 bch2_btree_node_unlock_write(trans, trans->paths + i->path, insert_l(trans, i)->b); in trans_lock_write_fail()
131 bch2_btree_node_prep_for_write(trans, trans->paths + i->path, insert_l(trans, i)->b); in bch2_trans_lock_write()
142 if (btree_node_locked_type(trans->paths + i->path, i->level) == in bch2_trans_unlock_updates_write()
145 trans->paths + i->path, insert_l(trans, i)->b); in bch2_trans_unlock_updates_write()
336 struct btree_path *path = trans->paths + i->path; in btree_insert_entry_checks()
621 : btree_key_can_insert_cached(trans, flags, trans->paths + i->path, u64s); in bch2_trans_commit_write_locked()
778 struct btree_path *path = trans->paths + i->path; in bch2_trans_commit_write_locked()
917 trace_ip, trans->paths + i->path); in bch2_trans_commit_error()
[all …]
A D trace.h 676 __entry->path_idx = path - trans->paths;
745 __entry->path_idx = path - trans->paths;
1107 const char *paths),
1108 TP_ARGS(trans, caller_ip, paths)
1525 __entry->idx = path - trans->paths;
1566 __entry->idx = path - trans->paths;
1596 __entry->idx = path - trans->paths;
1633 __entry->idx = path - trans->paths;
1634 __entry->new_idx = new - trans->paths;
1687 __entry->idx = path - trans->paths;
[all …]
A D btree_key_cache.c 316 struct btree_path *ck_path = trans->paths + ck_path_idx; in btree_key_cache_fill()
339 ck_path = trans->paths + ck_path_idx; in btree_key_cache_fill()
363 struct btree_path *path = trans->paths + path_idx; in btree_path_traverse_cached_fast()
393 EBUG_ON(trans->paths[path_idx].level); in bch2_btree_path_traverse_cached()
402 struct btree_path *path = trans->paths + path_idx; in bch2_btree_path_traverse_cached()
578 struct bkey_cached *ck = (void *) (trans->paths + insert_entry->path)->l[0].b; in bch2_btree_insert_key_cached()
A D btree_locking.c 323 struct btree_path *paths = rcu_dereference(top->trans->paths); in bch2_check_for_deadlock() local
324 if (!paths) in bch2_check_for_deadlock()
327 unsigned long *paths_allocated = trans_paths_allocated(paths); in bch2_check_for_deadlock()
329 trans_for_each_path_idx_from(paths_allocated, *trans_paths_nr(paths), in bch2_check_for_deadlock()
331 struct btree_path *path = paths + path_idx; in bch2_check_for_deadlock()
A D btree_update.c 343 struct btree_path *btree_path = trans->paths + path_idx; in flush_new_cached_update()
374 struct btree_path *path = trans->paths + path_idx; in bch2_trans_update_by_path()
436 __btree_path_get(trans, trans->paths + i->path, true); in bch2_trans_update_by_path()
480 ck = (void *) trans->paths[iter->key_cache_path].l[0].b; in bch2_trans_update_get_key_cache()
487 btree_path_set_should_be_locked(trans, trans->paths + iter->key_cache_path); in bch2_trans_update_get_key_cache()
519 struct btree_path *path = trans->paths + path_idx; in bch2_trans_update_ip()
A D btree_types.h 474 struct btree_path paths[]; member
493 struct btree_path *paths; member
578 return trans->paths + iter->path; in btree_iter_path()
584 ? trans->paths + iter->key_cache_path in btree_iter_key_cache_path()
A D Kconfig 68 Randomly inject transaction restarts in a few core paths - may have a
A D btree_update_interior.h 149 struct btree_path *path = trans->paths + path_idx; in bch2_foreground_maybe_merge_sibling()
/fs/smb/client/
A D Kconfig 136 messages in some error paths, slowing performance. This
/fs/xfs/
A D Kconfig 213 and extra sanity-checking functions in various code paths.
/fs/
A D namespace.c 2335 void drop_collected_paths(struct path *paths, struct path *prealloc) in drop_collected_paths() argument
2337 for (struct path *p = paths; p->mnt; p++) in drop_collected_paths()
2339 if (paths != prealloc) in drop_collected_paths()
2340 kfree(paths); in drop_collected_paths()

Completed in 47 milliseconds