Searched refs:b (Results 1 – 25 of 248) sorted by relevance

/fs/bcachefs/
bset.c
108 bch2_dump_bset(c, b, bset(b, t), t - b->set); in bch2_dump_btree_node()
390 b->nsets = 0; in bch2_btree_keys_init()
391 memset(&b->nr, 0, sizeof(b->nr)); in bch2_btree_keys_init()
396 bch2_bset_set_no_aux_tree(b, b->set); in bch2_btree_keys_init()
692 __btree_node_key_to_offset(b, btree_bkey_first(b, t)); in __build_rw_aux_tree()
710 t->size = min(bkey_to_cacheline(b, t, btree_bkey_last(b, t)), in __build_ro_aux_tree()
802 t = &b->set[b->nsets++]; in bch2_bset_init_first()
811 BUG_ON(bset_byte_offset(b, bne) >= btree_buf_bytes(b)); in bch2_bset_init_next()
819 t = &b->set[b->nsets++]; in bch2_bset_init_next()
1003 btree_keys_account_key_add(&b->nr, t - b->set, src); in bch2_bset_insert()
[all …]
btree_cache.c
40 if (r->b) in bch2_recalc_btree_reserve()
112 munmap(b->aux_data, btree_aux_data_bytes(b)); in __btree_node_data_free()
147 BUG_ON(b->data || b->aux_data); in btree_node_data_alloc()
151 b->data = kvmalloc(btree_buf_bytes(b), gfp); in btree_node_data_alloc()
157 b->aux_data = mmap(NULL, btree_aux_data_bytes(b), in btree_node_data_alloc()
177 if (!b) in __btree_node_mem_alloc()
190 if (!b) in __bch2_btree_node_mem_alloc()
218 if (b != btree_node_root(c, b) && !btree_node_pinned(b)) { in bch2_node_pin()
275 b->hash_val = btree_ptr_hash_val(&b->key); in __bch2_btree_node_hash_insert()
1333 if (b) in bch2_btree_node_prefetch()
[all …]
btree_io.c
164 b = (b - 1) / 2; in sort_bkey_ptrs()
167 b = (b - 1) / 2; in sort_bkey_ptrs()
358 set_btree_bset(b, b->set, &b->data->keys); in btree_node_sort()
432 if (!bset_written(b, bset(b, &b->set[unwritten_idx]))) in btree_node_compact()
490 BUG_ON(bset_written(b, bset(b, &b->set[1]))); in bch2_btree_init_next()
1290 set_btree_bset(b, b->set, &b->data->keys); in bch2_btree_node_read_done()
1392 struct btree *b = rb->b; in btree_node_read_work() local
1565 struct btree *b = ra->b; in CLOSURE_CALLBACK() local
1735 ra->b = b; in btree_node_read_all_replicas()
1754 rb->b = b; in btree_node_read_all_replicas()
[all …]
btree_update_interior.c
68 if (b == btree_node_root(c, b)) { in bch2_btree_node_check_topology()
276 p->b[p->nr++] = b; in bch2_btree_node_free_never_used()
377 b = p->b[--p->nr]; in bch2_btree_node_alloc()
541 p->b[p->nr++] = b; in bch2_btree_reserve_get()
757 b = READ_ONCE(as->b); in btree_update_nodes_written()
777 path->l[b->c.level].b = b; in btree_update_nodes_written()
791 if (as->b == b) { in btree_update_nodes_written()
900 as->b = b; in btree_update_updated_node()
1032 b->ob.v[--b->ob.nr]; in bch2_btree_update_get_open_buckets()
1329 bch2_btree_id_root(c, b->c.btree_id)->b = b; in bch2_btree_set_root_inmem()
[all …]
btree_update_interior.h
80 struct btree *b; member
150 struct btree *b; in bch2_foreground_maybe_merge_sibling() local
157 b = path->l[level].b; in bch2_foreground_maybe_merge_sibling()
219 b->sib_u64s[0] = b->nr.live_u64s; in btree_node_reset_sib_u64s()
220 b->sib_u64s[1] = b->nr.live_u64s; in btree_node_reset_sib_u64s()
225 return (void *) b->data + btree_buf_bytes(b); in btree_data_end()
230 return (void *) ((u64 *) btree_data_end(b) - b->whiteout_u64s); in unwritten_whiteouts_start()
240 return (void *) b->data + (b->written << 9); in write_block()
273 btree_bkey_last(b, bset_tree_last(b))); in bch2_btree_keys_u64s_remaining()
277 if (bset_written(b, btree_bset_last(b))) in bch2_btree_keys_u64s_remaining()
[all …]
bset.h
232 BUG_ON(t < b->set); in bch2_bset_set_no_aux_tree()
234 for (; t < b->set + ARRAY_SIZE(b->set); t++) { in bch2_bset_set_no_aux_tree()
246 b->format = f; in btree_node_set_format()
249 len = bch2_compile_bkey_format(&b->format, b->aux_data); in btree_node_set_format()
252 b->unpack_fn_len = len; in btree_node_set_format()
254 bch2_bset_set_no_aux_tree(b, b->set); in btree_node_set_format()
301 for_each_bset(b, t) in bch2_bkey_to_bset_inlined()
381 return bkey_iter_cmp(b, in btree_node_iter_cmp()
406 struct btree *b) in __bch2_btree_node_iter_peek_all() argument
485 b->nr.live_u64s += delta; in btree_keys_account_val_delta()
[all …]
btree_locking.h
176 if (!b->c.lock.write_lock_recurse) { in __bch2_btree_node_unlock_write()
181 linked->l[b->c.level].lock_seq++; in __bch2_btree_node_unlock_write()
184 six_unlock_write(&b->c.lock); in __bch2_btree_node_unlock_write()
189 struct btree *b) in bch2_btree_node_unlock_write_inlined() argument
191 EBUG_ON(path->l[b->c.level].b != b); in bch2_btree_node_unlock_write_inlined()
192 EBUG_ON(path->l[b->c.level].lock_seq != six_lock_seq(&b->c.lock)); in bch2_btree_node_unlock_write_inlined()
235 trans->locking = b; in __btree_node_lock_nopath()
278 if (&path->l[level].b->c == b && in btree_node_lock_increment()
289 struct btree_bkey_cached_common *b, in btree_node_lock() argument
318 EBUG_ON(&path->l[b->level].b->c != b); in __btree_node_lock_write()
[all …]
journal_sb.c
27 u64 *b; in bch2_sb_journal_validate() local
34 if (!b) in bch2_sb_journal_validate()
42 if (!b[0]) { in bch2_sb_journal_validate()
60 if (b[i] == b[i + 1]) { in bch2_sb_journal_validate()
67 kfree(b); in bch2_sb_journal_validate()
116 b = kmalloc_array(nr, sizeof(*b), GFP_KERNEL); in bch2_sb_journal_v2_validate()
117 if (!b) in bch2_sb_journal_v2_validate()
124 if (b[i].end <= b[i].start) { in bch2_sb_journal_v2_validate()
134 sort(b, nr, sizeof(*b), u64_range_cmp, NULL); in bch2_sb_journal_v2_validate()
154 if (b[i].end > b[i + 1].start) { in bch2_sb_journal_v2_validate()
[all …]
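
The journal_sb.c hits above (source lines 124, 134 and 154) show the usual pattern for validating a set of bucket ranges: reject empty or inverted ranges, sort by start, then check that no range runs past the start of its successor. A minimal standalone sketch of that pattern, assuming illustrative struct and function names rather than the bcachefs ones:

    /* Overlap check by sort-then-compare-neighbours; all names illustrative. */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdlib.h>

    struct u64_range {
        uint64_t start;
        uint64_t end;   /* exclusive */
    };

    static int u64_range_cmp(const void *a, const void *b)
    {
        const struct u64_range *ra = a, *rb = b;

        return ra->start < rb->start ? -1 : ra->start > rb->start;
    }

    static bool ranges_valid(struct u64_range *r, size_t nr)
    {
        for (size_t i = 0; i < nr; i++)
            if (r[i].end <= r[i].start)
                return false;           /* empty or inverted range */

        qsort(r, nr, sizeof(*r), u64_range_cmp);

        for (size_t i = 0; i + 1 < nr; i++)
            if (r[i].end > r[i + 1].start)
                return false;           /* neighbours overlap */

        return true;
    }
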
btree_types.h
333 struct btree *b; member
415 return !b->cached in btree_node_pos()
676 return b->writes + btree_node_write_idx(b); in btree_current_write()
681 return b->writes + (btree_node_write_idx(b) ^ 1); in btree_prev_write()
687 return b->set + b->nsets - 1; in bset_tree_last()
714 __btree_node_ptr_to_offset(b, vstruct_last(bset(b, t))); in set_btree_bset_end()
726 return bset(b, b->set); in btree_bset_first()
731 return bset(b, bset_tree_last(b)); in btree_bset_last()
775 return bset_u64s(t) - b->nr.bset_u64s[t - b->set]; in bset_dead_u64s()
800 return __btree_node_type(b->c.level, b->c.btree_id); in btree_node_type()
[all …]
btree_gc.c
187 ret = bch2_journal_key_delete(c, b->c.btree_id, b->c.level + 1, b->key.k.p); in set_node_max()
237 bch2_btree_id_level_to_text(&buf, b->c.btree_id, b->c.level); in btree_check_node_boundaries()
306 bch2_btree_id_level_to_text(&buf, b->c.btree_id, b->c.level); in btree_repair_node_end()
365 b->c.btree_id, b->c.level - 1, in bch2_btree_repair_topology_recurse()
476 b->c.btree_id, b->c.level - 1, in bch2_btree_repair_topology_recurse()
599 struct btree *b = r->b; in bch2_check_topology() local
642 struct btree *b = path_l(path)->b; in bch2_gc_mark_key() local
649 *prev = b; in bch2_gc_mark_key()
751 if (b != btree_node_root(c, b)) { in bch2_gc_btree()
1226 b < gens->nbuckets; b++) in bch2_gc_gens()
[all …]
buckets_waiting_for_journal.c
32 mutex_lock(&b->lock); in bch2_bucket_journal_seq_ready()
33 t = b->t; in bch2_bucket_journal_seq_ready()
44 mutex_unlock(&b->lock); in bch2_bucket_journal_seq_ready()
97 mutex_lock(&b->lock); in bch2_set_bucket_needs_journal_commit()
102 t = b->t; in bch2_set_bucket_needs_journal_commit()
141 b->t = n; in bch2_set_bucket_needs_journal_commit()
147 mutex_unlock(&b->lock); in bch2_set_bucket_needs_journal_commit()
156 kvfree(b->t); in bch2_fs_buckets_waiting_for_journal_exit()
165 mutex_init(&b->lock); in bch2_fs_buckets_waiting_for_journal_init()
167 b->t = kvmalloc(sizeof(*b->t) + in bch2_fs_buckets_waiting_for_journal_init()
[all …]
eytzinger.c
49 *(u32 *)(b + n) = t; in swap_words_32()
75 *(u64 *)(b + n) = t; in swap_words_64()
80 *(u32 *)(b + n) = t; in swap_words_64()
84 *(u32 *)(b + n) = t; in swap_words_64()
101 ((char *)a)[n] = ((char *)b)[n]; in swap_bytes()
102 ((char *)b)[n] = t; in swap_bytes()
133 swap_words_64(a, b, size); in do_swap()
135 swap_words_32(a, b, size); in do_swap()
137 swap_bytes(a, b, size); in do_swap()
148 return cmp(a, b, priv); in do_cmp()
[all …]
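
The eytzinger.c results come from sort-style swap helpers: do_swap() (source lines 133-138 above) picks a 64-bit, 32-bit or byte-wise swap loop depending on what the element size allows. A simplified standalone sketch of that dispatch, with plain size/alignment checks and without the extra u32 stores visible at lines 80-84 (which suggest special handling for builds lacking efficient 64-bit accesses):

    /*
     * Size-based swap dispatch, standalone sketch; size is assumed nonzero
     * and a multiple of the chosen word width before each loop runs.
     */
    #include <stddef.h>
    #include <stdint.h>

    static void swap_words_64(void *a, void *b, size_t n)
    {
        do {
            n -= 8;
            uint64_t t = *(uint64_t *)((char *)a + n);
            *(uint64_t *)((char *)a + n) = *(uint64_t *)((char *)b + n);
            *(uint64_t *)((char *)b + n) = t;
        } while (n);
    }

    static void swap_words_32(void *a, void *b, size_t n)
    {
        do {
            n -= 4;
            uint32_t t = *(uint32_t *)((char *)a + n);
            *(uint32_t *)((char *)a + n) = *(uint32_t *)((char *)b + n);
            *(uint32_t *)((char *)b + n) = t;
        } while (n);
    }

    static void swap_bytes(void *a, void *b, size_t n)
    {
        do {
            n--;
            char t = ((char *)a)[n];
            ((char *)a)[n] = ((char *)b)[n];
            ((char *)b)[n] = t;
        } while (n);
    }

    /* Pick the widest swap the element size and pointer alignment allow. */
    static void do_swap(void *a, void *b, size_t size)
    {
        if (!(size % 8) && !((uintptr_t)a % 8) && !((uintptr_t)b % 8))
            swap_words_64(a, b, size);
        else if (!(size % 4) && !((uintptr_t)a % 4) && !((uintptr_t)b % 4))
            swap_words_32(a, b, size);
        else
            swap_bytes(a, b, size);
    }
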
btree_trans_commit.c
78 insert_l(trans, &i[0])->b == insert_l(trans, &i[-1])->b; in same_leaf_as_prev()
85 insert_l(trans, &i[0])->b == insert_l(trans, &i[1])->b; in same_leaf_as_next()
90 struct btree *b) in bch2_btree_node_prep_for_write() argument
102 if (want_new_bset(c, b)) in bch2_btree_node_prep_for_write()
155 struct btree *b, in bch2_btree_bset_insert_key() argument
163 EBUG_ON(bset_written(b, btree_bset_last(b))); in bch2_btree_bset_insert_key()
218 k = bch2_btree_node_iter_bset_pos(node_iter, b, bset_tree_last(b)); in bch2_btree_bset_insert_key()
241 old = READ_ONCE(b->flags); in __btree_node_flush()
296 struct btree *b = path_l(path)->b; in bch2_btree_insert_key_leaf() local
320 b->sib_u64s[0] = max(0, (int) b->sib_u64s[0] + live_u64s_added); in bch2_btree_insert_key_leaf()
[all …]
btree_locking.c
15 lockdep_set_notrack_class(&b->lock); in bch2_btree_lock_init()
31 if (IS_ERR_OR_NULL(b)) in bch2_btree_node_lock_counts()
35 if (path != skip && &path->l[level].b->c == b) { in bch2_btree_node_lock_counts()
349 b = &READ_ONCE(path->l[top->level].b)->c; in bch2_check_for_deadlock()
351 if (IS_ERR_OR_NULL(b)) { in bch2_check_for_deadlock()
426 int readers = bch2_btree_node_lock_counts(trans, NULL, b, b->level).n[SIX_LOCK_read]; in __bch2_btree_node_lock_write()
483 f->b = path->l[l].b; in btree_path_get_locks()
507 path->l[l].b = upgrade in btree_path_get_locks()
542 struct btree *b = path->l[level].b; in bch2_btree_node_upgrade() local
704 prt_printf(&buf, "node %s ", IS_ERR(f.b) ? bch2_err_str(PTR_ERR(f.b)) : in __bch2_btree_path_upgrade()
[all …]
btree_iter.c
636 path->l[b->c.level].b = b; in bch2_btree_path_level_init()
709 if (path->l[level].b == b) { in bch2_trans_node_drop()
746 struct btree *b = READ_ONCE(r->b); in btree_path_lock_root() local
776 if (likely(b == READ_ONCE(r->b) && in btree_path_lock_root()
781 path->l[path->level].b = b; in btree_path_lock_root()
1527 struct btree *b = path_l(path)->b; in bch2_btree_path_to_text_short() local
1925 if (!b) in bch2_btree_iter_peek_node()
1941 return b; in bch2_btree_iter_peek_node()
1957 return b; in bch2_btree_iter_peek_node_and_restart()
2028 b = path->l[path->level].b; in bch2_btree_iter_next_node()
[all …]
debug.c
123 b->written, v->written); in bch2_btree_verify_replica()
143 bch2_btree_node_io_lock(b); in __bch2_btree_verify()
158 BUG_ON(b->nsets != 1); in __bch2_btree_verify()
165 bkey_copy(&v->key, &b->key); in __bch2_btree_verify()
166 v->c.level = b->c.level; in __bch2_btree_verify()
412 : b->key.k.p; in bch2_read_btree_formats()
465 struct btree *b) in bch2_cached_btree_node_to_text() argument
471 bch2_btree_id_level_to_text(out, b->c.btree_id, b->c.level); in bch2_cached_btree_node_to_text()
489 &b->writes[0].journal, b->writes[0].journal.seq); in bch2_cached_btree_node_to_text()
491 &b->writes[1].journal, b->writes[1].journal.seq); in bch2_cached_btree_node_to_text()
[all …]
buckets.h
22 return ((sector_t) b) * ca->mi.bucket_size; in bucket_to_sector()
51 static inline void bucket_lock(struct bucket *b) in bucket_lock() argument
59 return bucket_valid(ca, b) in gc_bucket()
60 ? genradix_ptr(&ca->buckets_gc, b) in gc_bucket()
76 return gens->b + b; in bucket_gen()
81 u8 *gen = bucket_gen(ca, b); in bucket_gen_get_rcu()
88 return bucket_gen_get_rcu(ca, b); in bucket_gen_get()
135 static inline int gen_cmp(u8 a, u8 b) in gen_cmp() argument
137 return (s8) (a - b); in gen_cmp()
140 static inline int gen_after(u8 a, u8 b) in gen_after() argument
[all …]
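
gen_cmp() and gen_after() (source lines 135-140 above) compare 8-bit bucket generations that are allowed to wrap around: the unsigned difference is reinterpreted as a signed 8-bit value, so a counter that has recently wrapped still compares as newer. A small self-contained illustration of why that works:

    #include <stdbool.h>
    #include <stdint.h>

    /* Positive result: a is the newer generation; negative: a is older. */
    static inline int gen_cmp(uint8_t a, uint8_t b)
    {
        return (int8_t) (a - b);
    }

    static inline bool gen_after(uint8_t a, uint8_t b)
    {
        return gen_cmp(a, b) > 0;
    }

    /*
     * Example: after the counter wraps, generation 2 is still "after" 250,
     * because (uint8_t)(2 - 250) == 8 and (int8_t)8 > 0; going the other
     * way, (uint8_t)(250 - 2) == 248, which reads back as (int8_t)-8.
     */
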
/fs/smb/common/
cifs_md4.c
51 #define ROUND1(a,b,c,d,k,s) (a = lshift(a + F(b,c,d) + k, s)) argument
52 #define ROUND2(a,b,c,d,k,s) (a = lshift(a + G(b,c,d) + k + (u32)0x5A827999,s)) argument
53 #define ROUND3(a,b,c,d,k,s) (a = lshift(a + H(b,c,d) + k + (u32)0x6ED9EBA1,s)) argument
57 u32 a, b, c, d; in md4_transform() local
60 b = hash[1]; in md4_transform()
64 ROUND1(a, b, c, d, in[0], 3); in md4_transform()
65 ROUND1(d, a, b, c, in[1], 7); in md4_transform()
68 ROUND1(a, b, c, d, in[4], 3); in md4_transform()
69 ROUND1(d, a, b, c, in[5], 7); in md4_transform()
72 ROUND1(a, b, c, d, in[8], 3); in md4_transform()
[all …]
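
ROUND1/ROUND2/ROUND3 (source lines 51-53 above) each combine one of MD4's three auxiliary functions with a left rotation; 0x5A827999 and 0x6ED9EBA1 are the standard round-2 and round-3 additive constants from RFC 1320. A sketch of the conventional definitions those macros lean on, assuming lshift() is a plain 32-bit rotate left (the in-tree helper may differ in detail):

    #include <stdint.h>

    /* 32-bit rotate left; MD4 only uses shift amounts between 3 and 19. */
    static inline uint32_t lshift(uint32_t x, unsigned int s)
    {
        return (x << s) | (x >> (32 - s));
    }

    /* MD4 auxiliary functions F, G, H (RFC 1320). */
    #define F(x, y, z)  (((x) & (y)) | (~(x) & (z)))
    #define G(x, y, z)  (((x) & (y)) | ((x) & (z)) | ((y) & (z)))
    #define H(x, y, z)  ((x) ^ (y) ^ (z))

    #define ROUND1(a, b, c, d, k, s) ((a) = lshift((a) + F(b, c, d) + (k), (s)))
    #define ROUND2(a, b, c, d, k, s) ((a) = lshift((a) + G(b, c, d) + (k) + (uint32_t)0x5A827999, (s)))
    #define ROUND3(a, b, c, d, k, s) ((a) = lshift((a) + H(b, c, d) + (k) + (uint32_t)0x6ED9EBA1, (s)))
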
/fs/
d_path.c
170 b = *p; in prepend_path()
190 b = *p; in prepend_path()
192 if (b.len == p->len) in prepend_path()
195 *p = b; in prepend_path()
221 prepend_char(&b, 0); in __d_path()
233 prepend_char(&b, 0); in d_absolute_path()
290 prepend_char(&b, 0); in d_path()
340 b = *p; in __dentry_path()
357 if (b.len == p->len) in __dentry_path()
366 prepend_char(&b, 0); in dentry_path_raw()
[all …]
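
d_path.c assembles path strings right to left: the walk goes from the dentry up toward the root, and each component is pushed in front of what has already been written, which is why the snippets above call prepend_char(&b, 0) to plant the terminating NUL before any component goes in. A simplified standalone sketch of that prepend-buffer pattern; the struct layout and helpers are illustrative, not the kernel's exact definitions:

    /*
     * Prepend-buffer sketch: the string grows leftwards from the end of a
     * fixed buffer; characters that no longer fit are silently dropped.
     */
    #include <stdio.h>

    struct prepend_buffer {
        char *buf;      /* current start of the assembled string */
        int len;        /* space still available in front of buf */
    };

    static void prepend_char(struct prepend_buffer *p, char c)
    {
        if (p->len > 0) {
            p->len--;
            *--p->buf = c;
        }
    }

    static void prepend(struct prepend_buffer *p, const char *s, int n)
    {
        while (n--)
            prepend_char(p, s[n]);      /* push characters back to front */
    }

    int main(void)
    {
        char space[64];
        struct prepend_buffer b = { space + sizeof(space), sizeof(space) };

        prepend_char(&b, 0);            /* terminating NUL goes in first */
        prepend(&b, "/file", 5);
        prepend(&b, "/dir", 4);
        printf("%s\n", b.buf);          /* prints "/dir/file" */
        return 0;
    }
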
/fs/afs/
vl_list.c
95 const u8 *b = *_b; in afs_extract_vl_addrs() local
116 memcpy(x, b, 4); in afs_extract_vl_addrs()
120 b += 4; in afs_extract_vl_addrs()
128 memcpy(x, b, 16); in afs_extract_vl_addrs()
132 b += 16; in afs_extract_vl_addrs()
147 *_b = b; in afs_extract_vl_addrs()
151 *_b = b; in afs_extract_vl_addrs()
200 b += sizeof(*hdr); in afs_extract_vlserver_list()
206 bs.source = *b++; in afs_extract_vlserver_list()
207 bs.status = *b++; in afs_extract_vlserver_list()
[all …]
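
afs_extract_vl_addrs() walks a wire-format buffer with a byte cursor: 4 bytes are copied and skipped per IPv4 entry, 16 per IPv6 entry, and the updated cursor is handed back to the caller through *_b (source lines 116-151 above). A generic sketch of that cursor-and-copy pattern, with purely illustrative types; the caller is assumed to pass a cursor no further than end:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    struct addr {
        int family;             /* 4 or 6, illustrative */
        uint8_t bytes[16];
    };

    static size_t extract_addrs(const uint8_t **_b, const uint8_t *end,
                                struct addr *out, size_t max, int family)
    {
        const uint8_t *b = *_b;
        size_t n = (family == 4) ? 4 : 16;
        size_t count = 0;

        while (count < max && (size_t)(end - b) >= n) {
            out[count].family = family;
            memcpy(out[count].bytes, b, n); /* copy one wire-format address */
            b += n;                         /* advance the cursor */
            count++;
        }

        *_b = b;        /* hand the updated cursor back to the caller */
        return count;
    }
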
/fs/ext4/
hash.c
44 #define ROUND(f, a, b, c, d, x, s) \ argument
45 (a += f(b, c, d) + x, a = rol32(a, s))
58 ROUND(F, a, b, c, d, in[0] + K1, 3); in half_md4_transform()
59 ROUND(F, d, a, b, c, in[1] + K1, 7); in half_md4_transform()
60 ROUND(F, c, d, a, b, in[2] + K1, 11); in half_md4_transform()
61 ROUND(F, b, c, d, a, in[3] + K1, 19); in half_md4_transform()
62 ROUND(F, a, b, c, d, in[4] + K1, 3); in half_md4_transform()
63 ROUND(F, d, a, b, c, in[5] + K1, 7); in half_md4_transform()
64 ROUND(F, c, d, a, b, in[6] + K1, 11); in half_md4_transform()
65 ROUND(F, b, c, d, a, in[7] + K1, 19); in half_md4_transform()
[all …]
/fs/nfs/blocklayout/
dev.c
119 b->type = be32_to_cpup(p++); in nfs4_block_decode_volume()
121 switch (b->type) { in nfs4_block_decode_volume()
127 if (!b->simple.nr_sigs || b->simple.nr_sigs > PNFS_BLOCK_MAX_UUIDS) { in nfs4_block_decode_volume()
132 b->simple.len = 4 + 4; in nfs4_block_decode_volume()
141 b->simple.sigs[i].sig_len); in nfs4_block_decode_volume()
148 memcpy(&b->simple.sigs[i].sig, p, in nfs4_block_decode_volume()
149 b->simple.sigs[i].sig_len); in nfs4_block_decode_volume()
151 b->simple.len += 8 + 4 + \ in nfs4_block_decode_volume()
161 b->slice.volume = be32_to_cpup(p++); in nfs4_block_decode_volume()
208 if (b->scsi.designator_len > 256) in nfs4_block_decode_volume()
[all …]
/fs/nfsd/
blocklayoutxdr.c
32 p = xdr_encode_opaque_fixed(p, &b->vol_id, in nfsd4_block_encode_layoutget()
34 p = xdr_encode_hyper(p, b->foff); in nfsd4_block_encode_layoutget()
35 p = xdr_encode_hyper(p, b->len); in nfsd4_block_encode_layoutget()
36 p = xdr_encode_hyper(p, b->soff); in nfsd4_block_encode_layoutget()
37 *p++ = cpu_to_be32(b->es); in nfsd4_block_encode_layoutget()
47 switch (b->type) { in nfsd4_block_encode_volume()
54 *p++ = cpu_to_be32(b->type); in nfsd4_block_encode_volume()
57 p = xdr_encode_opaque(p, b->simple.sig, b->simple.sig_len); in nfsd4_block_encode_volume()
65 *p++ = cpu_to_be32(b->type); in nfsd4_block_encode_volume()
66 *p++ = cpu_to_be32(b->scsi.code_set); in nfsd4_block_encode_volume()
[all …]
nfscache.c
161 spin_lock(&b->cache_lock); in nfsd_reply_cache_free()
163 spin_unlock(&b->cache_lock); in nfsd_reply_cache_free()
337 spin_lock(&b->cache_lock); in nfsd_reply_cache_scan()
457 lru_put_end(b, ret); in nfsd_cache_insert()
485 struct nfsd_drc_bucket *b; in nfsd_cache_lookup() local
506 spin_lock(&b->cache_lock); in nfsd_cache_lookup()
513 spin_unlock(&b->cache_lock); in nfsd_cache_lookup()
559 spin_unlock(&b->cache_lock); in nfsd_cache_lookup()
590 struct nfsd_drc_bucket *b; in nfsd_cache_update() local
629 spin_lock(&b->cache_lock); in nfsd_cache_update()
[all …]
/fs/jfs/
jfs_dmap.h
72 #define BLKTODMAP(b,s) \ argument
73 ((((b) >> 13) + ((b) >> 23) + ((b) >> 33) + 3 + 1) << (s))
87 #define BLKTOL0(b,s) \ argument
88 (((((b) >> 23) << 10) + ((b) >> 23) + ((b) >> 33) + 2 + 1) << (s))
102 #define BLKTOL1(b,s) \ argument
103 (((((b) >> 33) << 20) + (((b) >> 33) << 10) + ((b) >> 33) + 1 + 1) << (s))
109 #define BLKTOCTL(b,s,l) \ argument
110 (((l) == 2) ? 1 : ((l) == 1) ? BLKTOL1((b),(s)) : BLKTOL0((b),(s)))
121 #define BLKTOAG(b,sbi) ((b) >> ((sbi)->bmap->db_agl2size)) argument
268 #define LITOL2BSZ(n,m,b) ((((n) == 0) ? (m) : cnttz((n))) + (b)) argument
[all …]
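
BLKTODMAP, BLKTOL0 and BLKTOL1 convert a disk block number into the map page that describes it: the shifts by 13, 23 and 33 count how many dmap, level-0 and level-1 pages precede block b, the small additive constants account for the fixed control pages in front of them, and << (s) scales the resulting page index (s looks like a log2 blocks-per-page factor). A worked example of the BLKTODMAP arithmetic with purely illustrative values:

    /* BLKTODMAP arithmetic, copied from the macro above; values illustrative. */
    #include <stdio.h>
    #include <stdint.h>

    #define BLKTODMAP(b, s) \
        ((((b) >> 13) + ((b) >> 23) + ((b) >> 33) + 3 + 1) << (s))

    int main(void)
    {
        uint64_t b = 100000;    /* arbitrary block number */
        int s = 2;              /* illustrative scale factor */

        /*
         * 100000 >> 13 = 12, >> 23 = 0, >> 33 = 0, so the page index is
         * 12 + 0 + 0 + 3 + 1 = 16 and BLKTODMAP(b, s) = 16 << 2 = 64.
         */
        printf("%llu\n", (unsigned long long) BLKTODMAP(b, s));
        return 0;
    }
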

Completed in 808 milliseconds
