Lines matching references to 'a' (source line number, matched line, enclosing function):

57 static inline u64 alloc_field_v1_get(const struct bch_alloc *a,  in alloc_field_v1_get()  argument
63 if (!(a->fields & (1 << field))) in alloc_field_v1_get()
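
The two v1 matches above show the v1 packing scheme: a 'fields' bitmask records which optional fields were written, and only those fields occupy space in the value; absent fields read back as zero. A minimal standalone sketch of that lookup (one byte per field for simplicity; the real BCH_ALLOC_FIELDS_V1 list has per-field widths):

    #include <stdint.h>
    #include <stdio.h>

    enum { FIELD_READ_TIME, FIELD_WRITE_TIME, FIELD_NR };

    struct alloc_v1_sketch {
        uint8_t fields;            /* bit i set => field i present in data[] */
        uint8_t data[FIELD_NR];    /* present fields, packed in order */
    };

    /* Mirrors alloc_field_v1_get(): absent fields read as 0. */
    static uint64_t field_get(const struct alloc_v1_sketch *a, unsigned field)
    {
        if (!(a->fields & (1U << field)))
            return 0;

        unsigned offset = 0;
        for (unsigned i = 0; i < field; i++)
            if (a->fields & (1U << i))
                offset++;          /* skip over earlier present fields */
        return a->data[offset];
    }

    int main(void)
    {
        struct alloc_v1_sketch a = {
            .fields = 1U << FIELD_WRITE_TIME,
            .data   = { 42 },
        };

        printf("read_time=%llu write_time=%llu\n",
               (unsigned long long)field_get(&a, FIELD_READ_TIME),
               (unsigned long long)field_get(&a, FIELD_WRITE_TIME));
        return 0;
    }
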
104 struct bkey_s_c_alloc_v2 a = bkey_s_c_to_alloc_v2(k); in bch2_alloc_unpack_v2() local
105 const u8 *in = a.v->data; in bch2_alloc_unpack_v2()
106 const u8 *end = bkey_val_end(a); in bch2_alloc_unpack_v2()
111 out->gen = a.v->gen; in bch2_alloc_unpack_v2()
112 out->oldest_gen = a.v->oldest_gen; in bch2_alloc_unpack_v2()
113 out->data_type = a.v->data_type; in bch2_alloc_unpack_v2()
116 if (fieldnr < a.v->nr_fields) { \ in bch2_alloc_unpack_v2()
137 struct bkey_s_c_alloc_v3 a = bkey_s_c_to_alloc_v3(k); in bch2_alloc_unpack_v3() local
138 const u8 *in = a.v->data; in bch2_alloc_unpack_v3()
139 const u8 *end = bkey_val_end(a); in bch2_alloc_unpack_v3()
144 out->gen = a.v->gen; in bch2_alloc_unpack_v3()
145 out->oldest_gen = a.v->oldest_gen; in bch2_alloc_unpack_v3()
146 out->data_type = a.v->data_type; in bch2_alloc_unpack_v3()
147 out->need_discard = BCH_ALLOC_V3_NEED_DISCARD(a.v); in bch2_alloc_unpack_v3()
148 out->need_inc_gen = BCH_ALLOC_V3_NEED_INC_GEN(a.v); in bch2_alloc_unpack_v3()
149 out->journal_seq = le64_to_cpu(a.v->journal_seq); in bch2_alloc_unpack_v3()
152 if (fieldnr < a.v->nr_fields) { \ in bch2_alloc_unpack_v3()
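
bch2_alloc_unpack_v2() and _v3() share the pattern visible at source lines 116 and 152: fields are stored in a fixed order, nr_fields records how many were written, and anything past that count defaults to zero, so newer fields decode cleanly from older keys. A standalone sketch, with LEB128 standing in for the kernel's bch2_varint_decode_fast() and hypothetical field names:

    #include <stdint.h>
    #include <stdio.h>

    /* Decode one LEB128 value; return bytes consumed, or -1 on truncation. */
    static int leb128_decode(const uint8_t *in, const uint8_t *end, uint64_t *v)
    {
        const uint8_t *p = in;
        uint64_t out = 0;
        unsigned shift = 0;

        while (p < end) {
            out |= (uint64_t)(*p & 0x7f) << shift;
            if (!(*p++ & 0x80)) {
                *v = out;
                return p - in;
            }
            shift += 7;
        }
        return -1;
    }

    struct unpacked { uint64_t read_time, write_time, dirty_sectors; };

    static int unpack(const uint8_t *in, const uint8_t *end,
                      unsigned nr_fields, struct unpacked *out)
    {
        uint64_t f[3] = { 0, 0, 0 };

        for (unsigned fieldnr = 0; fieldnr < 3; fieldnr++) {
            if (fieldnr < nr_fields) {  /* cf. "if (fieldnr < a.v->nr_fields)" */
                int r = leb128_decode(in, end, &f[fieldnr]);
                if (r < 0)
                    return -1;
                in += r;
            }
            /* else: the key predates this field; keep the zero default */
        }
        out->read_time     = f[0];
        out->write_time    = f[1];
        out->dirty_sectors = f[2];
        return 0;
    }

    int main(void)
    {
        /* a key written when only two fields existed: 300, then 7 */
        uint8_t buf[] = { 0xac, 0x02, 0x07 };
        struct unpacked u;

        if (!unpack(buf, buf + sizeof(buf), 2, &u))
            printf("%llu %llu %llu\n",
                   (unsigned long long)u.read_time,
                   (unsigned long long)u.write_time,
                   (unsigned long long)u.dirty_sectors);
        return 0;
    }
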
189 static unsigned bch_alloc_v1_val_u64s(const struct bch_alloc *a) in bch_alloc_v1_val_u64s() argument
194 if (a->fields & (1 << i)) in bch_alloc_v1_val_u64s()
203 struct bkey_s_c_alloc a = bkey_s_c_to_alloc(k); in bch2_alloc_v1_validate() local
207 bkey_fsck_err_on(bkey_val_u64s(a.k) < bch_alloc_v1_val_u64s(a.v), in bch2_alloc_v1_validate()
210 bkey_val_u64s(a.k), bch_alloc_v1_val_u64s(a.v)); in bch2_alloc_v1_validate()
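
bch_alloc_v1_val_u64s() and the check at source line 207 are the corresponding size validation: the fields bitmask implies a minimum value size, and validation rejects any key whose value is smaller than that. A sketch with hypothetical field widths:

    #include <stdbool.h>
    #include <stdint.h>

    /* Hypothetical per-field byte widths; the real list is BCH_ALLOC_FIELDS_V1(). */
    static const unsigned field_bytes[] = { 2, 2, 1, 1, 1, 1, 1, 8, 8, 4 };

    static unsigned v1_val_u64s(uint8_t fields)
    {
        unsigned bytes = 2;     /* assumed fixed header before the packed fields */

        for (unsigned i = 0; i < sizeof(field_bytes) / sizeof(field_bytes[0]); i++)
            if (fields & (1U << i))
                bytes += field_bytes[i];
        return (bytes + 7) / 8; /* round up to whole u64s */
    }

    /* Mirrors the bkey_fsck_err_on() above: the value must hold every
     * field the bitmask declares present. */
    static bool v1_size_ok(uint8_t fields, unsigned val_u64s)
    {
        return val_u64s >= v1_val_u64s(fields);
    }

    int main(void)
    {
        uint8_t fields = (1U << 0) | (1U << 7);   /* 2 + 8 field bytes */
        return v1_size_ok(fields, 2) ? 0 : 1;     /* 12 bytes -> 2 u64s */
    }
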
244 struct bch_alloc_v4 a; in bch2_alloc_v4_validate() local
247 bkey_val_copy(&a, bkey_s_c_to_alloc_v4(k)); in bch2_alloc_v4_validate()
249 bkey_fsck_err_on(alloc_v4_u64s_noerror(&a) > bkey_val_u64s(k.k), in bch2_alloc_v4_validate()
252 alloc_v4_u64s_noerror(&a), bkey_val_u64s(k.k)); in bch2_alloc_v4_validate()
254 bkey_fsck_err_on(!BCH_ALLOC_V4_BACKPOINTERS_START(&a) && in bch2_alloc_v4_validate()
255 BCH_ALLOC_V4_NR_BACKPOINTERS(&a), in bch2_alloc_v4_validate()
259 bkey_fsck_err_on(alloc_data_type(a, a.data_type) != a.data_type, in bch2_alloc_v4_validate()
262 a.data_type, alloc_data_type(a, a.data_type)); in bch2_alloc_v4_validate()
265 bkey_fsck_err_on(a.io_time[i] > LRU_TIME_MAX, in bch2_alloc_v4_validate()
269 a.io_time[i], LRU_TIME_MAX); in bch2_alloc_v4_validate()
271 unsigned stripe_sectors = BCH_ALLOC_V4_BACKPOINTERS_START(&a) * sizeof(u64) > in bch2_alloc_v4_validate()
273 ? a.stripe_sectors in bch2_alloc_v4_validate()
276 switch (a.data_type) { in bch2_alloc_v4_validate()
281 a.dirty_sectors || in bch2_alloc_v4_validate()
282 a.cached_sectors || in bch2_alloc_v4_validate()
283 a.stripe, in bch2_alloc_v4_validate()
287 a.dirty_sectors, in bch2_alloc_v4_validate()
288 a.cached_sectors, in bch2_alloc_v4_validate()
289 a.stripe); in bch2_alloc_v4_validate()
296 bkey_fsck_err_on(!a.dirty_sectors && in bch2_alloc_v4_validate()
300 bch2_data_type_str(a.data_type)); in bch2_alloc_v4_validate()
303 bkey_fsck_err_on(!a.cached_sectors || in bch2_alloc_v4_validate()
304 a.dirty_sectors || in bch2_alloc_v4_validate()
306 a.stripe, in bch2_alloc_v4_validate()
310 bkey_fsck_err_on(!a.io_time[READ] && in bch2_alloc_v4_validate()
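
The v4 validate matches above are cross-field consistency checks: each bkey_fsck_err_on() rejects a combination of fields that cannot describe a real bucket (a free bucket holding sectors, a cached bucket with no cached data, an io_time past what the LRU btree can encode). A userspace analogue of three of those invariants (the LRU bound is an assumption, cf. LRU_TIME_MAX):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    enum data_type { DATA_FREE, DATA_CACHED, DATA_USER };

    struct alloc_sketch {
        enum data_type data_type;
        uint32_t dirty_sectors, cached_sectors, stripe;
        uint64_t io_time[2];
    };

    #define LRU_TIME_MAX_SKETCH ((1ULL << 48) - 1)   /* assumed bound */

    static bool validate(const struct alloc_sketch *a)
    {
        /* free buckets hold no data and belong to no stripe */
        if (a->data_type == DATA_FREE &&
            (a->dirty_sectors || a->cached_sectors || a->stripe)) {
            fprintf(stderr, "free bucket with sectors/stripe set\n");
            return false;
        }

        /* cached buckets have cached data but no dirty data or stripe */
        if (a->data_type == DATA_CACHED &&
            (!a->cached_sectors || a->dirty_sectors || a->stripe)) {
            fprintf(stderr, "cached bucket with bad sector counts\n");
            return false;
        }

        /* io_time feeds the LRU index, whose time field is bounded */
        for (unsigned i = 0; i < 2; i++)
            if (a->io_time[i] > LRU_TIME_MAX_SKETCH) {
                fprintf(stderr, "io_time[%u] out of range\n", i);
                return false;
            }
        return true;
    }

    int main(void)
    {
        struct alloc_sketch bad = { .data_type = DATA_FREE, .dirty_sectors = 8 };
        return validate(&bad) ? 0 : 1;   /* fails: free bucket with dirty data */
    }
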
325 struct bch_alloc_v4 *a = bkey_s_to_alloc_v4(k).v; in bch2_alloc_v4_swab() local
327 a->journal_seq_nonempty = swab64(a->journal_seq_nonempty); in bch2_alloc_v4_swab()
328 a->journal_seq_empty = swab64(a->journal_seq_empty); in bch2_alloc_v4_swab()
329 a->flags = swab32(a->flags); in bch2_alloc_v4_swab()
330 a->dirty_sectors = swab32(a->dirty_sectors); in bch2_alloc_v4_swab()
331 a->cached_sectors = swab32(a->cached_sectors); in bch2_alloc_v4_swab()
332 a->io_time[0] = swab64(a->io_time[0]); in bch2_alloc_v4_swab()
333 a->io_time[1] = swab64(a->io_time[1]); in bch2_alloc_v4_swab()
334 a->stripe = swab32(a->stripe); in bch2_alloc_v4_swab()
335 a->nr_external_backpointers = swab32(a->nr_external_backpointers); in bch2_alloc_v4_swab()
336 a->stripe_sectors = swab32(a->stripe_sectors); in bch2_alloc_v4_swab()
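
bch2_alloc_v4_swab() is the standard .swab hook: on-disk values are little-endian (note the le64_to_cpu() at source line 149), so on a big-endian host every multi-byte field is byte-swapped in place; single-byte fields like gen need nothing. A toy version using the GCC/Clang bswap builtins:

    #include <stdint.h>

    struct alloc_sketch {
        uint64_t journal_seq;
        uint32_t flags;
        uint8_t  gen;              /* single byte: no swap needed */
        uint32_t dirty_sectors;
        uint64_t io_time[2];
    };

    static void alloc_swab(struct alloc_sketch *a)
    {
        a->journal_seq   = __builtin_bswap64(a->journal_seq);
        a->flags         = __builtin_bswap32(a->flags);
        a->dirty_sectors = __builtin_bswap32(a->dirty_sectors);
        a->io_time[0]    = __builtin_bswap64(a->io_time[0]);
        a->io_time[1]    = __builtin_bswap64(a->io_time[1]);
    }

    int main(void)
    {
        struct alloc_sketch a = { .journal_seq = 1 };
        alloc_swab(&a);
        return a.journal_seq == (1ULL << 56) ? 0 : 1;
    }
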
340 unsigned dev, const struct bch_alloc_v4 *a) in __bch2_alloc_v4_to_text() argument
347 prt_printf(out, "gen %u oldest_gen %u data_type ", a->gen, a->oldest_gen); in __bch2_alloc_v4_to_text()
348 bch2_prt_data_type(out, a->data_type); in __bch2_alloc_v4_to_text()
350 prt_printf(out, "journal_seq_nonempty %llu\n", a->journal_seq_nonempty); in __bch2_alloc_v4_to_text()
351 prt_printf(out, "journal_seq_empty %llu\n", a->journal_seq_empty); in __bch2_alloc_v4_to_text()
352 prt_printf(out, "need_discard %llu\n", BCH_ALLOC_V4_NEED_DISCARD(a)); in __bch2_alloc_v4_to_text()
353 prt_printf(out, "need_inc_gen %llu\n", BCH_ALLOC_V4_NEED_INC_GEN(a)); in __bch2_alloc_v4_to_text()
354 prt_printf(out, "dirty_sectors %u\n", a->dirty_sectors); in __bch2_alloc_v4_to_text()
355 prt_printf(out, "stripe_sectors %u\n", a->stripe_sectors); in __bch2_alloc_v4_to_text()
356 prt_printf(out, "cached_sectors %u\n", a->cached_sectors); in __bch2_alloc_v4_to_text()
357 prt_printf(out, "stripe %u\n", a->stripe); in __bch2_alloc_v4_to_text()
358 prt_printf(out, "stripe_redundancy %u\n", a->stripe_redundancy); in __bch2_alloc_v4_to_text()
359 prt_printf(out, "io_time[READ] %llu\n", a->io_time[READ]); in __bch2_alloc_v4_to_text()
360 prt_printf(out, "io_time[WRITE] %llu\n", a->io_time[WRITE]); in __bch2_alloc_v4_to_text()
363 prt_printf(out, "fragmentation %llu\n", alloc_lru_idx_fragmentation(*a, ca)); in __bch2_alloc_v4_to_text()
364 prt_printf(out, "bp_start %llu\n", BCH_ALLOC_V4_BACKPOINTERS_START(a)); in __bch2_alloc_v4_to_text()
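
__bch2_alloc_v4_to_text() is the .val_to_text hook: dump each field, one per line, into a printbuf. A sketch with stdio standing in for prt_printf():

    #include <stdint.h>
    #include <stdio.h>

    enum { READ, WRITE };

    struct alloc_sketch {
        uint8_t  gen, oldest_gen;
        uint32_t dirty_sectors, cached_sectors;
        uint64_t io_time[2];
    };

    static void alloc_to_text(FILE *out, const struct alloc_sketch *a)
    {
        fprintf(out, "gen %u oldest_gen %u\n",
                (unsigned)a->gen, (unsigned)a->oldest_gen);
        fprintf(out, "dirty_sectors %u\n", a->dirty_sectors);
        fprintf(out, "cached_sectors %u\n", a->cached_sectors);
        fprintf(out, "io_time[READ] %llu\n", (unsigned long long)a->io_time[READ]);
        fprintf(out, "io_time[WRITE] %llu\n", (unsigned long long)a->io_time[WRITE]);
    }

    int main(void)
    {
        struct alloc_sketch a = { .gen = 3, .oldest_gen = 1, .cached_sectors = 64 };
        alloc_to_text(stdout, &a);
        return 0;
    }
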
373 const struct bch_alloc_v4 *a = bch2_alloc_to_v4(k, &_a); in bch2_alloc_to_text() local
375 __bch2_alloc_v4_to_text(out, c, k.k->p.inode, a); in bch2_alloc_to_text()
452 struct bkey_s_c_alloc_v4 a; in bch2_alloc_to_v4_mut_inlined() local
455 ((a = bkey_s_c_to_alloc_v4(k), true) && in bch2_alloc_to_v4_mut_inlined()
456 BCH_ALLOC_V4_NR_BACKPOINTERS(a.v) == 0)) in bch2_alloc_to_v4_mut_inlined()
479 struct bkey_i_alloc_v4 *a = bch2_alloc_to_v4_mut_inlined(trans, k); in bch2_trans_start_alloc_update_noupdate() local
480 ret = PTR_ERR_OR_ZERO(a); in bch2_trans_start_alloc_update_noupdate()
483 return a; in bch2_trans_start_alloc_update_noupdate()
508 struct bkey_i_alloc_v4 *a = bch2_alloc_to_v4_mut_inlined(trans, k); in bch2_trans_start_alloc_update() local
509 if (IS_ERR(a)) { in bch2_trans_start_alloc_update()
511 return a; in bch2_trans_start_alloc_update()
514 ret = bch2_trans_update_ip(trans, &iter, &a->k_i, flags, _RET_IP_); in bch2_trans_start_alloc_update()
516 return unlikely(ret) ? ERR_PTR(ret) : a; in bch2_trans_start_alloc_update()
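
bch2_trans_start_alloc_update() and its _noupdate variant return the mutable alloc key through the kernel's ERR_PTR convention: a single pointer either carries the object or encodes a small negative errno, which PTR_ERR_OR_ZERO()/IS_ERR() then unpack, as at source lines 480, 509, and 516. A userspace imitation of that convention:

    #include <errno.h>
    #include <stdio.h>
    #include <stdlib.h>

    static inline void *ERR_PTR(long err) { return (void *)err; }
    static inline long PTR_ERR(const void *p) { return (long)p; }
    static inline int IS_ERR(const void *p)
    {
        return (unsigned long)p >= (unsigned long)-4095;
    }
    static inline int PTR_ERR_OR_ZERO(const void *p)
    {
        return IS_ERR(p) ? (int)PTR_ERR(p) : 0;
    }

    struct alloc_update { int dummy; };

    /* get a mutable copy of the bucket, or an encoded error */
    static struct alloc_update *start_alloc_update(int fail)
    {
        if (fail)
            return ERR_PTR(-ENOMEM);
        return calloc(1, sizeof(struct alloc_update));
    }

    int main(void)
    {
        struct alloc_update *a = start_alloc_update(0);
        int ret = PTR_ERR_OR_ZERO(a);

        if (ret) {
            fprintf(stderr, "update failed: %d\n", ret);
            return 1;
        }
        /* ... modify a, then queue the btree update, as in the listing ... */
        free(a);
        return 0;
    }
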
582 struct bch_alloc_v4 a; in bch2_bucket_gens_init()
583 u8 gen = bch2_alloc_to_v4(k, &a)->gen; in bch2_bucket_gens_init()
676 struct bch_alloc_v4 a; in bch2_alloc_read()
677 *bucket_gen(ca, k.k->p.offset) = bch2_alloc_to_v4(k, &a)->gen; in bch2_alloc_read()
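
bch2_bucket_gens_init() and bch2_alloc_read() both use the bch2_alloc_to_v4(k, &stack_buf) idiom: the helper returns a pointer straight into the key when it is already current-format v4, and otherwise unpacks the older version into the caller's stack buffer, so the common path copies nothing. The shape of that idiom, with hypothetical types:

    #include <stdint.h>
    #include <string.h>

    struct v4 { uint8_t gen; };
    struct key { int is_v4; struct v4 v4; uint8_t old_gen; };

    static const struct v4 *to_v4(const struct key *k, struct v4 *buf)
    {
        if (k->is_v4)
            return &k->v4;              /* common case: no copy */
        memset(buf, 0, sizeof(*buf));   /* unpack the old format */
        buf->gen = k->old_gen;
        return buf;
    }

    int main(void)
    {
        struct v4 buf;
        struct key old = { .is_v4 = 0, .old_gen = 9 };
        return to_v4(&old, &buf)->gen == 9 ? 0 : 1;
    }
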
728 const struct bch_alloc_v4 *a, in bch2_bucket_do_index() argument
734 if (a->data_type != BCH_DATA_free && in bch2_bucket_do_index()
735 a->data_type != BCH_DATA_need_discard) in bch2_bucket_do_index()
738 switch (a->data_type) { in bch2_bucket_do_index()
741 pos = alloc_freespace_pos(alloc_k.k->p, *a); in bch2_bucket_do_index()
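
bch2_bucket_do_index() keeps the derived index btrees in sync with the alloc btree: only free and need_discard buckets get entries, and the freespace btree's key position carries generation bits in its high bits (hence the genbits >> 56 seen later at source line 1448). Schematically, with the bit layout as an assumption:

    #include <stdint.h>

    enum data_type { DATA_FREE, DATA_NEED_DISCARD, DATA_USER, DATA_CACHED };
    enum btree { BTREE_FREESPACE, BTREE_NEED_DISCARD, BTREE_NONE };

    struct pos { uint64_t dev, offset; };

    /* Assumed layout: genbits occupy the high bits of the freespace offset. */
    static struct pos freespace_pos(struct pos bucket, uint64_t genbits)
    {
        bucket.offset |= genbits << 56;
        return bucket;
    }

    static enum btree index_btree(enum data_type t)
    {
        switch (t) {
        case DATA_FREE:         return BTREE_FREESPACE;
        case DATA_NEED_DISCARD: return BTREE_NEED_DISCARD;
        default:                return BTREE_NONE;   /* not mirrored */
        }
    }

    int main(void)
    {
        struct pos p = freespace_pos((struct pos){ 0, 1024 }, 3);
        return index_btree(DATA_FREE) == BTREE_FREESPACE &&
               (p.offset >> 56) == 3 ? 0 : 1;
    }
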
1009 #define eval_state(_a, expr) ({ const struct bch_alloc_v4 *a = _a; expr; }) in bch2_trigger_alloc()
1011 #define bucket_flushed(a) (a->journal_seq_empty <= c->journal.flushed_seq_ondisk) in bch2_trigger_alloc() argument
1013 if (statechange(a->data_type == BCH_DATA_free) && in bch2_trigger_alloc()
1017 if (statechange(a->data_type == BCH_DATA_need_discard) && in bch2_trigger_alloc()
1022 if (statechange(a->data_type == BCH_DATA_cached) && in bch2_trigger_alloc()
1027 if (statechange(a->data_type == BCH_DATA_need_gc_gens)) in bch2_trigger_alloc()
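
The bch2_trigger_alloc() matches revolve around the eval_state()/statechange() idiom: eval_state() re-binds 'a' to either the old or the new version of the bucket, and statechange(expr), defined nearby in the source, fires only on the edge where expr goes from false to true across the transition. A standalone version (statement expressions are a GNU C extension):

    #include <stdio.h>

    enum data_type { DATA_CACHED, DATA_FREE, DATA_NEED_DISCARD };

    struct alloc_sketch { enum data_type data_type; };

    /* evaluate expr with 'a' bound to one version of the bucket */
    #define eval_state(_a, expr)  ({ const struct alloc_sketch *a = (_a); expr; })
    /* true only when the old->new transition made expr become true */
    #define statechange(expr)     (!eval_state(old_a, expr) && eval_state(new_a, expr))

    int main(void)
    {
        struct alloc_sketch old_b = { DATA_CACHED }, new_b = { DATA_FREE };
        const struct alloc_sketch *old_a = &old_b, *new_a = &new_b;

        if (statechange(a->data_type == DATA_FREE))
            printf("bucket became free: add it to the freespace btree\n");
        return 0;
    }
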
1164 const struct bch_alloc_v4 *a; in bch2_check_alloc_key() local
1182 a = bch2_alloc_to_v4(alloc_k, &a_convert); in bch2_check_alloc_key()
1190 bool is_discarded = a->data_type == BCH_DATA_need_discard; in bch2_check_alloc_key()
1198 bch2_btree_iter_set_pos(trans, freespace_iter, alloc_freespace_pos(alloc_k.k->p, *a)); in bch2_check_alloc_key()
1204 bool is_free = a->data_type == BCH_DATA_free; in bch2_check_alloc_key()
1218 if (fsck_err_on(a->gen != alloc_gen(k, gens_offset), in bch2_check_alloc_key()
1221 alloc_gen(k, gens_offset), a->gen, in bch2_check_alloc_key()
1238 g->v.gens[gens_offset] = a->gen; in bch2_check_alloc_key()
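
bch2_check_alloc_key() cross-checks the alloc key against the derived btrees; the assignment at source line 1238 is the repair path for the bucket_gens btree, where the alloc key is authoritative. Schematically (the per-key gens capacity here is hypothetical):

    #include <stdbool.h>
    #include <stdint.h>

    /* One bucket_gens key packs generation numbers for a range of buckets. */
    struct bucket_gens_sketch { uint8_t gens[64]; };

    /* cf. the fsck_err_on() above: repair the derived copy from the alloc key. */
    static bool check_bucket_gen(struct bucket_gens_sketch *g, unsigned offset,
                                 uint8_t alloc_gen)
    {
        if (g->gens[offset] == alloc_gen)
            return true;                 /* consistent */
        g->gens[offset] = alloc_gen;     /* alloc key wins: overwrite */
        return false;
    }

    int main(void)
    {
        struct bucket_gens_sketch g = { { 0 } };
        return check_bucket_gen(&g, 7, 3) == false && g.gens[7] == 3 ? 0 : 1;
    }
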
1435 const struct bch_alloc_v4 *a = bch2_alloc_to_v4(alloc_k, &a_convert); in bch2_check_discard_freespace_key() local
1437 if (a->data_type != state || in bch2_check_discard_freespace_key()
1439 genbits != alloc_freespace_genbits(*a))) { in bch2_check_discard_freespace_key()
1447 a->data_type == state, in bch2_check_discard_freespace_key()
1448 genbits >> 56, alloc_freespace_genbits(*a) >> 56)) in bch2_check_discard_freespace_key()
1454 *gen = a->gen; in bch2_check_discard_freespace_key()
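
bch2_check_discard_freespace_key() is the reverse consistency check: every entry in the need_discard or freespace btree must point at a bucket whose alloc key is in the matching state, with matching generation bits (hence the genbits >> 56 in the error message at source line 1448). The predicate, schematically:

    #include <stdbool.h>
    #include <stdint.h>

    enum data_type { DATA_FREE, DATA_NEED_DISCARD, DATA_USER };

    struct alloc_sketch { enum data_type data_type; uint64_t genbits; };

    /* cf. "a->data_type != state || genbits != alloc_freespace_genbits(*a)";
     * in the real code the genbits comparison applies to the freespace btree. */
    static bool index_entry_matches(const struct alloc_sketch *a,
                                    enum data_type state, uint64_t genbits)
    {
        return a->data_type == state && genbits == a->genbits;
    }

    int main(void)
    {
        struct alloc_sketch a = { DATA_FREE, 3ULL << 56 };
        return index_entry_matches(&a, DATA_FREE, 3ULL << 56) ? 0 : 1;
    }
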
1704 const struct bch_alloc_v4 *a; in bch2_check_alloc_to_lru_ref() local
1721 a = bch2_alloc_to_v4(alloc_k, &a_convert); in bch2_check_alloc_to_lru_ref()
1723 u64 lru_idx = alloc_lru_idx_fragmentation(*a, ca); in bch2_check_alloc_to_lru_ref()
1732 if (a->data_type != BCH_DATA_cached) in bch2_check_alloc_to_lru_ref()
1735 if (fsck_err_on(!a->io_time[READ], in bch2_check_alloc_to_lru_ref()
1752 a = &a_mut->v; in bch2_check_alloc_to_lru_ref()
1757 a->io_time[READ], in bch2_check_alloc_to_lru_ref()
1838 struct bkey_i_alloc_v4 *a; in bch2_discard_one_bucket() local
1863 a = bch2_alloc_to_v4_mut(trans, k); in bch2_discard_one_bucket()
1864 ret = PTR_ERR_OR_ZERO(a); in bch2_discard_one_bucket()
1868 if (a->v.data_type != BCH_DATA_need_discard) { in bch2_discard_one_bucket()
1906 SET_BCH_ALLOC_V4_NEED_DISCARD(&a->v, false); in bch2_discard_one_bucket()
1907 alloc_data_type_set(&a->v, a->v.data_type); in bch2_discard_one_bucket()
1909 ret = bch2_trans_update(trans, &iter, &a->k_i, 0); in bch2_discard_one_bucket()
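
bch2_discard_one_bucket() ends with the read-modify-write visible above: fetch a mutable copy, bail if the bucket is no longer need_discard (someone else got there first), clear the flag, and let alloc_data_type_set() rederive the bucket's type before queueing the update. The flag-clear plus rederivation step, with a hypothetical derivation rule:

    #include <stdbool.h>
    #include <stdint.h>

    enum data_type { DATA_FREE, DATA_NEED_DISCARD, DATA_USER };

    struct alloc_sketch {
        bool need_discard;
        uint32_t dirty_sectors;
        enum data_type data_type;
    };

    /* assumed derivation: type follows from sector counts and flags */
    static enum data_type derive_data_type(const struct alloc_sketch *a)
    {
        if (a->dirty_sectors)
            return DATA_USER;
        return a->need_discard ? DATA_NEED_DISCARD : DATA_FREE;
    }

    static void discard_done(struct alloc_sketch *a)
    {
        a->need_discard = false;              /* SET_..._NEED_DISCARD(.., false) */
        a->data_type = derive_data_type(a);   /* cf. alloc_data_type_set() */
    }

    int main(void)
    {
        struct alloc_sketch a = {
            .need_discard = true,
            .data_type    = DATA_NEED_DISCARD,
        };
        discard_done(&a);
        return a.data_type == DATA_FREE ? 0 : 1;
    }
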
2172 const struct bch_alloc_v4 *a = bch2_alloc_to_v4(alloc_k, &a_convert); in invalidate_one_bucket() local
2175 if (lru_pos_time(lru_iter->pos) != alloc_lru_idx_read(*a)) in invalidate_one_bucket()
2185 BUG_ON(a->data_type != BCH_DATA_cached); in invalidate_one_bucket()
2186 BUG_ON(a->dirty_sectors); in invalidate_one_bucket()
2188 if (!a->cached_sectors) { in invalidate_one_bucket()
2194 unsigned cached_sectors = a->cached_sectors; in invalidate_one_bucket()
2195 u8 gen = a->gen; in invalidate_one_bucket()
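
invalidate_one_bucket() shows the optimistic-concurrency check used when walking the cached-data LRU: the time in the LRU key's position must still equal the index recomputed from the bucket's current state; if not, the bucket changed since the LRU entry was written, the entry is stale, and it is skipped. Schematically, with the index derivation simplified:

    #include <stdbool.h>
    #include <stdint.h>

    struct alloc_sketch { uint64_t io_time_read; uint32_t cached_sectors; };

    /* cf. alloc_lru_idx_read(): the LRU index for cached buckets is derived
     * from the bucket's read time (simplified here to the time itself). */
    static uint64_t lru_idx_read(const struct alloc_sketch *a)
    {
        return a->cached_sectors ? a->io_time_read : 0;
    }

    /* Only act on the LRU entry if the bucket still matches it. */
    static bool lru_entry_current(const struct alloc_sketch *a,
                                  uint64_t lru_pos_time)
    {
        return lru_pos_time == lru_idx_read(a);
    }

    int main(void)
    {
        struct alloc_sketch a = { .io_time_read = 100, .cached_sectors = 8 };
        return lru_entry_current(&a, 100) ? 0 : 1;
    }
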
2348 const struct bch_alloc_v4 *a = bch2_alloc_to_v4(k, &a_convert); in bch2_dev_freespace_init() local
2350 ret = bch2_bucket_do_index(trans, ca, k, a, true) ?: in bch2_dev_freespace_init()
2477 struct bkey_i_alloc_v4 *a = in __bch2_bucket_io_time_reset() local
2479 int ret = PTR_ERR_OR_ZERO(a); in __bch2_bucket_io_time_reset()
2484 if (a->v.io_time[rw] == now) in __bch2_bucket_io_time_reset()
2487 a->v.io_time[rw] = now; in __bch2_bucket_io_time_reset()
2489 ret = bch2_trans_update(trans, &iter, &a->k_i, 0) ?: in __bch2_bucket_io_time_reset()
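
__bch2_bucket_io_time_reset() closes the listing with a small but important fast path: if the stored timestamp already equals 'now', return without queueing a btree update at all, so frequently touched buckets do not generate redundant transactions. The shape of that check:

    #include <stdbool.h>
    #include <stdint.h>

    struct alloc_sketch { uint64_t io_time[2]; };

    /* returns true only if an update needs to be written back */
    static bool io_time_reset(struct alloc_sketch *a, int rw, uint64_t now)
    {
        if (a->io_time[rw] == now)
            return false;      /* nothing changed: skip the no-op transaction */
        a->io_time[rw] = now;
        return true;
    }

    int main(void)
    {
        struct alloc_sketch a = { .io_time = { 5, 0 } };
        return io_time_reset(&a, 0, 5) == false ? 0 : 1;   /* no-op skipped */
    }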