rmap_cur references in /linux/fs/xfs/scrub/ (for each hit: line number, matching source line, and the enclosing function):
rmap.c
    336   bs->sc->sa.rmap_cur, 0);   in xchk_rmapbt_mark_bitmap()
    497   xchk_btree_xref_process_error(sc, sc->sa.rmap_cur,   in xchk_rmapbt_walk_ag_metadata()
    498   sc->sa.rmap_cur->bc_nlevels - 1, &error);   in xchk_rmapbt_walk_ag_metadata()
    513   struct xfs_btree_cur *cur = sc->sa.rmap_cur;   in xchk_rmapbt_check_bitmaps()
    593   if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm))   in xchk_xref_is_only_owned_by()
    600   xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);   in xchk_xref_is_only_owned_by()
    602   xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);   in xchk_xref_is_only_owned_by()
    604   xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);   in xchk_xref_is_only_owned_by()
    618   if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm))   in xchk_xref_is_not_owned_by()
    625   xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);   in xchk_xref_is_not_owned_by()
    [all …]
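
The xchk_xref_is_only_owned_by() and xchk_xref_is_not_owned_by() hits above follow the guard-then-flag shape used by most scrub cross-references: do nothing if the rmapbt cursor was never set up or cross-referencing is being skipped, run an rmap query, and pin any disagreement on the rmapbt cursor. A minimal kernel-internal sketch of that shape, assuming the usual fs/xfs/scrub headers; extent_is_owned_by() is a hypothetical stand-in for the real ownership query, not a function from this listing.

/* Hypothetical ownership query, declared only for this sketch. */
int extent_is_owned_by(struct xfs_btree_cur *cur, xfs_agblock_t bno,
                xfs_extlen_t len, const struct xfs_owner_info *oinfo,
                bool *is_owner);

static void
xchk_sketch_xref_owner(
        struct xfs_scrub                *sc,
        xfs_agblock_t                   bno,
        xfs_extlen_t                    len,
        const struct xfs_owner_info     *oinfo)
{
        bool                            is_owner;
        int                             error;

        /* No rmapbt cursor, or cross-referencing disabled for this run. */
        if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm))
                return;

        /* Hypothetical helper: does @oinfo own all of [bno, bno + len)? */
        error = extent_is_owned_by(sc->sa.rmap_cur, bno, len, oinfo,
                        &is_owner);
        if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))
                return;

        /* Disagreements are pinned on the rmap btree cursor. */
        if (!is_owner)
                xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);
}

The xchk_should_check_xref() step decides whether the query result is usable at all; if it is not, the cross-check is quietly abandoned rather than flagged as corruption.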
|
refcount.c
    302   if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm))   in xchk_refcountbt_xref_rmap()
    312   error = xfs_rmap_query_range(sc->sa.rmap_cur, &low, &high,   in xchk_refcountbt_xref_rmap()
    314   if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))   in xchk_refcountbt_xref_rmap()
    320   xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);   in xchk_refcountbt_xref_rmap()
    401   xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);   in xchk_refcountbt_xref_gaps()
    403   xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur);   in xchk_refcountbt_xref_gaps()
    497   if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm))   in xchk_refcount_xref_rmap()
    504   error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur,   in xchk_refcount_xref_rmap()
    509   xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);   in xchk_refcount_xref_rmap()
    512   error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur,   in xchk_refcount_xref_rmap()
    [all …]
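
The xchk_refcountbt_xref_rmap() hits show the range-query form of the cross-check: build a low and a high rmap key covering the extent, walk the overlapping records with xfs_rmap_query_range(), and funnel any cursor error through xchk_should_check_xref(). A sketch of that shape; the key construction and the callback prototype follow the common rmapbt-walker idiom rather than being copied from the file, so treat them as approximate, and the counting check stands in for the refcount-specific bookkeeping.

/* Count the reverse mappings that overlap a candidate extent. */
static int
xchk_sketch_count_rmaps(
        struct xfs_btree_cur            *cur,
        const struct xfs_rmap_irec      *rec,
        void                            *priv)
{
        unsigned long long              *nr = priv;

        (*nr)++;
        return 0;
}

static void
xchk_sketch_xref_rmap_range(
        struct xfs_scrub        *sc,
        xfs_agblock_t           bno,
        xfs_extlen_t            len,
        unsigned long long      expected)
{
        struct xfs_rmap_irec    low;
        struct xfs_rmap_irec    high;
        unsigned long long      nr = 0;
        int                     error;

        if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm))
                return;

        /* Keys spanning [bno, bno + len - 1] across all owners and offsets. */
        memset(&low, 0, sizeof(low));
        low.rm_startblock = bno;
        memset(&high, 0xFF, sizeof(high));
        high.rm_startblock = bno + len - 1;

        error = xfs_rmap_query_range(sc->sa.rmap_cur, &low, &high,
                        xchk_sketch_count_rmaps, &nr);
        if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))
                return;

        /* The record being checked should be backed by this many rmaps. */
        if (nr != expected)
                xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);
}

cow_repair.c further down uses the same xfs_rmap_query_range() entry point with its own low/high keys.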
|
rmap_repair.c
    972    struct xfs_btree_cur *rmap_cur,   in xrep_rmap_try_reserve()  (argument)
    1055   error = xfs_btree_bload_compute_geometry(rmap_cur,   in xrep_rmap_try_reserve()
    1072   struct xfs_btree_cur *rmap_cur)   in xrep_rmap_reserve_space()  (argument)
    1080   error = xfs_btree_bload_compute_geometry(rmap_cur,   in xrep_rmap_reserve_space()
    1098   error = xrep_rmap_try_reserve(rr, rmap_cur, &freesp_blocks,   in xrep_rmap_reserve_space()
    1274   struct xfs_btree_cur *rmap_cur;   in xrep_rmap_build_new_tree()  (local)
    1299   rmap_cur = xfs_rmapbt_init_cursor(sc->mp, NULL, NULL, pag);   in xrep_rmap_build_new_tree()
    1300   xfs_btree_stage_afakeroot(rmap_cur, &rr->new_btree.afake);   in xrep_rmap_build_new_tree()
    1306   error = xrep_rmap_reserve_space(rr, rmap_cur);   in xrep_rmap_build_new_tree()
    1347   xfs_btree_del_cursor(rmap_cur, 0);   in xrep_rmap_build_new_tree()
    [all …]
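
The rmap_repair.c hits trace the staging cursor used to build a replacement rmapbt: open a cursor against a fake root (no transaction-owned AGF buffer, hence the NULL arguments), compute the new tree's geometry so space can be reserved, and delete the cursor when done. A condensed sketch, assuming xfs_btree_bload_compute_geometry() takes the bload descriptor and a record count; the reservation, bulk load and root commit steps are reduced to a comment, and the parameters stand in for the repair context's real state.

static int
xrep_rmap_rebuild_sketch(
        struct xfs_scrub        *sc,
        struct xfs_perag        *pag,
        struct xbtree_afakeroot *afake,
        struct xfs_btree_bload  *bload,
        uint64_t                nr_records)
{
        struct xfs_btree_cur    *rmap_cur;
        int                     error;

        /* Staging cursor: no AGF buffer yet, just a fake root to build on. */
        rmap_cur = xfs_rmapbt_init_cursor(sc->mp, NULL, NULL, pag);
        xfs_btree_stage_afakeroot(rmap_cur, afake);

        /* Work out how many blocks the rebuilt rmapbt will need. */
        error = xfs_btree_bload_compute_geometry(rmap_cur, bload, nr_records);
        if (error)
                goto out_cur;

        /* ... reserve blocks, bulk-load the records, commit the new root ... */

out_cur:
        xfs_btree_del_cursor(rmap_cur, error);
        return error;
}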
|
ialloc.c
    659   if (!sc->sa.ino_cur || !sc->sa.rmap_cur ||   in xchk_iallocbt_xref_rmap_btreeblks()
    675   error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur,   in xchk_iallocbt_xref_rmap_btreeblks()
    677   if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))   in xchk_iallocbt_xref_rmap_btreeblks()
    696   if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm))   in xchk_iallocbt_xref_rmap_inodes()
    700   error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur,   in xchk_iallocbt_xref_rmap_inodes()
    702   if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))   in xchk_iallocbt_xref_rmap_inodes()
    706   xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);   in xchk_iallocbt_xref_rmap_inodes()
|
refcount_repair.c
    239   struct xfs_btree_cur *cur = rr->sc->sa.rmap_cur;   in xrep_refc_walk_rmaps()
    395   error = xfs_btree_decrement(sc->sa.rmap_cur, 0, &have_gt);   in xrep_refc_push_rmaps_at()
    399   xfs_btree_mark_sick(sc->sa.rmap_cur);   in xrep_refc_push_rmaps_at()
    432   error = xfs_btree_goto_left_edge(sc->sa.rmap_cur);   in xrep_refc_find_refcounts()
    437   while (xfs_btree_has_more_records(sc->sa.rmap_cur)) {   in xrep_refc_find_refcounts()
|
common.c
    567   if (sa->rmap_cur)   in xchk_ag_btcur_free()
    568   xfs_btree_del_cursor(sa->rmap_cur, XFS_BTREE_ERROR);   in xchk_ag_btcur_free()
    579   sa->rmap_cur = NULL;   in xchk_ag_btcur_free()
    609   sa->rmap_cur = xfs_rmapbt_init_cursor(mp, sc->tp,   in xchk_ag_btcur_init()
    611   xchk_ag_btree_del_cursor_if_sick(sc, &sa->rmap_cur,   in xchk_ag_btcur_init()
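
common.c owns the cursor's life cycle for ordinary scrubs: xchk_ag_btcur_init() creates the rmapbt cursor alongside the other per-AG cursors, and xchk_ag_btcur_free() deletes it and clears the pointer so later cross-reference checks can tell that no cursor is available. A sketch of those two halves; the xfs_has_rmapbt() gate is an assumption carried over from the agheader.c hit below rather than something visible in the common.c lines.

static void
xchk_sketch_rmap_cursor_init(
        struct xfs_scrub        *sc,
        struct xchk_ag          *sa)
{
        /* Only create the cursor if the filesystem has a reverse-mapping btree. */
        if (xfs_has_rmapbt(sc->mp))
                sa->rmap_cur = xfs_rmapbt_init_cursor(sc->mp, sc->tp,
                                sa->agf_bp, sa->pag);
}

static void
xchk_sketch_rmap_cursor_free(
        struct xchk_ag          *sa)
{
        /* Tear down the cursor without committing anything, then clear it. */
        if (sa->rmap_cur)
                xfs_btree_del_cursor(sa->rmap_cur, XFS_BTREE_ERROR);
        sa->rmap_cur = NULL;
}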
|
bmap.c
    174   error = xfs_rmap_lookup_le_range(info->sc->sa.rmap_cur, agbno,   in xchk_bmap_get_rmap()
    177   error = xfs_rmap_lookup_le(info->sc->sa.rmap_cur, agbno,   in xchk_bmap_get_rmap()
    180   if (!xchk_should_check_xref(info->sc, &error, &info->sc->sa.rmap_cur))   in xchk_bmap_get_rmap()
    200   if (!info->sc->sa.rmap_cur || xchk_skip_xref(info->sc->sm))   in xchk_bmap_xref_rmap()
    267   if (!info->sc->sa.rmap_cur || xchk_skip_xref(info->sc->sm))   in xchk_bmap_xref_rmap_cow()
|
agheader.c
    446   if (sc->sa.rmap_cur) {   in xchk_agf_xref_btreeblks()
    447   error = xfs_btree_count_blocks(sc->sa.rmap_cur, &blocks);   in xchk_agf_xref_btreeblks()
    448   if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))   in xchk_agf_xref_btreeblks()
    461   if ((xfs_has_rmapbt(mp) && !sc->sa.rmap_cur) ||   in xchk_agf_xref_btreeblks()
|
scrub.h
    116   struct xfs_btree_cur *rmap_cur;   (member)
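
The single scrub.h hit is the definition: rmap_cur is one of the btree cursors kept in the per-AG scrub context (struct xchk_ag, reached as sc->sa throughout the rest of this listing). A partial sketch of that context showing only the fields that actually appear in these results; the real structure has more members, and this ordering is illustrative.

/* Partial view of the per-AG scrub context; only fields seen in this listing. */
struct xchk_ag {
        struct xfs_perag        *pag;           /* AG being scrubbed */
        struct xfs_buf          *agf_bp;        /* AGF header buffer */
        struct xfs_btree_cur    *ino_cur;       /* inode btree cursor */
        struct xfs_btree_cur    *rmap_cur;      /* reverse-mapping btree cursor */
        /* ... other AG header buffers and btree cursors ... */
};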
|
agheader_repair.c
    459   struct xfs_btree_cur *rmap_cur;   (member)
    502   error = xfs_rmap_has_other_keys(ra->rmap_cur, agbno, 1,   in xrep_agfl_check_extent()
    577   ra.rmap_cur = xfs_rmapbt_init_cursor(mp, sc->tp, agf_bp, sc->sa.pag);   in xrep_agfl_collect_blocks()
    579   xfs_btree_del_cursor(ra.rmap_cur, error);   in xrep_agfl_collect_blocks()
|
cow_repair.c
    290   error = xfs_rmap_query_range(sc->sa.rmap_cur, &rm_low, &rm_high,   in xrep_cow_find_bad()
|
alloc_repair.c
    380   error = xfs_rmap_query_all(sc->sa.rmap_cur, xrep_abt_walk_rmap, ra);   in xrep_abt_find_freespace()
|
bmap_repair.c
    359   error = xfs_rmap_query_all(sc->sa.rmap_cur, xrep_bmap_walk_rmap, rb);   in xrep_bmap_scan_ag()
|
ialloc_repair.c
    484   error = xfs_rmap_query_all(sc->sa.rmap_cur, xrep_ibt_walk_rmap, ri);   in xrep_ibt_find_inodes()
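
alloc_repair.c, bmap_repair.c and ialloc_repair.c above all gather their raw material the same way: walk every reverse-mapping record in the AG with xfs_rmap_query_all() and let a per-record callback pick out the mappings it cares about. A sketch of that walk; the callback prototype (cursor, record, private pointer) and the XFS_RMAP_NON_INODE_OWNER() filter are assumptions based on the usual rmapbt walker idiom, and the counting logic is purely illustrative.

struct xrep_sketch_scan {
        unsigned long long              inode_mappings; /* file-owned records seen */
};

/* Called once for each rmap record in the AG. */
static int
xrep_sketch_walk_rmap(
        struct xfs_btree_cur            *cur,
        const struct xfs_rmap_irec      *rec,
        void                            *priv)
{
        struct xrep_sketch_scan         *rs = priv;

        /* Skip static-metadata owners; count only extents owned by inodes. */
        if (!XFS_RMAP_NON_INODE_OWNER(rec->rm_owner))
                rs->inode_mappings++;
        return 0;
}

static int
xrep_sketch_scan_ag(
        struct xfs_scrub        *sc,
        struct xrep_sketch_scan *rs)
{
        /* Visit every record in the rmap btree, in key order. */
        return xfs_rmap_query_all(sc->sa.rmap_cur, xrep_sketch_walk_rmap, rs);
}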
|
btree.c
    405   if (!bs->sc->sa.rmap_cur && xfs_btree_is_rmap(bs->cur->bc_ops))   in xchk_btree_check_block_owner()
|
repair.c
    860   sa->rmap_cur = xfs_rmapbt_init_cursor(mp, sc->tp, sa->agf_bp,   in xrep_ag_btcur_init()
|