Lines matching refs:sa (the leading numbers are line numbers in the searched source file)
38 error = xchk_ag_init(sc, agno, &sc->sa);
47 /* scrub teardown will take care of sc->sa for us */
361 struct xfs_agf *agf = sc->sa.agf_bp->b_addr;
365 if (!sc->sa.bno_cur)
368 error = xfs_alloc_query_all(sc->sa.bno_cur,
370 if (!xchk_should_check_xref(sc, &error, &sc->sa.bno_cur))
373 xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
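Lines 361-373 are the free-block cross-check: every record in the by-block-number (bnobt) free space btree is visited with xfs_alloc_query_all and the summed extent lengths are compared with the AGF's agf_freeblks counter. Below is a minimal sketch of that pattern; the callback name and everything not visible in the lines above are reconstructions under the usual XFS scrub context, not the authoritative in-tree code.

/* Sketch: accumulate bnobt extent lengths (callback name assumed). */
static int
xchk_agf_record_bno_lengths(
	struct xfs_btree_cur		*cur,
	struct xfs_alloc_rec_incore	*rec,
	void				*priv)
{
	xfs_extlen_t			*blocks = priv;

	*blocks += rec->ar_blockcount;
	return 0;
}

/* Cross-reference agf_freeblks with the bnobt contents. */
static inline void
xchk_agf_xref_freeblks(
	struct xfs_scrub	*sc)
{
	struct xfs_agf		*agf = sc->sa.agf_bp->b_addr;
	xfs_extlen_t		blocks = 0;
	int			error;

	if (!sc->sa.bno_cur)
		return;		/* no bnobt cursor, nothing to check */

	error = xfs_alloc_query_all(sc->sa.bno_cur,
			xchk_agf_record_bno_lengths, &blocks);
	if (!xchk_should_check_xref(sc, &error, &sc->sa.bno_cur))
		return;
	if (blocks != be32_to_cpu(agf->agf_freeblks))
		xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
}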
381 struct xfs_agf *agf = sc->sa.agf_bp->b_addr;
387 if (!sc->sa.cnt_cur)
391 error = xfs_alloc_lookup_le(sc->sa.cnt_cur, 0, -1U, &have);
392 if (!xchk_should_check_xref(sc, &error, &sc->sa.cnt_cur))
396 xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
401 error = xfs_alloc_get_rec(sc->sa.cnt_cur, &agbno, &blocks, &have);
402 if (!xchk_should_check_xref(sc, &error, &sc->sa.cnt_cur))
405 xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
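Lines 381-405 check agf_longest against the by-size (cntbt) btree: xfs_alloc_lookup_le(cur, 0, -1U, ...) positions the cursor on the cntbt's last record, which is the largest free extent, and xfs_alloc_get_rec reads back its length. A hedged reconstruction of the surrounding logic, filling in only what the visible calls imply:

static inline void
xchk_agf_xref_cntbt(
	struct xfs_scrub	*sc)
{
	struct xfs_agf		*agf = sc->sa.agf_bp->b_addr;
	xfs_agblock_t		agbno;
	xfs_extlen_t		blocks;
	int			have;
	int			error;

	if (!sc->sa.cnt_cur)
		return;

	/* Any free extents at all?  The last cntbt record is the longest. */
	error = xfs_alloc_lookup_le(sc->sa.cnt_cur, 0, -1U, &have);
	if (!xchk_should_check_xref(sc, &error, &sc->sa.cnt_cur))
		return;
	if (!have) {
		/* Assumption: with no free space, agf_longest must be zero. */
		if (agf->agf_longest != 0)
			xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
		return;
	}

	/* Compare the longest record with the AGF summary field. */
	error = xfs_alloc_get_rec(sc->sa.cnt_cur, &agbno, &blocks, &have);
	if (!xchk_should_check_xref(sc, &error, &sc->sa.cnt_cur))
		return;
	if (!have || blocks != be32_to_cpu(agf->agf_longest))
		xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
}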
413 struct xfs_agf *agf = sc->sa.agf_bp->b_addr;
420 if (sc->sa.rmap_cur) {
421 error = xfs_btree_count_blocks(sc->sa.rmap_cur, &blocks);
422 if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))
426 xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
435 if ((xfs_sb_version_hasrmapbt(&mp->m_sb) && !sc->sa.rmap_cur) ||
436 !sc->sa.bno_cur || !sc->sa.cnt_cur)
440 error = xfs_btree_count_blocks(sc->sa.bno_cur, &blocks);
441 if (!xchk_should_check_xref(sc, &error, &sc->sa.bno_cur))
445 error = xfs_btree_count_blocks(sc->sa.cnt_cur, &blocks);
446 if (!xchk_should_check_xref(sc, &error, &sc->sa.cnt_cur))
451 xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
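Lines 413-451 validate the btree block counters: xfs_btree_count_blocks totals the blocks owned by the rmap, bnobt and cntbt cursors, and the results are compared with agf_rmap_blocks and agf_btreeblks. The sketch below assumes that agf_btreeblks excludes each btree's root block (hence the "blocks - 1" accounting) and that the check bails out when a required cursor is missing; treat those details as assumptions rather than the in-tree rule.

static inline void
xchk_agf_xref_btreeblks(
	struct xfs_scrub	*sc)
{
	struct xfs_agf		*agf = sc->sa.agf_bp->b_addr;
	struct xfs_mount	*mp = sc->mp;
	xfs_extlen_t		blocks;
	xfs_extlen_t		btreeblks;
	int			error;

	/* Assumption: agf_btreeblks is only maintained with lazy sb counters. */
	if (!xfs_sb_version_haslazysbcount(&mp->m_sb))
		return;

	/* Check agf_rmap_blocks; seed the agf_btreeblks total. */
	if (sc->sa.rmap_cur) {
		error = xfs_btree_count_blocks(sc->sa.rmap_cur, &blocks);
		if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))
			return;
		btreeblks = blocks - 1;
		if (blocks != be32_to_cpu(agf->agf_rmap_blocks))
			xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
	} else {
		btreeblks = 0;
	}

	/* Can't cross-reference without all the needed cursors. */
	if ((xfs_sb_version_hasrmapbt(&mp->m_sb) && !sc->sa.rmap_cur) ||
	    !sc->sa.bno_cur || !sc->sa.cnt_cur)
		return;

	/* Check agf_btreeblks against the free space btrees. */
	error = xfs_btree_count_blocks(sc->sa.bno_cur, &blocks);
	if (!xchk_should_check_xref(sc, &error, &sc->sa.bno_cur))
		return;
	btreeblks += blocks - 1;

	error = xfs_btree_count_blocks(sc->sa.cnt_cur, &blocks);
	if (!xchk_should_check_xref(sc, &error, &sc->sa.cnt_cur))
		return;
	btreeblks += blocks - 1;

	if (btreeblks != be32_to_cpu(agf->agf_btreeblks))
		xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
}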
459 struct xfs_agf *agf = sc->sa.agf_bp->b_addr;
463 if (!sc->sa.refc_cur)
466 error = xfs_btree_count_blocks(sc->sa.refc_cur, &blocks);
467 if (!xchk_should_check_xref(sc, &error, &sc->sa.refc_cur))
470 xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
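Lines 459-470 repeat the pattern for the refcount btree, comparing its block count with agf_refcount_blocks. A sketch, assuming the structure mirrors the checks above:

static inline void
xchk_agf_xref_refcblks(
	struct xfs_scrub	*sc)
{
	struct xfs_agf		*agf = sc->sa.agf_bp->b_addr;
	xfs_extlen_t		blocks;
	int			error;

	if (!sc->sa.refc_cur)
		return;

	error = xfs_btree_count_blocks(sc->sa.refc_cur, &blocks);
	if (!xchk_should_check_xref(sc, &error, &sc->sa.refc_cur))
		return;
	if (blocks != be32_to_cpu(agf->agf_refcount_blocks))
		xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
}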
487 error = xchk_ag_btcur_init(sc, &sc->sa);
500 /* scrub teardown will take care of sc->sa for us */
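Lines 487-500 are the glue around those helpers: xchk_ag_btcur_init attaches the per-AG btree cursors to sc->sa, the cross-reference checks run, and cleanup is deliberately left to the scrub teardown path. A hedged sketch of that flow; the exact set and order of helper calls beyond the ones named in this listing is an assumption.

/* Cross-reference the AGF with the AG btrees (helper list assumed). */
static void
xchk_agf_xref(
	struct xfs_scrub	*sc)
{
	int			error;

	/* Don't bother cross-referencing an AGF already marked corrupt. */
	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
		return;

	error = xchk_ag_btcur_init(sc, &sc->sa);
	if (error)
		return;

	xchk_agf_xref_freeblks(sc);
	xchk_agf_xref_cntbt(sc);
	xchk_agf_xref_btreeblks(sc);
	xchk_agf_xref_refcblks(sc);

	/* scrub teardown will take care of sc->sa for us */
}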
521 agno = sc->sa.agno = sc->sm->sm_agno;
522 error = xchk_ag_read_headers(sc, agno, &sc->sa.agi_bp,
523 &sc->sa.agf_bp, &sc->sa.agfl_bp);
526 xchk_buffer_recheck(sc, sc->sa.agf_bp);
528 agf = sc->sa.agf_bp->b_addr;
533 xchk_block_set_corrupt(sc, sc->sa.agf_bp);
538 xchk_block_set_corrupt(sc, sc->sa.agf_bp);
542 xchk_block_set_corrupt(sc, sc->sa.agf_bp);
546 xchk_block_set_corrupt(sc, sc->sa.agf_bp);
550 xchk_block_set_corrupt(sc, sc->sa.agf_bp);
555 xchk_block_set_corrupt(sc, sc->sa.agf_bp);
559 xchk_block_set_corrupt(sc, sc->sa.agf_bp);
565 xchk_block_set_corrupt(sc, sc->sa.agf_bp);
569 xchk_block_set_corrupt(sc, sc->sa.agf_bp);
581 xchk_block_set_corrupt(sc, sc->sa.agf_bp);
586 xchk_block_set_corrupt(sc, sc->sa.agf_bp);
588 xchk_block_set_corrupt(sc, sc->sa.agf_bp);
590 xchk_block_set_corrupt(sc, sc->sa.agf_bp);
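Lines 521-590 form the AGF scrubber proper: it records the AG number, reads the AGI/AGF/AGFL buffers through xchk_ag_read_headers, rechecks the AGF buffer, and then runs a series of self-consistency tests, each of which flags sc->sa.agf_bp on failure. A few representative bound checks are sketched below as a hypothetical helper; in the kernel these live inline in xchk_agf and the full set of checks is broader.

/* Hypothetical helper: a subset of the AGF self-consistency checks. */
static void
xchk_agf_check_bounds(
	struct xfs_scrub	*sc)
{
	struct xfs_mount	*mp = sc->mp;
	struct xfs_agf		*agf = sc->sa.agf_bp->b_addr;
	xfs_extlen_t		eoag = be32_to_cpu(agf->agf_length);

	/* Free-list indices must stay inside the on-disk AGFL. */
	if (be32_to_cpu(agf->agf_flfirst) >= xfs_agfl_size(mp))
		xchk_block_set_corrupt(sc, sc->sa.agf_bp);
	if (be32_to_cpu(agf->agf_fllast) >= xfs_agfl_size(mp))
		xchk_block_set_corrupt(sc, sc->sa.agf_bp);
	if (be32_to_cpu(agf->agf_flcount) > xfs_agfl_size(mp))
		xchk_block_set_corrupt(sc, sc->sa.agf_bp);

	/* Summary counters cannot exceed the size of the AG. */
	if (be32_to_cpu(agf->agf_freeblks) > eoag)
		xchk_block_set_corrupt(sc, sc->sa.agf_bp);
	if (be32_to_cpu(agf->agf_longest) > eoag)
		xchk_block_set_corrupt(sc, sc->sa.agf_bp);
}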
631 xfs_agnumber_t agno = sc->sa.agno;
637 xchk_block_set_corrupt(sc, sc->sa.agfl_bp);
672 error = xchk_ag_btcur_init(sc, &sc->sa);
682 * Scrub teardown will take care of sc->sa for us. Leave sc->sa
699 agno = sc->sa.agno = sc->sm->sm_agno;
700 error = xchk_ag_read_headers(sc, agno, &sc->sa.agi_bp,
701 &sc->sa.agf_bp, &sc->sa.agfl_bp);
704 if (!sc->sa.agf_bp)
706 xchk_buffer_recheck(sc, sc->sa.agfl_bp);
714 agf = sc->sa.agf_bp->b_addr;
717 xchk_block_set_corrupt(sc, sc->sa.agf_bp);
731 error = xfs_agfl_walk(sc->mp, sc->sa.agf_bp->b_addr,
732 sc->sa.agfl_bp, xchk_agfl_block, &sai);
741 xchk_block_set_corrupt(sc, sc->sa.agf_bp);
750 xchk_block_set_corrupt(sc, sc->sa.agf_bp);
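Lines 631-750 cover the AGFL scrubber: xchk_agfl reads the headers, rechecks the AGFL buffer, and then hands every active free-list slot to xchk_agfl_block via the xfs_agfl_walk call at lines 731-732, collecting the block numbers for later checks. A hedged sketch of the callback; the xchk_agfl_info bookkeeping structure and the follow-up duplicate detection are assumptions about code not shown in this listing.

/* Assumed bookkeeping for the AGFL walk. */
struct xchk_agfl_info {
	struct xfs_scrub	*sc;
	unsigned int		sz_entries;	/* expected agf_flcount */
	unsigned int		nr_entries;	/* slots recorded so far */
	xfs_agblock_t		*entries;	/* collected AGFL blocks */
};

/* Record one AGFL slot; flag the AGFL buffer if the block looks bad. */
static int
xchk_agfl_block(
	struct xfs_mount	*mp,
	xfs_agblock_t		agbno,
	void			*priv)
{
	struct xchk_agfl_info	*sai = priv;
	struct xfs_scrub	*sc = sai->sc;
	xfs_agnumber_t		agno = sc->sa.agno;

	if (xfs_verify_agbno(mp, agno, agbno) &&
	    sai->nr_entries < sai->sz_entries)
		sai->entries[sai->nr_entries++] = agbno;
	else
		xchk_block_set_corrupt(sc, sc->sa.agfl_bp);

	/* Abort the walk once corruption is found (abort value assumed). */
	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
		return -ECANCELED;
	return 0;
}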
768 struct xfs_agi *agi = sc->sa.agi_bp->b_addr;
773 if (!sc->sa.ino_cur)
776 error = xfs_ialloc_count_inodes(sc->sa.ino_cur, &icount, &freecount);
777 if (!xchk_should_check_xref(sc, &error, &sc->sa.ino_cur))
781 xchk_block_xref_set_corrupt(sc, sc->sa.agi_bp);
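Lines 768-781 cross-check the AGI inode counters: xfs_ialloc_count_inodes walks the inobt cursor, and the totals are compared with agi_count and agi_freecount. A sketch reconstructed around the calls shown above:

static inline void
xchk_agi_xref_icounts(
	struct xfs_scrub	*sc)
{
	struct xfs_agi		*agi = sc->sa.agi_bp->b_addr;
	xfs_agino_t		icount;
	xfs_agino_t		freecount;
	int			error;

	if (!sc->sa.ino_cur)
		return;

	error = xfs_ialloc_count_inodes(sc->sa.ino_cur, &icount, &freecount);
	if (!xchk_should_check_xref(sc, &error, &sc->sa.ino_cur))
		return;
	if (be32_to_cpu(agi->agi_count) != icount ||
	    be32_to_cpu(agi->agi_freecount) != freecount)
		xchk_block_xref_set_corrupt(sc, sc->sa.agi_bp);
}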
789 struct xfs_agi *agi = sc->sa.agi_bp->b_addr;
796 if (sc->sa.ino_cur) {
797 error = xfs_btree_count_blocks(sc->sa.ino_cur, &blocks);
798 if (!xchk_should_check_xref(sc, &error, &sc->sa.ino_cur))
801 xchk_block_xref_set_corrupt(sc, sc->sa.agi_bp);
804 if (sc->sa.fino_cur) {
805 error = xfs_btree_count_blocks(sc->sa.fino_cur, &blocks);
806 if (!xchk_should_check_xref(sc, &error, &sc->sa.fino_cur))
809 xchk_block_xref_set_corrupt(sc, sc->sa.agi_bp);
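Lines 789-809 do the same for the inode btree block counters: the blocks owned by the inobt and finobt cursors are counted and compared with the AGI's agi_iblocks and agi_fblocks fields. The sketch below assumes those field names and that the check only applies when the inode btree block counter feature is enabled.

static inline void
xchk_agi_xref_fiblocks(
	struct xfs_scrub	*sc)
{
	struct xfs_agi		*agi = sc->sa.agi_bp->b_addr;
	xfs_extlen_t		blocks;
	int			error = 0;

	/* Assumption: agi_iblocks/agi_fblocks need the inobtcounts feature. */
	if (!xfs_sb_version_hasinobtcounts(&sc->mp->m_sb))
		return;

	if (sc->sa.ino_cur) {
		error = xfs_btree_count_blocks(sc->sa.ino_cur, &blocks);
		if (!xchk_should_check_xref(sc, &error, &sc->sa.ino_cur))
			return;
		if (blocks != be32_to_cpu(agi->agi_iblocks))
			xchk_block_xref_set_corrupt(sc, sc->sa.agi_bp);
	}

	if (sc->sa.fino_cur) {
		error = xfs_btree_count_blocks(sc->sa.fino_cur, &blocks);
		if (!xchk_should_check_xref(sc, &error, &sc->sa.fino_cur))
			return;
		if (blocks != be32_to_cpu(agi->agi_fblocks))
			xchk_block_xref_set_corrupt(sc, sc->sa.agi_bp);
	}
}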
827 error = xchk_ag_btcur_init(sc, &sc->sa);
838 /* scrub teardown will take care of sc->sa for us */
860 agno = sc->sa.agno = sc->sm->sm_agno;
861 error = xchk_ag_read_headers(sc, agno, &sc->sa.agi_bp,
862 &sc->sa.agf_bp, &sc->sa.agfl_bp);
865 xchk_buffer_recheck(sc, sc->sa.agi_bp);
867 agi = sc->sa.agi_bp->b_addr;
872 xchk_block_set_corrupt(sc, sc->sa.agi_bp);
877 xchk_block_set_corrupt(sc, sc->sa.agi_bp);
881 xchk_block_set_corrupt(sc, sc->sa.agi_bp);
886 xchk_block_set_corrupt(sc, sc->sa.agi_bp);
890 xchk_block_set_corrupt(sc, sc->sa.agi_bp);
898 xchk_block_set_corrupt(sc, sc->sa.agi_bp);
903 xchk_block_set_corrupt(sc, sc->sa.agi_bp);
907 xchk_block_set_corrupt(sc, sc->sa.agi_bp);
913 xchk_block_set_corrupt(sc, sc->sa.agi_bp);
917 xchk_block_set_corrupt(sc, sc->sa.agi_bp);
922 xchk_block_set_corrupt(sc, sc->sa.agi_bp);
924 xchk_block_set_corrupt(sc, sc->sa.agi_bp);
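Lines 860-924 are the AGI scrubber: it reads the AG headers, rechecks the AGI buffer, and then runs per-field sanity tests that flag sc->sa.agi_bp. One representative check is sketched below as a hypothetical helper: the inode count may not exceed the number of inode numbers the AG can hold, and the free count may not exceed the total count.

/* Hypothetical helper: sanity-check the AGI inode counters. */
static void
xchk_agi_check_icounts(
	struct xfs_scrub	*sc)
{
	struct xfs_mount	*mp = sc->mp;
	struct xfs_agi		*agi = sc->sa.agi_bp->b_addr;
	xfs_agino_t		first_agino;
	xfs_agino_t		last_agino;
	xfs_agino_t		icount;

	/* Range of inode numbers this AG can hold. */
	xfs_agino_range(mp, sc->sa.agno, &first_agino, &last_agino);

	icount = be32_to_cpu(agi->agi_count);
	if (icount > last_agino - first_agino + 1 ||
	    icount < be32_to_cpu(agi->agi_freecount))
		xchk_block_set_corrupt(sc, sc->sa.agi_bp);
}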