Lines matching refs: raid_bio (drivers/md/raid5.c)

In raid5_read_one_chunk() (raid_bio is the function argument; a sketch of the sector mapping follows this group):

5393  static int raid5_read_one_chunk(struct mddev *mddev, struct bio *raid_bio)
5402  if (!in_chunk_boundary(mddev, raid_bio)) {
5407  sector = raid5_compute_sector(conf, raid_bio->bi_iter.bi_sector, 0,
5409  end_sector = sector + bio_sectors(raid_bio);
5428  if (rdev_has_badblock(rdev, sector, bio_sectors(raid_bio))) {
5433  md_account_bio(mddev, &raid_bio);
5434  raid_bio->bi_next = (void *)rdev;
5436  align_bio = bio_alloc_clone(rdev->bdev, raid_bio, GFP_NOIO,
5439  align_bio->bi_private = raid_bio;
5464  mddev_trace_remap(mddev, align_bio, raid_bio->bi_iter.bi_sector);
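The reference at line 5407 is where the bio's logical start sector is translated into a member-device sector before the clone built at line 5436 is sent straight to that device. As a rough illustration of the arithmetic involved (not the kernel's raid5_compute_sector(), which also covers RAID4/RAID6 and several rotation algorithms), here is a standalone userspace sketch for the left-symmetric RAID5 layout only; the struct and function names below are invented for the example.

/* Simplified model of mapping a logical array sector to (data disk, device
 * sector) for a left-symmetric RAID5 layout. Illustrative only; the real
 * raid5_compute_sector() in drivers/md/raid5.c handles many more layouts. */
#include <stdio.h>
#include <stdint.h>

struct toy_layout {             /* hypothetical stand-in for struct r5conf */
	unsigned raid_disks;    /* total member devices, parity included */
	unsigned chunk_sectors; /* sectors per chunk */
};

static uint64_t toy_compute_sector(const struct toy_layout *c,
				   uint64_t logical, unsigned *dd_idx,
				   unsigned *pd_idx)
{
	unsigned data_disks = c->raid_disks - 1;
	uint64_t chunk_number = logical / c->chunk_sectors;
	unsigned chunk_offset = logical % c->chunk_sectors;
	uint64_t stripe = chunk_number / data_disks;
	unsigned idx = chunk_number % data_disks;

	/* Left-symmetric: parity rotates "leftwards", data wraps after it. */
	*pd_idx = data_disks - (unsigned)(stripe % c->raid_disks);
	*dd_idx = (*pd_idx + 1 + idx) % c->raid_disks;

	/* Sector offset on the chosen member device. */
	return stripe * c->chunk_sectors + chunk_offset;
}

int main(void)
{
	struct toy_layout c = { .raid_disks = 4, .chunk_sectors = 8 };
	unsigned dd, pd;

	for (uint64_t s = 0; s < 64; s += 8) {
		uint64_t dev_sector = toy_compute_sector(&c, s, &dd, &pd);
		printf("logical %3llu -> disk %u sector %3llu (parity on %u)\n",
		       (unsigned long long)s, dd,
		       (unsigned long long)dev_sector, pd);
	}
	return 0;
}

The badblock check at line 5428 then gates this fast path: if the target device has a bad block anywhere in the range, the direct clone is abandoned and the read falls back to the normal stripe machinery.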
In chunk_aligned_read() (raid_bio is the function argument; the split arithmetic is sketched after this group):

5469  static struct bio *chunk_aligned_read(struct mddev *mddev, struct bio *raid_bio)
5472  sector_t sector = raid_bio->bi_iter.bi_sector;
5476  if (sectors < bio_sectors(raid_bio)) {
5478  split = bio_split(raid_bio, sectors, GFP_NOIO, &conf->bio_split);
5479  bio_chain(split, raid_bio);
5480  submit_bio_noacct(raid_bio);
5481  raid_bio = split;
5484  if (!raid5_read_one_chunk(mddev, raid_bio))
5485  return raid_bio;
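Lines 5476-5481 are the split path: chunk_aligned_read() first computes how many sectors remain before the next chunk boundary, and if the bio is longer than that it splits off the in-chunk head with bio_split(), chains the remainder to it, and resubmits the remainder. For power-of-two chunk sizes the arithmetic reduces to a mask; the small userspace sketch below models just that calculation (the helper name is invented for the example).

/* Model of the "split at the next chunk boundary" calculation used by
 * chunk_aligned_read(): for power-of-two chunk sizes, the sectors left in
 * the current chunk are chunk_sectors - (sector & (chunk_sectors - 1)).
 * Userspace illustration only. */
#include <stdio.h>
#include <stdint.h>

/* Returns how many of 'nr_sectors' starting at 'sector' stay inside the
 * current chunk; a bio longer than this would be split at that point. */
static unsigned sectors_until_chunk_boundary(uint64_t sector,
					     unsigned nr_sectors,
					     unsigned chunk_sectors)
{
	unsigned in_this_chunk =
		chunk_sectors - (unsigned)(sector & (chunk_sectors - 1));

	return nr_sectors <= in_this_chunk ? nr_sectors : in_this_chunk;
}

int main(void)
{
	unsigned chunk_sectors = 128;   /* 64 KiB chunks, 512-byte sectors */
	uint64_t sector = 100;          /* read starts mid-chunk */
	unsigned nr_sectors = 64;       /* read length in sectors */
	unsigned head = sectors_until_chunk_boundary(sector, nr_sectors,
						     chunk_sectors);

	if (head < nr_sectors)
		printf("split: first %u sectors stay aligned, remaining %u resubmitted\n",
		       head, nr_sectors - head);
	else
		printf("whole read (%u sectors) fits in one chunk\n", nr_sectors);
	return 0;
}

After any split, line 5484 hands the now chunk-aligned bio to raid5_read_one_chunk(); if that fast path declines, the original bio is returned at line 5485 so the caller can queue it through the regular stripe path.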
In retry_aligned_read() (raid_bio is the function argument; the retry walk is sketched after this group):

6571  static int retry_aligned_read(struct r5conf *conf, struct bio *raid_bio,
6590  logical_sector = raid_bio->bi_iter.bi_sector &
6594  last_sector = bio_end_sector(raid_bio);
6609  conf->retry_read_aligned = raid_bio;
6614  if (!add_stripe_bio(sh, raid_bio, dd_idx, 0, 0)) {
6616  conf->retry_read_aligned = raid_bio;
6627  bio_endio(raid_bio);
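retry_aligned_read() is the slow path taken when the direct clone could not be completed: line 6590 rounds the bio down to a stripe boundary, line 6594 finds its end, and the loop then attaches the bio to one stripe head per stripe-sized step, stashing the bio back on conf->retry_read_aligned (lines 6609 and 6616) whenever a stripe cannot be obtained or the bio cannot be added, so a later pass resumes where this one stopped; once the whole range is covered, line 6627 completes the bio. The userspace sketch below models only that resumable walk; the step size, callback, and names are invented for the illustration.

/* Model of the resumable stripe-by-stripe walk in retry_aligned_read():
 * process a sector range in fixed stripe-sized steps, and if a step cannot
 * be handled right now, remember where to resume. Userspace sketch only. */
#include <stdio.h>
#include <stdint.h>
#include <stdbool.h>

#define TOY_STRIPE_SECTORS 8u   /* stands in for the kernel's stripe step */

static bool stripe_32_was_busy_once;

/* Hypothetical per-stripe handler: fails once to simulate the cases where
 * raid5_get_active_stripe() or add_stripe_bio() cannot make progress. */
static bool toy_handle_stripe(uint64_t logical_sector)
{
	if (logical_sector == 32 && !stripe_32_was_busy_once) {
		stripe_32_was_busy_once = true;
		return false;
	}
	return true;
}

/* Walks [start, end) one stripe at a time; on failure returns the sector to
 * resume from (the kernel instead stashes the bio on retry_read_aligned). */
static uint64_t toy_retry_walk(uint64_t start, uint64_t end)
{
	uint64_t logical = start & ~(uint64_t)(TOY_STRIPE_SECTORS - 1);

	for (; logical < end; logical += TOY_STRIPE_SECTORS) {
		if (!toy_handle_stripe(logical)) {
			printf("stripe at %llu busy, will retry from here\n",
			       (unsigned long long)logical);
			return logical;
		}
		printf("attached bio to stripe at %llu\n",
		       (unsigned long long)logical);
	}
	printf("whole range done, completing the bio\n");
	return end;
}

int main(void)
{
	uint64_t end = 48;
	uint64_t resume = toy_retry_walk(5, end);

	if (resume < end)    /* a later pass picks up where this one stopped */
		toy_retry_walk(resume, end);
	return 0;
}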