
Searched refs:bio_sectors (Results 1 – 24 of 24) sorted by relevance
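
All 24 hits below are uses of the bio_sectors() helper, which reports how many 512-byte sectors a bio's iterator still covers (its residual byte count shifted down by the sector shift). A minimal userspace sketch of that arithmetic, using simplified stand-in types rather than the kernel's struct bio:

/*
 * Minimal userspace model of what bio_sectors() reports, for
 * illustration only: the number of 512-byte sectors still covered by a
 * bio's iterator, i.e. its residual byte count shifted right by the
 * sector shift.  The struct below is a simplified stand-in, not the
 * kernel's struct bio.
 */
#include <stdio.h>

#define SECTOR_SHIFT 9

struct bio_model {
	unsigned long long bi_sector;	/* device offset in 512-byte sectors */
	unsigned int bi_size;		/* residual I/O size, in bytes */
};

static unsigned int bio_sectors_model(const struct bio_model *bio)
{
	return bio->bi_size >> SECTOR_SHIFT;
}

int main(void)
{
	struct bio_model bio = { .bi_sector = 2048, .bi_size = 64 * 1024 };

	/* a 64 KiB bio spans 128 sectors */
	printf("%u sectors starting at sector %llu\n",
	       bio_sectors_model(&bio), bio.bi_sector);
	return 0;
}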

/drivers/md/bcache/
request.c 114 bio_sectors(bio), (uint64_t) bio->bi_iter.bi_sector); in bch_data_invalidate()
116 while (bio_sectors(bio)) { in bch_data_invalidate()
117 unsigned int sectors = min(bio_sectors(bio), in bch_data_invalidate()
195 if (atomic_sub_return(bio_sectors(bio), &op->c->sectors_to_gc) < 0) in CLOSURE_CALLBACK()
222 if (!bch_alloc_sectors(op->c, k, bio_sectors(bio), in CLOSURE_CALLBACK()
412 bio_sectors(bio) & (c->cache->sb.block_size - 1)) { in check_should_bypass()
468 bch_rescale_priorities(c, bio_sectors(bio)); in check_should_bypass()
471 bch_mark_sectors_bypassed(c, dc, bio_sectors(bio)); in check_should_bypass()
539 unsigned int bio_sectors = bio_sectors(bio); in cache_lookup_fn() local
550 BUG_ON(bio_sectors <= sectors); in cache_lookup_fn()
[all …]
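
The request.c hits above show the chunked-consumption idiom: loop while bio_sectors() is non-zero and peel off a bounded number of sectors per pass. A self-contained sketch of that pattern, with simplified types and an invented per-chunk cap in place of bcache's real limits:

/*
 * Sketch of the chunked-consumption pattern visible in
 * bch_data_invalidate() above: loop while bio_sectors() is non-zero,
 * peel off a bounded number of sectors each iteration, and advance the
 * iterator.  The types, the per-chunk cap and the advance helper are
 * simplifications for this example, not bcache code.
 */
#include <stdio.h>

#define SECTOR_SHIFT	9
#define MAX_CHUNK	16u	/* arbitrary per-iteration cap for the sketch */

struct bio_model {
	unsigned long long bi_sector;	/* current device sector */
	unsigned int bi_size;		/* remaining bytes */
};

static unsigned int bio_sectors_model(const struct bio_model *bio)
{
	return bio->bi_size >> SECTOR_SHIFT;
}

static void bio_advance_model(struct bio_model *bio, unsigned int sectors)
{
	bio->bi_sector += sectors;
	bio->bi_size -= sectors << SECTOR_SHIFT;
}

int main(void)
{
	struct bio_model bio = { .bi_sector = 0, .bi_size = 40u << SECTOR_SHIFT };

	while (bio_sectors_model(&bio)) {
		unsigned int sectors = bio_sectors_model(&bio);

		if (sectors > MAX_CHUNK)
			sectors = MAX_CHUNK;

		/* the real code emits an invalidation key for this range */
		printf("invalidate %u sectors at %llu\n", sectors, bio.bi_sector);
		bio_advance_model(&bio, sectors);
	}
	return 0;
}
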
writeback.h 117 bio_sectors(bio))) in should_writeback()
/drivers/md/
dm.c 508 return bio_sectors(bio); in dm_io_sectors()
1321 unsigned int bio_sectors = bio_sectors(bio); in dm_accept_partial_bio() local
1324 BUG_ON(bio_sectors > *tio->len_ptr); in dm_accept_partial_bio()
1325 BUG_ON(n_sectors > bio_sectors); in dm_accept_partial_bio()
1337 *tio->len_ptr -= bio_sectors - n_sectors; in dm_accept_partial_bio()
1346 io->sector_offset = bio_sectors(io->orig_bio); in dm_accept_partial_bio()
1464 io->sector_offset = bio_sectors(ci->bio); in setup_split_accounting()
1772 ci->sector_count = bio_sectors(bio); in init_clone_info()
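
The dm.c hits above around dm_accept_partial_bio() compare a target's requested n_sectors against bio_sectors(bio) before shrinking the split accounting. A sketch of that invariant under simplified, illustrative types (assert() stands in for the kernel's BUG_ON()):

/*
 * Sketch of the invariant dm_accept_partial_bio() enforces above: a
 * target may only shrink the portion of the bio it handles, never grow
 * it, and the split-accounting length shrinks by the sectors the
 * target declined.  The struct is illustrative, not device-mapper's.
 */
#include <assert.h>
#include <stdio.h>

struct clone_info_model {
	unsigned int bio_sectors;	/* sectors in the clone, as bio_sectors() would report */
	unsigned int len;		/* sectors the core still plans to map */
};

static void accept_partial_bio_model(struct clone_info_model *ci, unsigned int n_sectors)
{
	assert(ci->bio_sectors <= ci->len);	/* the clone never exceeds the plan */
	assert(n_sectors <= ci->bio_sectors);	/* the target can only take less   */

	ci->len -= ci->bio_sectors - n_sectors;	/* hand back what was declined */
	ci->bio_sectors = n_sectors;
}

int main(void)
{
	struct clone_info_model ci = { .bio_sectors = 128, .len = 128 };

	accept_partial_bio_model(&ci, 64);	/* target handles only the first 64 sectors */
	printf("accepted %u, %u still to map\n", ci.bio_sectors, ci.len);
	return 0;
}
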
dm-log-writes.c 670 if (!bio_sectors(bio) && !flush_bio) in log_writes_map()
704 block->nr_sectors = bio_to_dev_sectors(lc, bio_sectors(bio)); in log_writes_map()
716 if (flush_bio && !bio_sectors(bio)) { in log_writes_map()
dm-zoned.h 46 #define dmz_bio_blocks(bio) dmz_sect2blk(bio_sectors(bio))
dm-integrity.c 1612 if (likely(!bio->bi_status) && unlikely(bio_sectors(bio) != dio->range.n_sectors)) { in dec_in_flight()
1727 alignment = dio->range.logical_sector | bio_sectors(bio) | (PAGE_SIZE >> SECTOR_SHIFT); in integrity_recheck()
1914 if (unlikely(logical_sector + bio_sectors(bio) > ic->provided_data_sectors)) { in dm_integrity_check_limits()
1916 logical_sector, bio_sectors(bio), in dm_integrity_check_limits()
1923 logical_sector, bio_sectors(bio)); in dm_integrity_check_limits()
1966 sector_t end_boundary = (sec + bio_sectors(bio) - 1) >> log2_max_io_len; in dm_integrity_map()
1996 unsigned int wanted_tag_size = bio_sectors(bio) >> ic->sb->log2_sectors_per_block; in dm_integrity_map()
2208 dio->range.n_sectors = bio_sectors(bio); in dm_integrity_map_continue()
2430 dio->payload_len = ic->tuple_size * (bio_sectors(bio) >> ic->sb->log2_sectors_per_block); in dm_integrity_map_inline()
2439 if (WARN_ON(!sectors || sectors >= bio_sectors(bio))) { in dm_integrity_map_inline()
[all …]
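
Several dm-integrity hits above size integrity metadata from the bio: tuple_size times bio_sectors(bio) right-shifted by log2_sectors_per_block. A small sketch of that arithmetic with invented parameter values:

/*
 * Sketch of the metadata-sizing arithmetic seen in the dm-integrity
 * hits above: the number of integrity tuples a bio needs is its sector
 * count divided by the sectors-per-block granularity, multiplied by
 * the per-block tuple size.  The parameter values are invented.
 */
#include <stdio.h>

static unsigned int integrity_tag_bytes(unsigned int bio_sectors,
					unsigned int log2_sectors_per_block,
					unsigned int tuple_size)
{
	return tuple_size * (bio_sectors >> log2_sectors_per_block);
}

int main(void)
{
	/* 256 sectors, 4 KiB blocks (2^3 = 8 sectors each), 16-byte tags */
	printf("%u bytes of tag space\n", integrity_tag_bytes(256, 3, 16));
	return 0;
}
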
dm-zone.c 147 return !op_is_flush(bio->bi_opf) && bio_sectors(bio); in dm_is_zone_write()
dm-crypt.c 271 return bio_sectors(bio); in get_max_request_sectors()
1182 if (!bio_sectors(bio) || !io->cc->tuple_size) in dm_crypt_integrity_io_alloc()
1189 tag_len = io->cc->tuple_size * (bio_sectors(bio) >> io->cc->sector_shift); in dm_crypt_integrity_io_alloc()
3508 if (bio_sectors(bio)) in crypt_map()
3518 if (unlikely(bio_sectors(bio) > max_sectors)) in crypt_map()
3535 unsigned int tag_len = cc->tuple_size * (bio_sectors(bio) >> cc->sector_shift); in crypt_map()
3543 if (bio_sectors(bio) > cc->tag_pool_max_sectors) in crypt_map()
dm-ebs-target.c 50 sector_t end_sector = __block_mod(bio->bi_iter.bi_sector, ec->u_bs) + bio_sectors(bio); in __nr_blocks()
raid1.c 1295 r1_bio->sectors = bio_sectors(bio); in init_r1bio()
1378 if (max_sectors < bio_sectors(bio)) { in raid1_read_request()
1437 bio_sectors(bio)) < 0) in wait_blocked_rdev()
1586 if (max_sectors < bio_sectors(bio)) { in raid1_write_request()
1707 bio->bi_iter.bi_sector, bio_sectors(bio)); in raid1_make_request()
dm-clone-target.c 305 return (bio_data_dir(bio) == WRITE && bio_sectors(bio) == clone->region_size); in is_overwrite_bio()
490 bio_sectors(bio)); in process_discard_bio()
raid0.c 614 if (sectors < bio_sectors(bio)) { in raid0_make_request()
raid10.c 1205 if (max_sectors < bio_sectors(bio)) { in raid10_read_request()
1491 if (r10_bio->sectors < bio_sectors(bio)) { in raid10_write_request()
1667 if (bio_sectors(bio) < stripe_size*2) in raid10_handle_discard()
1690 split_size = bio_sectors(bio) - remainder; in raid10_handle_discard()
1876 int sectors = bio_sectors(bio); in raid10_make_request()
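
The raid0/raid1/raid10 hits above share a split-then-retry shape: when only max_sectors can be handled but bio_sectors() reports more, the bio is split and the front piece is submitted first. A sketch of that flow, with a simplified stand-in for bio_split():

/*
 * Sketch of the split-then-retry pattern in the raid hits above: when
 * the geometry (chunk boundary, bad block, plug limit) can only take
 * max_sectors but bio_sectors() reports more, carve the front piece
 * off and handle it now.  The split helper is a simplified stand-in
 * for the kernel's bio_split(), not its real implementation.
 */
#include <stdio.h>

#define SECTOR_SHIFT 9

struct bio_model {
	unsigned long long bi_sector;
	unsigned int bi_size;	/* bytes */
};

static unsigned int bio_sectors_model(const struct bio_model *bio)
{
	return bio->bi_size >> SECTOR_SHIFT;
}

/* carve the first max_sectors off *bio and return them as a new bio */
static struct bio_model bio_split_model(struct bio_model *bio, unsigned int max_sectors)
{
	struct bio_model front = {
		.bi_sector = bio->bi_sector,
		.bi_size = max_sectors << SECTOR_SHIFT,
	};

	bio->bi_sector += max_sectors;
	bio->bi_size -= max_sectors << SECTOR_SHIFT;
	return front;
}

int main(void)
{
	struct bio_model bio = { .bi_sector = 1000, .bi_size = 96u << SECTOR_SHIFT };
	unsigned int max_sectors = 64;

	if (max_sectors < bio_sectors_model(&bio)) {
		struct bio_model front = bio_split_model(&bio, max_sectors);

		printf("submit %u sectors at %llu now, %u sectors at %llu later\n",
		       bio_sectors_model(&front), front.bi_sector,
		       bio_sectors_model(&bio), bio.bi_sector);
	}
	return 0;
}
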
dm-zoned-target.c 631 unsigned int nr_sectors = bio_sectors(bio); in dmz_map()
dm-raid1.c 476 io->count = bio_sectors(bio); in map_region()
raid5.c 5318 unsigned int bio_sectors = bio_sectors(bio); in in_chunk_boundary() local
5322 ((sector & (chunk_sectors - 1)) + bio_sectors); in in_chunk_boundary()
5409 end_sector = sector + bio_sectors(raid_bio); in raid5_read_one_chunk()
5428 if (rdev_has_badblock(rdev, sector, bio_sectors(raid_bio))) { in raid5_read_one_chunk()
5476 if (sectors < bio_sectors(raid_bio)) { in chunk_aligned_read()
6047 if (sectors_per_chunk - chunk_offset >= bio_sectors(bi)) in raid5_bio_lowest_chunk_sector()
dm-writecache.c 1556 if (unlikely((((unsigned int)bio->bi_iter.bi_sector | bio_sectors(bio)) & in writecache_map()
1882 } else if (unlikely(!bio_sectors(bio))) { in __writecache_writeback_pmem()
dm-verity-target.c 722 if (((unsigned int)bio->bi_iter.bi_sector | bio_sectors(bio)) & in verity_map()
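
The dm-writecache and dm-verity hits above both OR the start sector with bio_sectors(bio) and mask against the block size to reject misaligned I/O in a single test. A sketch of that power-of-two alignment check with invented values:

/*
 * Sketch of the power-of-two alignment test used by the dm-writecache
 * and dm-verity hits above: OR-ing the start sector with the sector
 * count and masking with (block_sectors - 1) is non-zero whenever
 * either the offset or the length is not block aligned.
 */
#include <stdio.h>

static int io_is_block_aligned(unsigned long long start_sector,
			       unsigned int nr_sectors,
			       unsigned int block_sectors /* power of two */)
{
	return (((unsigned int)start_sector | nr_sectors) & (block_sectors - 1)) == 0;
}

int main(void)
{
	/* 4 KiB blocks are 8 sectors: both offset and length must be multiples of 8 */
	printf("%d %d %d\n",
	       io_is_block_aligned(64, 8, 8),	/* 1: aligned          */
	       io_is_block_aligned(65, 8, 8),	/* 0: misaligned start */
	       io_is_block_aligned(64, 3, 8));	/* 0: short length     */
	return 0;
}
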
dm-snap.c 2683 if (bio_sectors(bio) > available_sectors) in origin_map()
dm-cache-target.c 804 pb->len = bio_sectors(bio); in accounted_begin()
md.c 449 if (bio_sectors(bio) != 0) in md_submit_bio()
611 if (bio_sectors(bio) == 0) { in md_flush_request()
8856 md_io_clone->sectors = bio_sectors(*bio); in md_clone_bio()
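
The md.c hits above treat bio_sectors(bio) == 0 as the marker of a data-less (flush-only) bio. A small sketch of that classification; the flag field is an invented stand-in for REQ_PREFLUSH:

/*
 * Sketch of the empty-bio test in the md.c hits above: a bio whose
 * bio_sectors() is zero carries no data and can be treated purely as a
 * flush, while a non-empty flush bio still has data to submit once the
 * flush completes.  The struct and flag are simplified stand-ins.
 */
#include <stdio.h>

struct bio_model {
	unsigned int nr_sectors;	/* what bio_sectors() would report */
	int preflush;			/* stand-in for bio->bi_opf & REQ_PREFLUSH */
};

static const char *classify_bio(const struct bio_model *bio)
{
	if (bio->nr_sectors == 0)
		return bio->preflush ? "pure flush, no data to queue" : "empty bio";
	return bio->preflush ? "flush, then submit the data" : "plain data I/O";
}

int main(void)
{
	struct bio_model flush = { .nr_sectors = 0, .preflush = 1 };
	struct bio_model write = { .nr_sectors = 8, .preflush = 1 };

	printf("%s\n%s\n", classify_bio(&flush), classify_bio(&write));
	return 0;
}
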
/drivers/nvme/target/
io-cmd-bdev.c 214 resid = bio_integrity_bytes(bi, bio_sectors(bio)); in nvmet_bdev_alloc_bip()
/drivers/scsi/
sr.c 325 block_sectors = bio_sectors(rq->bio); in sr_done()
/drivers/target/
target_core_iblock.c 716 resid = bio_integrity_bytes(bi, bio_sectors(bio)); in iblock_alloc_bip()

Completed in 124 milliseconds