Lines matching refs:sector_size (all hits in drivers/md/dm-crypt.c)

182 unsigned short sector_size; member of struct crypt_config
265 sector_align = max(bdev_logical_block_size(cc->dev->bdev), (unsigned)cc->sector_size); in get_max_request_size()
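get_max_request_size() (line 265) picks the stricter of the backing device's logical block size and the crypt sector size as the alignment for the largest request dm-crypt will build. A hedged user-space sketch of that rule follows; clamp_request_bytes() and the round-down step are illustrative, only the max() itself is visible above.

    /*
     * Alignment rule from line 265: the request cap must respect both the
     * device's logical block size and the crypt sector size, so it is
     * rounded down to the larger of the two.  The round-down and the zero
     * guard are assumptions, not copied from the kernel.
     */
    #include <stdint.h>

    static uint32_t clamp_request_bytes(uint32_t cap,
                                        uint32_t logical_block_size,
                                        uint32_t crypt_sector_size)
    {
        uint32_t align = logical_block_size > crypt_sector_size ?
                         logical_block_size : crypt_sector_size;
        uint32_t val = cap - (cap % align);   /* round down to alignment */

        return val ? val : align;             /* never return zero */
    }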
462 if (cc->sector_size != (1 << SECTOR_SHIFT)) { in crypt_iv_lmk_ctr()
620 if (cc->sector_size != (1 << SECTOR_SHIFT)) { in crypt_iv_tcw_ctr()
793 *(__le64 *)buf = cpu_to_le64(dmreq->iv_sector * cc->sector_size); in crypt_iv_eboiv_gen()
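The EBOIV generator (line 793) seeds its IV with the 64-bit product iv_sector * sector_size, stored little-endian at the start of a zeroed buffer; the kernel then encrypts that buffer with the volume key, which is omitted here. A minimal sketch, assuming a 16-byte IV and an illustrative helper name:

    /*
     * IV seed written by crypt_iv_eboiv_gen() at line 793: the product
     * iv_sector * sector_size is stored little-endian in a zeroed IV
     * buffer.  The follow-up encryption of this buffer with the bulk key
     * is not shown, and the 16-byte IV length is an assumption.
     */
    #include <stdint.h>
    #include <string.h>

    static void eboiv_seed(uint8_t iv[16], uint64_t iv_sector,
                           uint32_t sector_size)
    {
        uint64_t v = iv_sector * (uint64_t)sector_size;
        int i;

        memset(iv, 0, 16);
        for (i = 0; i < 8; i++)               /* little-endian store */
            iv[i] = (uint8_t)(v >> (8 * i));
    }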
995 *(__le64 *)es = cpu_to_le64(dmreq->iv_sector * cc->sector_size); in crypt_iv_elephant()
1021 memcpy(data_offset, data2 + sg2->offset, cc->sector_size); in crypt_iv_elephant()
1026 diffuser_disk_to_cpu((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1027 diffuser_b_decrypt((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1028 diffuser_a_decrypt((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1029 diffuser_cpu_to_disk((__le32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1032 for (i = 0; i < (cc->sector_size / 32); i++) in crypt_iv_elephant()
1036 diffuser_disk_to_cpu((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1037 diffuser_a_encrypt((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1038 diffuser_b_encrypt((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1039 diffuser_cpu_to_disk((__le32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
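The Elephant path (lines 1021-1039) does all of its arithmetic in sector-sized units: the diffuser A/B rounds treat the sector as sector_size / sizeof(u32) 32-bit words converted between on-disk little-endian and CPU order, and the loop at line 1032 walks the sector in 32-byte strides. The sketch below shows only that stride; the real diffuser math is omitted and the 32-byte keystream argument is an assumption (only the loop bound is visible above).

    /*
     * Unit arithmetic from the Elephant diffuser path: the diffusers see
     * the sector as sector_size / sizeof(uint32_t) words, and the
     * keystream XOR covers it in sector_size / 32 strides of 32 bytes.
     * The ks argument and its length are assumptions.
     */
    #include <stdint.h>

    static void elephant_xor_keystream(uint8_t *sector_data,
                                       uint32_t sector_size,
                                       const uint8_t ks[32])
    {
        uint32_t i, j;

        /*
         * The diffuser A/B rounds (omitted) would run here over
         * sector_size / sizeof(uint32_t) little-endian 32-bit words.
         */
        for (i = 0; i < sector_size / 32; i++)      /* one 32-byte stride */
            for (j = 0; j < 32; j++)
                sector_data[i * 32 + j] ^= ks[j];
    }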
1217 if (1 << bi->interval_exp != cc->sector_size) { in crypt_integrity_ctr()
1338 if (unlikely(bv_in.bv_len & (cc->sector_size - 1))) in crypt_convert_block_aead()
1365 sg_set_page(&dmreq->sg_in[2], bv_in.bv_page, cc->sector_size, bv_in.bv_offset); in crypt_convert_block_aead()
1371 sg_set_page(&dmreq->sg_out[2], bv_out.bv_page, cc->sector_size, bv_out.bv_offset); in crypt_convert_block_aead()
1393 cc->sector_size, iv); in crypt_convert_block_aead()
1400 cc->sector_size + cc->integrity_tag_size, iv); in crypt_convert_block_aead()
1419 bio_advance_iter(ctx->bio_in, &ctx->iter_in, cc->sector_size); in crypt_convert_block_aead()
1420 bio_advance_iter(ctx->bio_out, &ctx->iter_out, cc->sector_size); in crypt_convert_block_aead()
1439 if (unlikely(bv_in.bv_len & (cc->sector_size - 1))) in crypt_convert_block_skcipher()
1462 sg_set_page(sg_in, bv_in.bv_page, cc->sector_size, bv_in.bv_offset); in crypt_convert_block_skcipher()
1465 sg_set_page(sg_out, bv_out.bv_page, cc->sector_size, bv_out.bv_offset); in crypt_convert_block_skcipher()
1486 skcipher_request_set_crypt(req, sg_in, sg_out, cc->sector_size, iv); in crypt_convert_block_skcipher()
1496 bio_advance_iter(ctx->bio_in, &ctx->iter_in, cc->sector_size); in crypt_convert_block_skcipher()
1497 bio_advance_iter(ctx->bio_out, &ctx->iter_out, cc->sector_size); in crypt_convert_block_skcipher()
1592 unsigned int sector_step = cc->sector_size >> SECTOR_SHIFT; in crypt_convert()
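crypt_convert() (line 1592) consumes the bio one crypt sector at a time: sector_step is the number of 512-byte units per crypt sector, the IV sector counter advances by that step, and the bio iterators advance by sector_size bytes (lines 1419-1420 and 1496-1497). A hedged user-space sketch of that walk; encrypt_one_sector() is a stand-in for the real skcipher/AEAD request:

    /*
     * Shape of the crypt_convert() loop: data is processed one crypt
     * sector (sector_size bytes) at a time while the IV sector counter
     * advances in 512-byte units by sector_step = sector_size >> 9.
     * encrypt_one_sector() is a placeholder, not a kernel function.
     */
    #include <stddef.h>
    #include <stdint.h>

    #define SECTOR_SHIFT 9

    static void encrypt_one_sector(uint8_t *buf, uint32_t len, uint64_t iv_sector)
    {
        (void)buf; (void)len; (void)iv_sector;    /* cipher call goes here */
    }

    static void convert(uint8_t *data, size_t bytes, uint64_t start_sector,
                        uint32_t sector_size)
    {
        uint32_t sector_step = sector_size >> SECTOR_SHIFT;
        uint64_t cc_sector = start_sector;        /* in 512-byte units */
        size_t off;

        for (off = 0; off < bytes; off += sector_size) {
            encrypt_one_sector(data + off, sector_size, cc_sector);
            cc_sector += sector_step;             /* advance one crypt sector */
        }
    }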
3227 } else if (sscanf(opt_string, "sector_size:%hu%c", &cc->sector_size, &dummy) == 1) { in crypt_ctr_optional()
3228 if (cc->sector_size < (1 << SECTOR_SHIFT) || in crypt_ctr_optional()
3229 cc->sector_size > 4096 || in crypt_ctr_optional()
3230 (cc->sector_size & (cc->sector_size - 1))) { in crypt_ctr_optional()
3234 if (ti->len & ((cc->sector_size >> SECTOR_SHIFT) - 1)) { in crypt_ctr_optional()
3238 cc->sector_shift = __ffs(cc->sector_size) - SECTOR_SHIFT; in crypt_ctr_optional()
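Lines 3227-3238 are the whole contract for the sector_size: option: it must be a power of two between 512 and 4096 bytes, the target length must be a whole number of crypt sectors, and sector_shift is derived as log2(sector_size) minus the 512-byte shift. A standalone sketch of the same validation; the function name is illustrative and __builtin_ctz() stands in for the kernel's __ffs():

    /*
     * Validation mirroring lines 3227-3238: 512 <= sector_size <= 4096,
     * power of two, and the target length (counted in 512-byte sectors)
     * must be a multiple of the crypt sector.
     */
    #include <stdbool.h>
    #include <stdint.h>

    #define SECTOR_SHIFT 9
    #define SECTOR_SIZE  (1u << SECTOR_SHIFT)

    static bool sector_size_valid(uint32_t sector_size, uint64_t ti_len_sectors,
                                  uint32_t *sector_shift)
    {
        if (sector_size < SECTOR_SIZE || sector_size > 4096 ||
            (sector_size & (sector_size - 1)))
            return false;     /* not a power of two in [512, 4096] */

        if (ti_len_sectors & ((sector_size >> SECTOR_SHIFT) - 1))
            return false;     /* device length not a multiple of sector_size */

        *sector_shift = (uint32_t)__builtin_ctz(sector_size) - SECTOR_SHIFT;
        return true;
    }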
3297 cc->sector_size = (1 << SECTOR_SHIFT); in crypt_ctr()
3379 (tmpll & ((cc->sector_size >> SECTOR_SHIFT) - 1))) { in crypt_ctr()
3535 if (unlikely((bio->bi_iter.bi_sector & ((cc->sector_size >> SECTOR_SHIFT) - 1)) != 0)) in crypt_map()
3538 if (unlikely(bio->bi_iter.bi_size & (cc->sector_size - 1))) in crypt_map()
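crypt_map() (lines 3535-3538) refuses I/O that is not aligned to the crypt sector: the starting 512-byte sector must sit on a crypt-sector boundary and the byte length must be a multiple of sector_size; in the kernel such bios are ended with an error. A minimal sketch of the two checks:

    /*
     * Alignment checks corresponding to lines 3535 and 3538: a request
     * whose start (in 512-byte sectors) or length (in bytes) is not
     * aligned to the crypt sector size cannot be handled as whole
     * sectors and is rejected.
     */
    #include <stdbool.h>
    #include <stdint.h>

    #define SECTOR_SHIFT 9

    static bool bio_sector_aligned(uint64_t bi_sector, uint32_t bi_size,
                                   uint32_t sector_size)
    {
        if (bi_sector & ((sector_size >> SECTOR_SHIFT) - 1))
            return false;     /* start not on a crypt-sector boundary */
        if (bi_size & (sector_size - 1))
            return false;     /* length not a whole number of crypt sectors */
        return true;
    }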
3616 num_feature_args += cc->sector_size != (1 << SECTOR_SHIFT); in crypt_status()
3635 if (cc->sector_size != (1 << SECTOR_SHIFT)) in crypt_status()
3636 DMEMIT(" sector_size:%d", cc->sector_size); in crypt_status()
3661 if (cc->sector_size != (1 << SECTOR_SHIFT)) in crypt_status()
3662 DMEMIT(",sector_size=%d", cc->sector_size); in crypt_status()
3759 max_t(unsigned int, limits->logical_block_size, cc->sector_size); in crypt_io_hints()
3761 max_t(unsigned int, limits->physical_block_size, cc->sector_size); in crypt_io_hints()
3762 limits->io_min = max_t(unsigned int, limits->io_min, cc->sector_size); in crypt_io_hints()
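Finally, crypt_io_hints() (lines 3759-3762) advertises the crypt sector size to the block layer: the exposed logical and physical block sizes and io_min are each raised to at least sector_size, so upper layers never issue I/O smaller or less aligned than one crypt sector. A sketch of that clamp with illustrative struct and field names:

    /*
     * Queue-limit adjustment mirroring lines 3759-3762: each limit is
     * raised to at least the crypt sector size.  struct io_limits is an
     * illustrative stand-in for the kernel's struct queue_limits.
     */
    #include <stdint.h>

    struct io_limits {
        uint32_t logical_block_size;
        uint32_t physical_block_size;
        uint32_t io_min;
    };

    static uint32_t max_u32(uint32_t a, uint32_t b) { return a > b ? a : b; }

    static void apply_sector_size(struct io_limits *l, uint32_t sector_size)
    {
        l->logical_block_size  = max_u32(l->logical_block_size, sector_size);
        l->physical_block_size = max_u32(l->physical_block_size, sector_size);
        l->io_min              = max_u32(l->io_min, sector_size);
    }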