Lines matching references to iv_size
174 unsigned int iv_size; (member declaration)
317 memset(iv, 0, cc->iv_size); in crypt_iv_plain_gen()
326 memset(iv, 0, cc->iv_size); in crypt_iv_plain64_gen()
335 memset(iv, 0, cc->iv_size); in crypt_iv_plain64be_gen()
337 *(__be64 *)&iv[cc->iv_size - sizeof(u64)] = cpu_to_be64(dmreq->iv_sector); in crypt_iv_plain64be_gen()
349 memset(iv, 0, cc->iv_size); in crypt_iv_essiv_gen()
395 memset(iv, 0, cc->iv_size - sizeof(u64)); /* rest is cleared below */ in crypt_iv_benbi_gen()
398 put_unaligned(val, (__be64 *)(iv + cc->iv_size - sizeof(u64))); in crypt_iv_benbi_gen()
406 memset(iv, 0, cc->iv_size); in crypt_iv_null_gen()
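
The simple generators listed above differ only in where, and in which byte order, a 64-bit value lands inside the iv_size-byte buffer; the null generator just leaves it zeroed (line 406). The listing shows only the memset() calls and the tail stores, so the following is a standalone userspace C sketch of those layouts, not the kernel code: the store helpers are stand-ins for cpu_to_le64()/cpu_to_be64() and put_unaligned(), plain64's little-endian front store comes from the surrounding upstream source rather than the lines above, and the benbi counter value is taken as an opaque input because its derivation is not part of the listing.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Portable byte-order stores, stand-ins for cpu_to_le64()/cpu_to_be64(). */
    static void store_le64(uint8_t *p, uint64_t v)
    {
        for (int i = 0; i < 8; i++)
            p[i] = (uint8_t)(v >> (8 * i));
    }

    static void store_be64(uint8_t *p, uint64_t v)
    {
        for (int i = 0; i < 8; i++)
            p[i] = (uint8_t)(v >> (8 * (7 - i)));
    }

    /* plain64: zero the IV, sector number little-endian at the front. */
    static void plain64_gen(uint8_t *iv, unsigned iv_size, uint64_t sector)
    {
        memset(iv, 0, iv_size);
        store_le64(iv, sector);
    }

    /* plain64be: zero the IV, sector number big-endian in the *last* 8 bytes,
     * matching &iv[cc->iv_size - sizeof(u64)] at line 337. */
    static void plain64be_gen(uint8_t *iv, unsigned iv_size, uint64_t sector)
    {
        memset(iv, 0, iv_size);
        store_be64(iv + iv_size - sizeof(uint64_t), sector);
    }

    /* benbi: clear only the leading bytes (line 395), then drop a big-endian
     * counter into the unaligned tail; memcpy() stands in for the
     * put_unaligned() at line 398. */
    static void benbi_tail(uint8_t *iv, unsigned iv_size, uint64_t counter)
    {
        uint8_t be[8];

        store_be64(be, counter);
        memset(iv, 0, iv_size - sizeof(uint64_t));
        memcpy(iv + iv_size - sizeof(uint64_t), be, sizeof(be));
    }

    static void dump(const char *tag, const uint8_t *iv, unsigned iv_size)
    {
        for (unsigned i = 0; i < iv_size; i++)
            printf("%02x", iv[i]);
        printf("  %s\n", tag);
    }

    int main(void)
    {
        uint8_t iv[16];

        plain64_gen(iv, sizeof(iv), 5);   dump("plain64", iv, sizeof(iv));
        plain64be_gen(iv, sizeof(iv), 5); dump("plain64be", iv, sizeof(iv));
        benbi_tail(iv, sizeof(iv), 5);    dump("benbi-style", iv, sizeof(iv));
        return 0;
    }
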
522 memcpy(iv, &md5state.hash, cc->iv_size); in crypt_iv_lmk_one()
540 memset(iv, 0, cc->iv_size); in crypt_iv_lmk_gen()
561 crypto_xor(dst + sg->offset, iv, cc->iv_size); in crypt_iv_lmk_post()
591 if (cc->key_size <= (cc->iv_size + TCW_WHITENING_SIZE)) { in crypt_iv_tcw_ctr()
603 tcw->iv_seed = kzalloc(cc->iv_size, GFP_KERNEL); in crypt_iv_tcw_ctr()
617 int key_offset = cc->key_size - cc->iv_size - TCW_WHITENING_SIZE; in crypt_iv_tcw_init()
619 memcpy(tcw->iv_seed, &cc->key[key_offset], cc->iv_size); in crypt_iv_tcw_init()
620 memcpy(tcw->whitening, &cc->key[key_offset + cc->iv_size], in crypt_iv_tcw_init()
630 memset(tcw->iv_seed, 0, cc->iv_size); in crypt_iv_tcw_wipe()
693 if (cc->iv_size > 8) in crypt_iv_tcw_gen()
695 cc->iv_size - 8); in crypt_iv_tcw_gen()
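
The tcw lines show how the combined key is carved up: the constructor rejects keys that are not longer than iv_size + TCW_WHITENING_SIZE (line 591), and the init path copies iv_seed and whitening from the tail of the key (lines 617-620), leaving the front for the cipher itself. Below is a hedged userspace sketch of that split; the whitening size is passed in as a parameter because its value is not part of the listing.

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    struct tcw_split {
        const uint8_t *cipher_key;   /* front of the key, fed to the cipher */
        size_t cipher_key_len;
        const uint8_t *iv_seed;      /* iv_size bytes, copied at line 619 */
        const uint8_t *whitening;    /* whitening bytes, copied at line 620 */
    };

    /* Mirror of the key_offset computation at line 617:
     * key_offset = key_size - iv_size - whitening_size. */
    static int tcw_split_key(const uint8_t *key, size_t key_size,
                             size_t iv_size, size_t whitening_size,
                             struct tcw_split *out)
    {
        if (key_size <= iv_size + whitening_size)
            return -1;      /* too short, rejected as at line 591 */

        out->cipher_key     = key;
        out->cipher_key_len = key_size - iv_size - whitening_size;
        out->iv_seed        = key + out->cipher_key_len;
        out->whitening      = out->iv_seed + iv_size;
        return 0;
    }

    int main(void)
    {
        uint8_t key[32 + 16 + 16] = { 0 };
        struct tcw_split s;

        if (tcw_split_key(key, sizeof(key), 16, 16, &s) == 0)
            printf("cipher key %zu bytes, iv_seed at +%td, whitening at +%td\n",
                   s.cipher_key_len, s.iv_seed - key, s.whitening - key);
        return 0;
    }

Wiping (line 630) then only needs to clear the copied-out seed material, not the cipher key handled by the crypto layer.
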
723 get_random_bytes(iv, cc->iv_size); in crypt_iv_random_gen()
735 if (crypto_skcipher_blocksize(any_tfm(cc)) != cc->iv_size) { in crypt_iv_eboiv_ctr()
756 memset(buf, 0, cc->iv_size); in crypt_iv_eboiv_gen()
759 sg_init_one(&src, page_address(ZERO_PAGE(0)), cc->iv_size); in crypt_iv_eboiv_gen()
760 sg_init_one(&dst, iv, cc->iv_size); in crypt_iv_eboiv_gen()
761 skcipher_request_set_crypt(req, &src, &dst, cc->iv_size, buf); in crypt_iv_eboiv_gen()
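
The eboiv generator requires iv_size to equal the cipher block size (line 735) and then derives the per-sector IV by encrypting iv_size zero bytes (ZERO_PAGE) into iv, using buf as the IV of that encryption (lines 756-761). The sketch below only shows how buf is prepared: the little-endian store of the sector's byte offset is taken from the upstream source rather than the listing, and the encrypt step is left as a comment because it needs the cipher handle.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Prepare the IV input buffer, as at line 756 onward: zero iv_size bytes,
     * then (in the upstream code) store the sector's byte offset
     * little-endian at the front. */
    static void eboiv_prepare_buf(uint8_t *buf, unsigned iv_size,
                                  uint64_t sector, unsigned sector_size)
    {
        uint64_t off = sector * (uint64_t)sector_size;

        memset(buf, 0, iv_size);
        for (int i = 0; i < 8; i++)
            buf[i] = (uint8_t)(off >> (8 * i));

        /*
         * The kernel then encrypts iv_size zero bytes with buf as the IV and
         * writes the ciphertext into iv (lines 759-761).  If the mode is CBC,
         * encrypting a zero block with IV buf yields E_K(buf), i.e. the
         * encrypted byte offset that gives eboiv its name.
         */
    }

    int main(void)
    {
        uint8_t buf[16];

        eboiv_prepare_buf(buf, sizeof(buf), 42, 512);
        for (unsigned i = 0; i < sizeof(buf); i++)
            printf("%02x", buf[i]);
        printf("\n");
        return 0;
    }
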
1252 return iv_of_dmreq(cc, dmreq) + cc->iv_size; in org_iv_of_dmreq()
1258 u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size + cc->iv_size; in org_sector_of_dmreq()
1266 u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size + in org_tag_of_dmreq()
1267 cc->iv_size + sizeof(uint64_t); in org_tag_of_dmreq()
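
Lines 1252-1267 define a small per-request scratch area laid out directly behind the dm_crypt_request: the working IV, a copy of the original IV, the original 64-bit sector number, and the integrity tag. A standalone C sketch of that pointer arithmetic, expressed as offsets into one buffer:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Offsets into the scratch area that follows struct dm_crypt_request,
     * mirroring iv_of_dmreq() / org_iv_of_dmreq() / org_sector_of_dmreq() /
     * org_tag_of_dmreq() at lines 1252-1267. */
    struct dmreq_layout {
        size_t iv;          /* working IV, iv_size bytes             */
        size_t org_iv;      /* saved original IV, iv_size bytes      */
        size_t org_sector;  /* original sector number, 8 bytes       */
        size_t org_tag;     /* integrity tag, on_disk_tag_size bytes */
        size_t total;
    };

    static struct dmreq_layout compute_layout(size_t iv_size, size_t tag_size)
    {
        struct dmreq_layout l;

        l.iv         = 0;
        l.org_iv     = l.iv + iv_size;                    /* line 1252 */
        l.org_sector = l.org_iv + iv_size;                /* line 1258 */
        l.org_tag    = l.org_sector + sizeof(uint64_t);   /* lines 1266-1267 */
        l.total      = l.org_tag + tag_size;
        return l;
    }

    int main(void)
    {
        struct dmreq_layout l = compute_layout(16, 32);

        printf("iv@%zu org_iv@%zu org_sector@%zu org_tag@%zu total=%zu\n",
               l.iv, l.org_iv, l.org_sector, l.org_tag, l.total);
        return 0;
    }

This layout is also where the two cc->iv_size terms in the crypt_ctr() allocation (lines 3242-3243) come from.
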
1300 BUG_ON(cc->integrity_iv_size && cc->integrity_iv_size != cc->iv_size); in crypt_convert_block_aead()
1329 sg_set_buf(&dmreq->sg_in[1], org_iv, cc->iv_size); in crypt_convert_block_aead()
1335 sg_set_buf(&dmreq->sg_out[1], org_iv, cc->iv_size); in crypt_convert_block_aead()
1342 memcpy(org_iv, tag_iv, cc->iv_size); in crypt_convert_block_aead()
1349 memcpy(tag_iv, org_iv, cc->iv_size); in crypt_convert_block_aead()
1352 memcpy(iv, org_iv, cc->iv_size); in crypt_convert_block_aead()
1355 aead_request_set_ad(req, sizeof(uint64_t) + cc->iv_size); in crypt_convert_block_aead()
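
For the AEAD path, lines 1329-1355 show that the source and destination scatterlists begin with the original sector number and the original IV, and that exactly those two pieces, sizeof(uint64_t) + cc->iv_size bytes, are declared as associated data. The sketch below illustrates that framing over a flat buffer; the real code uses separate scatterlist entries, and stores the sector little-endian, so treat this as a conceptual layout only.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /*
     * Conceptual AEAD input framing used by crypt_convert_block_aead():
     *   [ sector (8 bytes) | org_iv (iv_size bytes) | payload ... ]
     * The first two fields are authenticated but not encrypted; the payload
     * is both.  ad_len matches aead_request_set_ad() at line 1355.
     */
    static size_t aead_ad_len(size_t iv_size)
    {
        return sizeof(uint64_t) + iv_size;
    }

    int main(void)
    {
        size_t iv_size = 12;             /* e.g. a 96-bit AEAD IV */
        uint8_t frame[8 + 12 + 4096];
        uint64_t sector = 7;
        uint8_t org_iv[12] = { 0 };

        /* Host byte order here; the kernel stores the sector as __le64. */
        memcpy(frame, &sector, sizeof(sector));   /* sg_in[0]            */
        memcpy(frame + 8, org_iv, iv_size);       /* sg_in[1], line 1329 */
        /* frame + aead_ad_len(iv_size) ...          sg_in[2], the data  */

        printf("associated data length = %zu bytes\n", aead_ad_len(iv_size));
        return 0;
    }
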
1445 memcpy(iv, org_iv, cc->iv_size); in crypt_convert_block_skcipher()
2748 cc->iv_size = crypto_aead_ivsize(any_tfm_aead(cc)); in crypt_ctr_ivmode()
2750 cc->iv_size = crypto_skcipher_ivsize(any_tfm(cc)); in crypt_ctr_ivmode()
2752 if (cc->iv_size) in crypt_ctr_ivmode()
2754 cc->iv_size = max(cc->iv_size, in crypt_ctr_ivmode()
2800 cc->key_extra_size = cc->iv_size + TCW_WHITENING_SIZE; in crypt_ctr_ivmode()
2804 cc->integrity_iv_size = cc->iv_size; in crypt_ctr_ivmode()
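
Lines 2748-2754 show where iv_size comes from and the one adjustment applied to it: the value reported by the crypto API (AEAD or skcipher ivsize), and, if that is non-zero, a floor of sizeof(u64) so a 64-bit sector number always fits in the IV buffers used above. A small C sketch of that adjustment, written with an if rather than the kernel's max():

    #include <stdint.h>
    #include <stdio.h>

    /* Floor a non-zero reported IV size at 8 bytes, as at lines 2752-2754;
     * a reported size of 0 (e.g. ECB) stays 0. */
    static unsigned int resolve_iv_size(unsigned int reported)
    {
        if (reported && reported < sizeof(uint64_t))
            return (unsigned int)sizeof(uint64_t);
        return reported;
    }

    int main(void)
    {
        printf("%u %u %u\n",
               resolve_iv_size(0),     /* no IV at all          -> 0  */
               resolve_iv_size(4),     /* hypothetical 4 bytes  -> 8  */
               resolve_iv_size(16));   /* AES block-size IV     -> 16 */
        return 0;
    }
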
2918 cc->iv_size = crypto_aead_ivsize(any_tfm_aead(cc)); in crypt_ctr_cipher_new()
2920 cc->iv_size = crypto_skcipher_ivsize(any_tfm(cc)); in crypt_ctr_cipher_new()
3242 iv_size_padding + cc->iv_size + in crypt_ctr()
3243 cc->iv_size + in crypt_ctr()
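
The last two lines are fragments of the per-request size computed in crypt_ctr(): after the dm_crypt_request itself and some alignment padding (iv_size_padding), space is reserved for the two IV copies, the original sector number, and the on-disk tag, i.e. exactly the scratch layout from lines 1252-1267. A sketch of that sum follows; the terms other than iv_size_padding and the two iv_size entries are named after the upstream code and should be read as assumptions here.

    #include <stdint.h>
    #include <stdio.h>

    /* Extra bytes reserved behind each crypto request, mirroring the sum
     * around lines 3242-3243. */
    static size_t crypt_additional_req_size(size_t dmreq_size,
                                            size_t iv_size_padding,
                                            size_t iv_size,
                                            size_t on_disk_tag_size)
    {
        return dmreq_size +
               iv_size_padding +
               iv_size +                 /* working IV        */
               iv_size +                 /* saved original IV */
               sizeof(uint64_t) +        /* original sector   */
               on_disk_tag_size;         /* integrity tag     */
    }

    int main(void)
    {
        printf("%zu\n", crypt_additional_req_size(64, 0, 16, 0));
        return 0;
    }
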