| /crypto/: cryptlen references by file (source line, code snippet, enclosing context) |
| authencesn.c |
| 92 | unsigned int cryptlen = req->cryptlen; | crypto_authenc_esn_genicv_tail() (local) |
| 124 | unsigned int cryptlen = req->cryptlen; | crypto_authenc_esn_genicv() (local) |
| 140 | ahash_request_set_crypt(ahreq, dst, hash, assoclen + cryptlen); | crypto_authenc_esn_genicv() |
| 168 | unsigned int cryptlen = req->cryptlen; | crypto_authenc_esn_encrypt() (local) |
| 185 | skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv); | crypto_authenc_esn_encrypt() |
| 205 | unsigned int cryptlen = req->cryptlen - authsize; | crypto_authenc_esn_decrypt_tail() (local) |
| 230 | skcipher_request_set_crypt(skreq, dst, dst, cryptlen, req->iv); | crypto_authenc_esn_decrypt_tail() |
| 253 | unsigned int cryptlen = req->cryptlen; | crypto_authenc_esn_decrypt() (local) |
| 259 | cryptlen -= authsize; | crypto_authenc_esn_decrypt() |
| 262 | memcpy_sglist(dst, req->src, assoclen + cryptlen); | crypto_authenc_esn_decrypt() |
| [all …] |
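The pattern above is the standard kernel AEAD length convention: on encryption req->cryptlen is the plaintext length, while on decryption it still includes the trailing ICV, which is why the decrypt paths subtract authsize before driving the inner skcipher and hash. A minimal sketch, assuming only the public <crypto/aead.h> helpers; the function name is illustrative, not from the file:

```c
#include <crypto/aead.h>
#include <linux/types.h>

/*
 * Hedged sketch (not the file's code): length of the payload an
 * authenc-style wrapper actually encrypts/decrypts and hashes.
 */
static unsigned int example_esn_payload_len(struct aead_request *req, bool enc)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);

	/* On decrypt, req->cryptlen still includes the trailing ICV. */
	return enc ? req->cryptlen
		   : req->cryptlen - crypto_aead_authsize(aead);
}
```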
|
| chacha20poly1305.c |
| 46 | unsigned int cryptlen; | (member) |
| 107 | if (rctx->cryptlen == 0) | chacha_decrypt() |
| 121 | rctx->cryptlen, creq->iv); | chacha_decrypt() |
| 141 | __le64 cryptlen; | poly_hash() (member) |
| 152 | if (rctx->cryptlen == req->cryptlen) /* encrypting */ | poly_hash() |
| 172 | total = rctx->cryptlen; | poly_hash() |
| 185 | tail.cryptlen = cpu_to_le64(rctx->cryptlen); | poly_hash() |
| 190 | if (rctx->cryptlen != req->cryptlen) | poly_hash() |
| 249 | if (req->cryptlen == 0) | chacha_encrypt() |
| 276 | rctx->cryptlen = req->cryptlen; | chachapoly_encrypt() |
| [all …] |
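The __le64 cryptlen member and the tail.cryptlen = cpu_to_le64(...) line correspond to the Poly1305 length block from RFC 8439, which ends the MAC input with the AAD length and ciphertext length as little-endian 64-bit words. A hedged sketch of just that block; the struct and helper names are illustrative, not taken from the file:

```c
#include <linux/types.h>
#include <asm/byteorder.h>

/* Final Poly1305 length block per RFC 8439: le64(aad_len) || le64(ct_len). */
struct example_poly_tail {
	__le64 assoclen;
	__le64 cryptlen;
};

static void example_fill_poly_tail(struct example_poly_tail *tail,
				   u64 assoclen, u64 cryptlen)
{
	tail->assoclen = cpu_to_le64(assoclen);
	tail->cryptlen = cpu_to_le64(cryptlen);
}
```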
|
| aegis128-core.c |
| 344 | u64 assoclen, u64 cryptlen) | crypto_aegis128_final() (argument) |
| 347 | u64 cryptbits = cryptlen * 8; | crypto_aegis128_final() |
| 392 | unsigned int cryptlen = req->cryptlen; | crypto_aegis128_encrypt_generic() (local) |
| 401 | crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen); | crypto_aegis128_encrypt_generic() |
| 403 | scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen, | crypto_aegis128_encrypt_generic() |
| 414 | unsigned int cryptlen = req->cryptlen - authsize; | crypto_aegis128_decrypt_generic() (local) |
| 427 | crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen); | crypto_aegis128_decrypt_generic() |
| 453 | unsigned int cryptlen = req->cryptlen; | crypto_aegis128_encrypt_simd() (local) |
| 465 | crypto_aegis128_final_simd(&state, &tag, req->assoclen, cryptlen, 0); | crypto_aegis128_encrypt_simd() |
| 477 | unsigned int cryptlen = req->cryptlen - authsize; | crypto_aegis128_decrypt_simd() (local) |
| [all …] |
|
| ccm.c |
| 125 | unsigned int cryptlen) | format_input() (argument) |
| 166 | unsigned int cryptlen) | crypto_ccm_auth() (argument) |
| 179 | err = format_input(odata, req, cryptlen); | crypto_ccm_auth() |
| 215 | cryptlen += ilen; | crypto_ccm_auth() |
| 233 | req->assoclen + req->cryptlen, | crypto_ccm_encrypt_done() |
| 289 | unsigned int cryptlen = req->cryptlen; | crypto_ccm_encrypt() (local) |
| 326 | unsigned int cryptlen = req->cryptlen - authsize; | crypto_ccm_decrypt_done() (local) |
| 334 | err = crypto_ccm_auth(req, dst, cryptlen); | crypto_ccm_decrypt_done() |
| 349 | unsigned int cryptlen = req->cryptlen; | crypto_ccm_decrypt() (local) |
| 355 | cryptlen -= authsize; | crypto_ccm_decrypt() |
| [all …] |
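crypto_ccm_auth() builds CCM's B0 control block via format_input() before CBC-MACing the associated data and payload, and the message length it encodes is the cryptlen passed in. As a point of reference, here is a hedged sketch of B0 formatting as specified by RFC 3610; it is not a copy of format_input(), and all names are illustrative:

```c
#include <linux/string.h>
#include <linux/types.h>

/*
 * RFC 3610 B0 block: flags octet, nonce, then the message length written
 * big-endian into the last L octets. L is the size of the length field
 * (2..8) and taglen is the tag size M (even, 4..16).
 */
static void example_ccm_format_b0(u8 b0[16], const u8 *nonce, unsigned int L,
				  unsigned int taglen, bool has_aad,
				  u64 cryptlen)
{
	unsigned int i;

	/* Flags: Adata bit, encoded tag length M, encoded length-field size L. */
	b0[0] = (has_aad ? 0x40 : 0) | (((taglen - 2) / 2) << 3) | (L - 1);
	memcpy(&b0[1], nonce, 15 - L);

	/* Message length, big-endian, in the trailing L octets. */
	for (i = 0; i < L; i++)
		b0[15 - i] = (u8)(cryptlen >> (8 * i));
}
```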
|
| echainiv.c |
| 36 | if (req->cryptlen < ivsize) | echainiv_encrypt() |
| 45 | req->assoclen + req->cryptlen); | echainiv_encrypt() |
| 50 | req->cryptlen, info); | echainiv_encrypt() |
| 82 | if (req->cryptlen < ivsize) | echainiv_decrypt() |
| 92 | req->cryptlen - ivsize, req->iv); | echainiv_decrypt() |
|
| gcm.c |
| 56 | unsigned int cryptlen; | (member) |
| 174 | unsigned int cryptlen) | crypto_gcm_init_crypt() (argument) |
| 316 | if (gctx->cryptlen) | gcm_hash_assoc_remain_continue() |
| 413 | req->assoclen + req->cryptlen, | gcm_enc_copy_hash() |
| 424 | gctx->cryptlen = req->cryptlen; | gcm_encrypt_continue() |
| 466 | unsigned int cryptlen = req->cryptlen - authsize; | crypto_gcm_verify() (local) |
| 501 | unsigned int cryptlen = req->cryptlen; | crypto_gcm_decrypt() (local) |
| 504 | cryptlen -= authsize; | crypto_gcm_decrypt() |
| 509 | gctx->cryptlen = cryptlen; | crypto_gcm_decrypt() |
| 749 | req->cryptlen, iv); | crypto_rfc4106_crypt() |
| [all …] |
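The decrypt entries show the usual split: cryptlen = req->cryptlen - authsize is the actual ciphertext, and crypto_gcm_verify() compares the recomputed tag against the one stored right after the ciphertext. A hedged sketch of that verification pattern using only public helpers; the function name and the 16-byte buffer bound are illustrative:

```c
#include <crypto/aead.h>
#include <crypto/scatterwalk.h>
#include <crypto/utils.h>	/* crypto_memneq(); declared in <crypto/algapi.h> on older kernels */
#include <linux/errno.h>
#include <linux/types.h>

static int example_verify_tag(struct aead_request *req, const u8 *computed_tag)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;
	u8 expected[16];

	if (authsize > sizeof(expected))
		return -EINVAL;

	/* The tag sits right after assoclen + ciphertext bytes in req->src. */
	scatterwalk_map_and_copy(expected, req->src,
				 req->assoclen + cryptlen, authsize, 0);

	/* Constant-time comparison; mismatch is reported as -EBADMSG. */
	return crypto_memneq(expected, computed_tag, authsize) ? -EBADMSG : 0;
}
```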
|
| seqiv.c |
| 58 | if (req->cryptlen < ivsize) | seqiv_aead_encrypt() |
| 69 | req->assoclen + req->cryptlen); | seqiv_aead_encrypt() |
| 85 | req->cryptlen - ivsize, info); | seqiv_aead_encrypt() |
| 106 | if (req->cryptlen < ivsize + crypto_aead_authsize(geniv)) | seqiv_aead_decrypt() |
| 116 | req->cryptlen - ivsize, req->iv); | seqiv_aead_decrypt() |
|
| xts.c |
| 87 | const bool cts = (req->cryptlen % XTS_BLOCK_SIZE); | xts_xor_tweak() |
| 164 | int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1); | xts_cts_final() |
| 167 | int tail = req->cryptlen % XTS_BLOCK_SIZE; | xts_cts_final() |
| 209 | if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) { | xts_encrypt_done() |
| 229 | if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) { | xts_decrypt_done() |
| 247 | if (req->cryptlen < XTS_BLOCK_SIZE) | xts_init_crypt() |
| 253 | req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL); | xts_init_crypt() |
| 272 | if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0)) | xts_encrypt() |
| 289 | if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0)) | xts_decrypt() |
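xts.c falls back to ciphertext stealing whenever req->cryptlen is not a multiple of XTS_BLOCK_SIZE: the bulk pass covers the length rounded down to a block boundary, the CTS step handles the remainder, and requests shorter than one block are rejected. A small sketch of just that length split, with illustrative names:

```c
#include <linux/errno.h>

#define EXAMPLE_XTS_BLOCK_SIZE 16	/* AES block size, as used by XTS */

struct example_xts_split {
	unsigned int bulk;	/* bytes handled by the main block-aligned pass */
	unsigned int tail;	/* leftover bytes handled by ciphertext stealing */
};

static int example_xts_split_len(unsigned int cryptlen,
				 struct example_xts_split *s)
{
	/* Anything shorter than one block cannot be processed at all. */
	if (cryptlen < EXAMPLE_XTS_BLOCK_SIZE)
		return -EINVAL;

	s->tail = cryptlen % EXAMPLE_XTS_BLOCK_SIZE;
	s->bulk = cryptlen & ~(EXAMPLE_XTS_BLOCK_SIZE - 1);
	return 0;
}
```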
|
| authenc.c |
| 123 | req->assoclen + req->cryptlen, | authenc_geniv_ahash_done() |
| 144 | req->assoclen + req->cryptlen); | crypto_authenc_genicv() |
| 152 | scatterwalk_map_and_copy(hash, req->dst, req->assoclen + req->cryptlen, | crypto_authenc_genicv() |
| 179 | unsigned int cryptlen = req->cryptlen; | crypto_authenc_encrypt() (local) |
| 196 | skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv); | crypto_authenc_encrypt() |
| 235 | req->cryptlen - authsize, req->iv); | crypto_authenc_decrypt_tail() |
| 268 | req->assoclen + req->cryptlen - authsize); | crypto_authenc_decrypt() |
|
| aegis128-neon.c |
| 58 | unsigned int cryptlen, | crypto_aegis128_final_simd() (argument) |
| 64 | ret = crypto_aegis128_final_neon(state, tag_xor, assoclen, cryptlen, | crypto_aegis128_final_simd() |
|
| adiantum.c |
| 224 | const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; | adiantum_hash_header() |
| 252 | const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; | adiantum_hash_message() |
| 283 | const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; | adiantum_finish() |
| 301 | if (dst_nents == 1 && dst->offset + req->cryptlen <= PAGE_SIZE) { | adiantum_finish() |
| 343 | const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; | adiantum_crypt() |
| 350 | if (req->cryptlen < BLOCKCIPHER_BLOCK_SIZE) | adiantum_crypt() |
| 362 | if (src_nents == 1 && src->offset + req->cryptlen <= PAGE_SIZE) { | adiantum_crypt() |
| 404 | if (round_up(stream_len, CHACHA_BLOCK_SIZE) <= req->cryptlen) | adiantum_crypt() |
|
| krb5enc.c |
| 159 | skcipher_request_set_crypt(skreq, src, dst, req->cryptlen, req->iv); | krb5enc_dispatch_encrypt() |
| 173 | req->assoclen + req->cryptlen, | krb5enc_insert_checksum() |
| 220 | ahash_request_set_crypt(ahreq, req->src, hash, req->assoclen + req->cryptlen); | krb5enc_dispatch_encrypt_hash() |
| 292 | req->assoclen + req->cryptlen - authsize); | krb5enc_dispatch_decrypt_hash() |
| 328 | req->cryptlen - authsize, req->iv); | krb5enc_dispatch_decrypt() |
|
| cts.c |
| 110 | lastn = req->cryptlen - offset; | cts_cbc_encrypt() |
| 150 | unsigned int nbytes = req->cryptlen; | crypto_cts_encrypt() |
| 192 | lastn = req->cryptlen - offset; | cts_cbc_decrypt() |
| 244 | unsigned int nbytes = req->cryptlen; | crypto_cts_decrypt() |
|
| aegis-neon.h |
| 14 | unsigned int cryptlen, | |
|
| hctr2.c |
| 177 | bool has_remainder = req->cryptlen % POLYVAL_BLOCK_SIZE; | hctr2_hash_tweak() |
| 199 | const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; | hctr2_hash_message() |
| 271 | int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; | hctr2_crypt() |
| 275 | if (req->cryptlen < BLOCKCIPHER_BLOCK_SIZE) | hctr2_crypt() |
|
| aegis128-neon-inner.c |
| 319 | unsigned int cryptlen, | crypto_aegis128_final_neon() (argument) |
| 329 | vmov_n_u64(8ULL * cryptlen)); | crypto_aegis128_final_neon() |
|
| crypto_null.c |
| 65 | memcpy_sglist(req->dst, req->src, req->cryptlen); | null_skcipher_crypt() |
|
| aegis.h |
| 43 | unsigned int cryptlen, | |
|
| pcrypt.c |
| 114 | req->cryptlen, req->iv); | pcrypt_aead_encrypt() |
| 165 | req->cryptlen, req->iv); | pcrypt_aead_decrypt() |
|
| skcipher.c |
| 48 | walk->total = req->cryptlen; | skcipher_walk_virt() |
| 104 | walk->total = req->cryptlen; | skcipher_walk_aead_encrypt() |
| 116 | walk->total = req->cryptlen - crypto_aead_authsize(tfm); | skcipher_walk_aead_decrypt() |
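These lines set walk->total to req->cryptlen for plain skcipher and AEAD-encrypt walks, and to cryptlen minus the tag size for AEAD-decrypt walks. For context, a hedged sketch of how a cipher implementation typically drains such a walk; example_process() is a stand-in for the real per-chunk transform and the loop is not taken from this file:

```c
#include <crypto/internal/skcipher.h>
#include <linux/string.h>

static void example_process(void *dst, const void *src, unsigned int nbytes)
{
	/* Stand-in: a real cipher would transform src into dst here. */
	if (dst != src)
		memcpy(dst, src, nbytes);
}

static int example_walk_request(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	/* walk.total is initialised from req->cryptlen by the core. */
	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		example_process(walk.dst.virt.addr, walk.src.virt.addr,
				walk.nbytes);
		/* 0 = everything in this chunk was consumed. */
		err = skcipher_walk_done(&walk, 0);
	}
	return err;
}
```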
|
| drbg.c |
| 1841 | u32 cryptlen = min3(inlen, outlen, (u32)DRBG_OUTSCRATCHLEN); | drbg_kcapi_sym_ctr() (local) |
| 1845 | cryptlen, drbg->V); | drbg_kcapi_sym_ctr() |
| 1853 | memcpy(outbuf, drbg->outscratchpad, cryptlen); | drbg_kcapi_sym_ctr() |
| 1854 | memzero_explicit(drbg->outscratchpad, cryptlen); | drbg_kcapi_sym_ctr() |
| 1856 | outlen -= cryptlen; | drbg_kcapi_sym_ctr() |
| 1857 | outbuf += cryptlen; | drbg_kcapi_sym_ctr() |
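drbg_kcapi_sym_ctr() produces output in chunks of at most DRBG_OUTSCRATCHLEN bytes, copying each chunk out of the scratchpad and wiping it with memzero_explicit() before moving on. A hedged sketch of the same chunking loop, with illustrative names and a stand-in for the CTR encryption step:

```c
#include <linux/minmax.h>
#include <linux/string.h>
#include <linux/types.h>

#define EXAMPLE_SCRATCHLEN 256	/* illustrative; the driver uses DRBG_OUTSCRATCHLEN */

static void example_fill(u8 *scratch, u32 len)
{
	/* Stand-in for the CTR-mode encryption that fills the scratchpad. */
	memset(scratch, 0, len);
}

static void example_generate(u8 *outbuf, u32 outlen)
{
	u8 scratch[EXAMPLE_SCRATCHLEN];

	while (outlen) {
		u32 cryptlen = min_t(u32, outlen, sizeof(scratch));

		example_fill(scratch, cryptlen);
		memcpy(outbuf, scratch, cryptlen);
		/* Wipe key-stream material before reusing the buffer. */
		memzero_explicit(scratch, cryptlen);

		outlen -= cryptlen;
		outbuf += cryptlen;
	}
}
```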
|
| essiv.c |
| 150 | skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, | essiv_skcipher_crypt() |
| 250 | aead_request_set_crypt(subreq, src, req->dst, req->cryptlen, req->iv); | essiv_aead_crypt() |
|
| aead.c |
| 102 | if (req->cryptlen < crypto_aead_authsize(aead)) | crypto_aead_decrypt() |
|
| lrw.c |
| 234 | req->cryptlen, req->iv); | lrw_init_crypt() |
|
| ctr.c |
| 213 | req->cryptlen, iv); | crypto_rfc3686_crypt() |
|