/linux/crypto/

authencesn.c
    94  unsigned int cryptlen = req->cryptlen;  in crypto_authenc_esn_genicv_tail() [local]
    126  unsigned int cryptlen = req->cryptlen;  in crypto_authenc_esn_genicv() [local]
    142  ahash_request_set_crypt(ahreq, dst, hash, assoclen + cryptlen);  in crypto_authenc_esn_genicv()
    184  unsigned int cryptlen = req->cryptlen;  in crypto_authenc_esn_encrypt() [local]
    204  skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);  in crypto_authenc_esn_encrypt()
    224  unsigned int cryptlen = req->cryptlen - authsize;  in crypto_authenc_esn_decrypt_tail() [local]
    249  skcipher_request_set_crypt(skreq, dst, dst, cryptlen, req->iv);  in crypto_authenc_esn_decrypt_tail()
    272  unsigned int cryptlen = req->cryptlen;  in crypto_authenc_esn_decrypt() [local]
    278  cryptlen -= authsize;  in crypto_authenc_esn_decrypt()
    281  err = crypto_authenc_esn_copy(req, assoclen + cryptlen);  in crypto_authenc_esn_decrypt()
    [all …]
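The call sites above trace the kernel's AEAD length convention: on encryption req->cryptlen is the plaintext length, while on decryption it also covers the authentication tag, so the decrypt paths subtract authsize before driving the inner skcipher. A minimal standalone sketch of that bookkeeping (the struct and helper are illustrative stand-ins, not the kernel API):

    #include <assert.h>
    #include <stddef.h>

    /* Illustrative stand-in for the lengths carried by struct aead_request. */
    struct aead_lengths {
        size_t assoclen;   /* associated data: authenticated, not encrypted */
        size_t cryptlen;   /* payload; includes the tag on decryption */
    };

    /* Bytes the inner skcipher actually processes. */
    static size_t payload_len(const struct aead_lengths *r,
                              size_t authsize, int decrypting)
    {
        return decrypting ? r->cryptlen - authsize : r->cryptlen;
    }

    int main(void)
    {
        struct aead_lengths enc = { .assoclen = 16, .cryptlen = 1024 };
        struct aead_lengths dec = { .assoclen = 16, .cryptlen = 1024 + 16 };

        assert(payload_len(&enc, 16, 0) == 1024);  /* encrypt: all of cryptlen */
        assert(payload_len(&dec, 16, 1) == 1024);  /* decrypt: tag stripped */
        return 0;
    }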
|
aegis128-core.c
    347  u64 assoclen, u64 cryptlen)  in crypto_aegis128_final() [argument]
    350  u64 cryptbits = cryptlen * 8;  in crypto_aegis128_final()
    395  unsigned int cryptlen = req->cryptlen;  in crypto_aegis128_encrypt_generic() [local]
    404  crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);  in crypto_aegis128_encrypt_generic()
    406  scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,  in crypto_aegis128_encrypt_generic()
    417  unsigned int cryptlen = req->cryptlen - authsize;  in crypto_aegis128_decrypt_generic() [local]
    430  crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);  in crypto_aegis128_decrypt_generic()
    456  unsigned int cryptlen = req->cryptlen;  in crypto_aegis128_encrypt_simd() [local]
    468  crypto_aegis128_final_simd(&state, &tag, req->assoclen, cryptlen, 0);  in crypto_aegis128_encrypt_simd()
    480  unsigned int cryptlen = req->cryptlen - authsize;  in crypto_aegis128_decrypt_simd() [local]
    [all …]
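crypto_aegis128_final() absorbs the message lengths as bit counts (cryptbits = cryptlen * 8). In the AEGIS-128 algorithm the two counts form a single 16-byte block of little-endian 64-bit values; a hedged standalone rendition of that block (the encoding detail is an assumption from the algorithm spec, not shown in the listing):

    #include <stdint.h>
    #include <stdio.h>

    /* The 16-byte length block AEGIS mixes in at finalization: two
     * little-endian 64-bit bit counts, assoclen * 8 then cryptlen * 8. */
    static void aegis_length_block(uint8_t out[16], uint64_t assoclen,
                                   uint64_t cryptlen)
    {
        uint64_t assocbits = assoclen * 8;
        uint64_t cryptbits = cryptlen * 8;

        for (int i = 0; i < 8; i++) {
            out[i]     = (uint8_t)(assocbits >> (8 * i));
            out[8 + i] = (uint8_t)(cryptbits >> (8 * i));
        }
    }

    int main(void)
    {
        uint8_t blk[16];

        aegis_length_block(blk, 16, 1024);
        for (int i = 0; i < 16; i++)
            printf("%02x", blk[i]);
        printf("\n");   /* 80000000000000000020000000000000 */
        return 0;
    }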
|
chacha20poly1305.c
    39  __le64 cryptlen;  [member]
    59  unsigned int cryptlen;  [member]
    131  if (rctx->cryptlen == 0)  in chacha_decrypt()
    145  rctx->cryptlen, creq->iv);  in chacha_decrypt()
    158  if (rctx->cryptlen == req->cryptlen) /* encrypting */  in poly_tail_continue()
    178  preq->tail.cryptlen = cpu_to_le64(rctx->cryptlen);  in poly_tail()
    236  if (rctx->cryptlen == req->cryptlen) /* encrypting */  in poly_cipher()
    407  if (req->cryptlen == 0)  in chacha_encrypt()
    421  req->cryptlen, creq->iv);  in chacha_encrypt()
    434  rctx->cryptlen = req->cryptlen;  in chachapoly_encrypt()
    [all …]
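The __le64 cryptlen member and poly_tail() reflect the RFC 8439 MAC layout: Poly1305 ends with a 16-byte tail carrying assoclen and cryptlen as little-endian 64-bit integers, which is why rctx->cryptlen is stored with cpu_to_le64(). A standalone sketch of the tail:

    #include <stdint.h>

    /* RFC 8439 Poly1305 tail: le64(assoclen) || le64(cryptlen), absorbed
     * after the zero-padded AAD and ciphertext. */
    struct poly_tail {
        uint8_t assoclen[8];
        uint8_t cryptlen[8];
    };

    static void put_le64(uint8_t out[8], uint64_t v)
    {
        for (int i = 0; i < 8; i++)
            out[i] = (uint8_t)(v >> (8 * i));
    }

    static void build_poly_tail(struct poly_tail *t, uint64_t assoclen,
                                uint64_t cryptlen)
    {
        put_le64(t->assoclen, assoclen);
        put_le64(t->cryptlen, cryptlen);
    }

    int main(void)
    {
        struct poly_tail t;

        build_poly_tail(&t, 12, 265);   /* illustrative lengths */
        return (t.assoclen[0] == 12 && t.cryptlen[0] == 0x09 &&
                t.cryptlen[1] == 0x01) ? 0 : 1;
    }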
|
keywrap.c
    130  u64 t = 6 * ((req->cryptlen) >> 3);  in crypto_kw_decrypt()
    138  if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE)  in crypto_kw_decrypt()
    154  unsigned int nbytes = req->cryptlen;  in crypto_kw_decrypt()
    208  if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE)  in crypto_kw_encrypt()
    227  unsigned int nbytes = req->cryptlen;  in crypto_kw_encrypt()
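Both keywrap entry points enforce the same framing: data is handled in 8-byte semiblocks (SEMIBSIZE), there must be at least two of them with no partial semiblock, and the unwrap counter is sized as t = 6 * (cryptlen >> 3), i.e. six passes over the semiblock chain. A small standalone check mirroring that validation:

    #include <assert.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define SEMIBSIZE 8   /* AES key wrap works on 64-bit semiblocks */

    /* Mirror of the length validation in crypto_kw_{en,de}crypt():
     * at least two semiblocks, and a whole number of them. */
    static bool kw_len_valid(unsigned int cryptlen)
    {
        return cryptlen >= 2 * SEMIBSIZE && cryptlen % SEMIBSIZE == 0;
    }

    /* Counter sizing from crypto_kw_decrypt(): six passes over the chain. */
    static uint64_t kw_steps(unsigned int cryptlen)
    {
        return 6 * ((uint64_t)cryptlen >> 3);
    }

    int main(void)
    {
        assert(!kw_len_valid(8));    /* one semiblock: too short */
        assert(!kw_len_valid(20));   /* not semiblock-aligned */
        assert(kw_len_valid(32));    /* four semiblocks */
        assert(kw_steps(32) == 24);
        return 0;
    }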
|
ccm.c
    129  unsigned int cryptlen)  in format_input() [argument]
    170  unsigned int cryptlen)  in crypto_ccm_auth() [argument]
    183  err = format_input(odata, req, cryptlen);  in crypto_ccm_auth()
    219  cryptlen += ilen;  in crypto_ccm_auth()
    237  req->assoclen + req->cryptlen,  in crypto_ccm_encrypt_done()
    293  unsigned int cryptlen = req->cryptlen;  in crypto_ccm_encrypt() [local]
    330  unsigned int cryptlen = req->cryptlen - authsize;  in crypto_ccm_decrypt_done() [local]
    338  err = crypto_ccm_auth(req, dst, cryptlen);  in crypto_ccm_decrypt_done()
    353  unsigned int cryptlen = req->cryptlen;  in crypto_ccm_decrypt() [local]
    359  cryptlen -= authsize;  in crypto_ccm_decrypt()
    [all …]
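format_input() at line 129 builds the CCM B0 block; per RFC 3610 its flags byte packs an AAD-present bit, the encoded tag length M, and the width L of the trailing length field: Flags = 64*Adata + 8*((M-2)/2) + (L-1). A sketch of just that byte:

    #include <assert.h>
    #include <stdint.h>

    /* RFC 3610 B0 flags byte: AAD-present bit, encoded tag length M,
     * and width L of the trailing length field. */
    static uint8_t ccm_b0_flags(int have_adata, unsigned int authsize,
                                unsigned int lsize)
    {
        return (uint8_t)((have_adata ? 0x40 : 0) |
                         (((authsize - 2) / 2) << 3) |
                         (lsize - 1));
    }

    int main(void)
    {
        /* 16-byte tag, 4-byte length field, AAD present: 0x40|0x38|0x03 */
        assert(ccm_b0_flags(1, 16, 4) == 0x7b);
        return 0;
    }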
|
gcm.c
    58  unsigned int cryptlen;  [member]
    178  unsigned int cryptlen)  in crypto_gcm_init_crypt() [argument]
    320  if (gctx->cryptlen)  in gcm_hash_assoc_remain_continue()
    417  req->assoclen + req->cryptlen,  in gcm_enc_copy_hash()
    428  gctx->cryptlen = req->cryptlen;  in gcm_encrypt_continue()
    470  unsigned int cryptlen = req->cryptlen - authsize;  in crypto_gcm_verify() [local]
    505  unsigned int cryptlen = req->cryptlen;  in crypto_gcm_decrypt() [local]
    508  cryptlen -= authsize;  in crypto_gcm_decrypt()
    513  gctx->cryptlen = cryptlen;  in crypto_gcm_decrypt()
    753  req->cryptlen, iv);  in crypto_rfc4106_crypt()
    [all …]
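crypto_gcm_verify() recomputes the tag over cryptlen = req->cryptlen - authsize bytes and compares it with the transmitted tag without short-circuiting, so timing does not reveal where the mismatch occurred. The kernel primitive is crypto_memneq; the function below is an illustrative userspace equivalent:

    #include <stddef.h>
    #include <stdint.h>

    /* Constant-time inequality: accumulate XOR differences instead of
     * returning at the first mismatching byte. Nonzero means "differs". */
    static int tag_memneq(const uint8_t *a, const uint8_t *b, size_t n)
    {
        uint8_t diff = 0;

        for (size_t i = 0; i < n; i++)
            diff |= a[i] ^ b[i];
        return diff != 0;
    }

    int main(void)
    {
        const uint8_t a[4] = { 1, 2, 3, 4 };
        const uint8_t b[4] = { 1, 2, 3, 5 };

        return (tag_memneq(a, b, 4) && !tag_memneq(a, a, 4)) ? 0 : 1;
    }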
|
echainiv.c
    37  if (req->cryptlen < ivsize)  in echainiv_encrypt()
    51  req->assoclen + req->cryptlen,  in echainiv_encrypt()
    62  req->cryptlen, info);  in echainiv_encrypt()
    94  if (req->cryptlen < ivsize)  in echainiv_decrypt()
    104  req->cryptlen - ivsize, req->iv);  in echainiv_decrypt()
|
seqiv.c
    58  if (req->cryptlen < ivsize)  in seqiv_aead_encrypt()
    74  req->assoclen + req->cryptlen,  in seqiv_aead_encrypt()
    96  req->cryptlen - ivsize, info);  in seqiv_aead_encrypt()
    117  if (req->cryptlen < ivsize + crypto_aead_authsize(geniv))  in seqiv_aead_decrypt()
    127  req->cryptlen - ivsize, req->iv);  in seqiv_aead_decrypt()
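echainiv and seqiv share the geniv framing: the generated IV travels as the first ivsize bytes counted inside cryptlen, so the inner cipher processes cryptlen - ivsize bytes, and decryption first checks cryptlen >= ivsize (echainiv, line 94) or cryptlen >= ivsize + authsize (seqiv, line 117). A sketch of that accounting (helper names are illustrative):

    #include <assert.h>
    #include <stdbool.h>
    #include <stddef.h>

    /* geniv framing: [assoc | IV | inner payload], IV counted in cryptlen. */
    static bool geniv_decrypt_len_ok(size_t cryptlen, size_t ivsize,
                                     size_t authsize)
    {
        return cryptlen >= ivsize + authsize;
    }

    static size_t geniv_inner_len(size_t cryptlen, size_t ivsize)
    {
        return cryptlen - ivsize;   /* caller validated cryptlen first */
    }

    int main(void)
    {
        assert(!geniv_decrypt_len_ok(20, 8, 16));   /* no room for IV + tag */
        assert(geniv_inner_len(1024 + 8, 8) == 1024);
        return 0;
    }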
|
xts.c
    87  const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);  in xts_xor_tweak()
    164  int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1);  in xts_cts_final()
    167  int tail = req->cryptlen % XTS_BLOCK_SIZE;  in xts_cts_final()
    209  if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {  in xts_encrypt_done()
    229  if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {  in xts_decrypt_done()
    247  if (req->cryptlen < XTS_BLOCK_SIZE)  in xts_init_crypt()
    253  req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL);  in xts_init_crypt()
    272  if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))  in xts_encrypt()
    289  if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))  in xts_decrypt()
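xts.c splits each request at a block boundary: the bulk pass covers cryptlen & ~(XTS_BLOCK_SIZE - 1) bytes, and any remainder (cryptlen % XTS_BLOCK_SIZE) triggers the ciphertext-stealing finalization. The arithmetic, standalone:

    #include <assert.h>
    #include <stddef.h>

    #define XTS_BLOCK_SIZE 16

    /* Split a request the way xts.c does: bulk pass over whole blocks,
     * then a ciphertext-stealing pass for any ragged tail. */
    static void xts_split(size_t cryptlen, size_t *bulk, size_t *tail)
    {
        *bulk = cryptlen & ~(size_t)(XTS_BLOCK_SIZE - 1);
        *tail = cryptlen % XTS_BLOCK_SIZE;
    }

    int main(void)
    {
        size_t bulk, tail;

        xts_split(100, &bulk, &tail);
        assert(bulk == 96 && tail == 4);   /* 6 whole blocks + 4 stolen bytes */

        xts_split(96, &bulk, &tail);
        assert(bulk == 96 && tail == 0);   /* aligned: no stealing pass */
        return 0;
    }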
|
/linux/arch/x86/crypto/

aegis128-aesni-glue.c
    44  void *state, void *tag_xor, unsigned int cryptlen,
    167  unsigned int cryptlen,  in crypto_aegis128_aesni_crypt() [argument]
    182  crypto_aegis128_aesni_final(&state, tag_xor, req->assoclen, cryptlen);  in crypto_aegis128_aesni_crypt()
    198  unsigned int cryptlen = req->cryptlen;  in crypto_aegis128_aesni_encrypt() [local]
    200  crypto_aegis128_aesni_crypt(req, &tag, cryptlen, &OPS);  in crypto_aegis128_aesni_encrypt()
    203  req->assoclen + cryptlen, authsize, 1);  in crypto_aegis128_aesni_encrypt()
    220  unsigned int cryptlen = req->cryptlen - authsize;  in crypto_aegis128_aesni_decrypt() [local]
    223  req->assoclen + cryptlen, authsize, 0);  in crypto_aegis128_aesni_decrypt()
    225  crypto_aegis128_aesni_crypt(req, &tag, cryptlen, &OPS);  in crypto_aegis128_aesni_decrypt()
|
aesni-intel_glue.c
    281  if (req->cryptlen <= AES_BLOCK_SIZE) {  in cts_cbc_encrypt()
    282  if (req->cryptlen < AES_BLOCK_SIZE)  in cts_cbc_encrypt()
    296  if (req->cryptlen == AES_BLOCK_SIZE)  in cts_cbc_encrypt()
    302  subreq.cryptlen);  in cts_cbc_encrypt()
    338  if (req->cryptlen < AES_BLOCK_SIZE)  in cts_cbc_decrypt()
    352  if (req->cryptlen == AES_BLOCK_SIZE)  in cts_cbc_decrypt()
    358  subreq.cryptlen);  in cts_cbc_decrypt()
    584  const unsigned int cryptlen = req->cryptlen;  in xts_crypt() [local]
    601  if (likely(src->length >= cryptlen && dst->length >= cryptlen &&  in xts_crypt()
    1417  req->cryptlen, flags);  in gcm_crypt()
    [all …]
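The CTS-CBC glue here (and the arm/arm64 copies further down) triages req->cryptlen the same way: under one AES block is invalid, exactly one block degenerates to plain CBC, and anything longer takes the stealing path. A sketch of the triage:

    #include <assert.h>

    #define AES_BLOCK_SIZE 16

    enum cts_path { CTS_INVALID, CTS_PLAIN_CBC, CTS_STEAL };

    /* Mirror of the cryptlen triage in cts_cbc_encrypt()/cts_cbc_decrypt(). */
    static enum cts_path cts_dispatch(unsigned int cryptlen)
    {
        if (cryptlen < AES_BLOCK_SIZE)
            return CTS_INVALID;     /* -EINVAL in the driver */
        if (cryptlen == AES_BLOCK_SIZE)
            return CTS_PLAIN_CBC;   /* nothing to steal from */
        return CTS_STEAL;
    }

    int main(void)
    {
        assert(cts_dispatch(15) == CTS_INVALID);
        assert(cts_dispatch(16) == CTS_PLAIN_CBC);
        assert(cts_dispatch(17) == CTS_STEAL);
        return 0;
    }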
|
/linux/drivers/crypto/allwinner/sun8i-ce/

sun8i-ce-cipher.c
    45  if (areq->cryptlen == 0) {  in sun8i_ce_cipher_need_fallback()
    50  if (areq->cryptlen % 16) {  in sun8i_ce_cipher_need_fallback()
    55  len = areq->cryptlen;  in sun8i_ce_cipher_need_fallback()
    71  len = areq->cryptlen;  in sun8i_ce_cipher_need_fallback()
    112  areq->cryptlen, areq->iv);  in sun8i_ce_cipher_fallback()
    145  areq->cryptlen,  in sun8i_ce_cipher_prepare()
    199  offset = areq->cryptlen - ivsize;  in sun8i_ce_cipher_prepare()
    237  len = areq->cryptlen;  in sun8i_ce_cipher_prepare()
    252  len = areq->cryptlen;  in sun8i_ce_cipher_prepare()
    267  chan->timeout = areq->cryptlen;  in sun8i_ce_cipher_prepare()
    [all …]
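sun8i_ce_cipher_need_fallback() routes to a software implementation whatever the engine cannot handle: empty requests, lengths that are not a multiple of the 16-byte block, and scatterlists beyond the hardware's DMA limits. A condensed predicate in that spirit (the scatterlist limit below is illustrative, not the driver's actual constant):

    #include <stdbool.h>

    #define CE_BLOCK_SIZE 16
    #define CE_MAX_SG      8   /* illustrative DMA descriptor limit */

    /* Condensed version of the checks in sun8i_ce_cipher_need_fallback(). */
    static bool need_fallback(unsigned int cryptlen, int src_nents,
                              int dst_nents)
    {
        if (cryptlen == 0)              /* engine rejects empty input */
            return true;
        if (cryptlen % CE_BLOCK_SIZE)   /* no partial-block support */
            return true;
        if (src_nents > CE_MAX_SG || dst_nents > CE_MAX_SG)
            return true;                /* descriptor table is finite */
        return false;
    }

    int main(void)
    {
        return (need_fallback(0, 1, 1) && need_fallback(24, 1, 1) &&
                need_fallback(64, 9, 1) && !need_fallback(64, 2, 2)) ? 0 : 1;
    }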
|
/linux/arch/arm/crypto/

aes-ce-glue.c
    282  if (req->cryptlen <= AES_BLOCK_SIZE) {  in cts_cbc_encrypt()
    283  if (req->cryptlen < AES_BLOCK_SIZE)  in cts_cbc_encrypt()
    298  if (req->cryptlen == AES_BLOCK_SIZE)  in cts_cbc_encrypt()
    304  subreq.cryptlen);  in cts_cbc_encrypt()
    340  if (req->cryptlen <= AES_BLOCK_SIZE) {  in cts_cbc_decrypt()
    341  if (req->cryptlen < AES_BLOCK_SIZE)  in cts_cbc_decrypt()
    356  if (req->cryptlen == AES_BLOCK_SIZE)  in cts_cbc_decrypt()
    362  subreq.cryptlen);  in cts_cbc_decrypt()
    449  int tail = req->cryptlen % AES_BLOCK_SIZE;  in xts_encrypt()
    455  if (req->cryptlen < AES_BLOCK_SIZE)  in xts_encrypt()
    [all …]
|
/linux/drivers/crypto/qce/

aead.c
    71  totallen = req->cryptlen + req->assoclen;  in qce_aead_done()
    124  totallen = rctx->cryptlen + assoclen;  in qce_aead_prepare_dst_buf()
    198  cryptlen = rctx->cryptlen + ctx->authsize;  in qce_aead_ccm_prepare_buf_assoclen()
    200  cryptlen = rctx->cryptlen;  in qce_aead_ccm_prepare_buf_assoclen()
    201  totallen = cryptlen + req->assoclen;  in qce_aead_ccm_prepare_buf_assoclen()
    278  totallen = cryptlen + rctx->assoclen;  in qce_aead_ccm_prepare_buf_assoclen()
    336  unsigned int cryptlen;  in qce_aead_ccm_prepare_buf() [local]
    344  cryptlen = rctx->cryptlen + ctx->authsize;  in qce_aead_ccm_prepare_buf()
    514  rctx->cryptlen = req->cryptlen;  in qce_aead_crypt()
    516  rctx->cryptlen = req->cryptlen - ctx->authsize;  in qce_aead_crypt()
    [all …]
|
skcipher.c
    79  rctx->cryptlen = req->cryptlen;  in qce_skcipher_async_req_handle()
    85  rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen);  in qce_skcipher_async_req_handle()
    87  rctx->dst_nents = sg_nents_for_len(req->dst, req->cryptlen);  in qce_skcipher_async_req_handle()
    110  sg = qce_sgtable_add(&rctx->dst_tbl, req->dst, req->cryptlen);  in qce_skcipher_async_req_handle()
    271  if (!req->cryptlen)  in qce_skcipher_crypt()
    279  if (!IS_ALIGNED(req->cryptlen, blocksize))  in qce_skcipher_crypt()
    292  (IS_XTS(rctx->flags) && ((req->cryptlen <= aes_sw_max_len) ||  in qce_skcipher_crypt()
    293  (req->cryptlen > QCE_SECTOR_SIZE &&  in qce_skcipher_crypt()
    294  req->cryptlen % QCE_SECTOR_SIZE))))) {  in qce_skcipher_crypt()
    301  req->dst, req->cryptlen, req->iv);  in qce_skcipher_crypt()
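The XTS clause at lines 292–294 encodes a qce hardware quirk: the engine processes XTS in QCE_SECTOR_SIZE units, so short requests and requests larger than a sector but not sector-aligned are bounced to the software fallback. A sketch of the decision (aes_sw_max_len is a tunable threshold in the driver; the value below is illustrative):

    #include <assert.h>
    #include <stdbool.h>

    #define QCE_SECTOR_SIZE 512
    static const unsigned int aes_sw_max_len = 512;   /* illustrative */

    /* Mirror of the XTS clause in qce_skcipher_crypt(). */
    static bool xts_needs_fallback(unsigned int cryptlen)
    {
        return cryptlen <= aes_sw_max_len ||
               (cryptlen > QCE_SECTOR_SIZE && cryptlen % QCE_SECTOR_SIZE);
    }

    int main(void)
    {
        assert(xts_needs_fallback(256));    /* small: software is cheaper */
        assert(xts_needs_fallback(513));    /* ragged sector: unsupported */
        assert(!xts_needs_fallback(1024));  /* whole sectors: hardware path */
        return 0;
    }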
|
/linux/drivers/crypto/allwinner/sun8i-ss/

sun8i-ss-cipher.c
    33  if (areq->cryptlen == 0 || areq->cryptlen % 16) {  in sun8i_ss_need_fallback()
    39  sg_nents_for_len(areq->dst, areq->cryptlen) > 8) {  in sun8i_ss_need_fallback()
    44  len = areq->cryptlen;  in sun8i_ss_need_fallback()
    59  len = areq->cryptlen;  in sun8i_ss_need_fallback()
    112  areq->cryptlen, areq->iv);  in sun8i_ss_cipher_fallback()
    128  unsigned int len = areq->cryptlen;  in sun8i_ss_setup_ivs()
    137  offset = areq->cryptlen - ivsize;  in sun8i_ss_setup_ivs()
    205  areq->cryptlen,  in sun8i_ss_cipher()
    253  len = areq->cryptlen;  in sun8i_ss_cipher()
    275  len = areq->cryptlen;  in sun8i_ss_cipher()
    [all …]
|
/linux/drivers/crypto/

omap-aes-gcm.c
    94  int alen, clen, cryptlen, assoclen, ret;  in omap_aes_gcm_copy_buffers() [local]
    102  cryptlen = req->cryptlen;  in omap_aes_gcm_copy_buffers()
    108  cryptlen -= authlen;  in omap_aes_gcm_copy_buffers()
    111  clen = ALIGN(cryptlen, AES_BLOCK_SIZE);  in omap_aes_gcm_copy_buffers()
    113  nsg = !!(assoclen && cryptlen);  in omap_aes_gcm_copy_buffers()
    131  if (cryptlen) {  in omap_aes_gcm_copy_buffers()
    137  ret = omap_crypto_align_sg(&tmp, cryptlen,  in omap_aes_gcm_copy_buffers()
    149  dd->total = cryptlen;  in omap_aes_gcm_copy_buffers()
    162  if (cryptlen) {  in omap_aes_gcm_copy_buffers()
    163  ret = omap_crypto_align_sg(&dd->out_sg, cryptlen,  in omap_aes_gcm_copy_buffers()
    [all …]
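omap-aes-gcm.c rounds the payload up to the AES block size when sizing its DMA scatterlists: clen = ALIGN(cryptlen, AES_BLOCK_SIZE). ALIGN is the kernel's power-of-two round-up; a standalone rendition:

    #include <assert.h>

    /* Round x up to the next multiple of the power-of-two a, as the
     * kernel's ALIGN() does. */
    #define ALIGN_POW2(x, a) (((x) + (a) - 1) & ~((a) - 1))

    int main(void)
    {
        assert(ALIGN_POW2(100u, 16u) == 112u);
        assert(ALIGN_POW2(96u, 16u) == 96u);   /* already aligned */
        return 0;
    }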
|
/linux/drivers/crypto/allwinner/sun4i-ss/

sun4i-ss-cipher.c
    30  unsigned int ileft = areq->cryptlen;  in sun4i_ss_opti_poll()
    31  unsigned int oleft = areq->cryptlen;  in sun4i_ss_opti_poll()
    41  if (!areq->cryptlen)  in sun4i_ss_opti_poll()
    57  algt->stat_bytes += areq->cryptlen;  in sun4i_ss_opti_poll()
    74  ileft = areq->cryptlen / 4;  in sun4i_ss_opti_poll()
    75  oleft = areq->cryptlen / 4;  in sun4i_ss_opti_poll()
    166  areq->cryptlen, areq->iv);  in sun4i_ss_cipher_poll_fallback()
    196  unsigned int ileft = areq->cryptlen;  in sun4i_ss_cipher_poll()
    209  if (!areq->cryptlen)  in sun4i_ss_cipher_poll()
    265  ileft = areq->cryptlen;  in sun4i_ss_cipher_poll()
    [all …]
|
/linux/drivers/crypto/cavium/nitrox/

nitrox_aead.c
    165  creq->gph.param0 = cpu_to_be16(rctx->cryptlen);  in nitrox_set_creq()
    166  creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);  in nitrox_set_creq()
    227  rctx->cryptlen = areq->cryptlen;  in nitrox_aes_gcm_enc()
    229  rctx->srclen = areq->assoclen + areq->cryptlen;  in nitrox_aes_gcm_enc()
    261  rctx->cryptlen = areq->cryptlen - aead->authsize;  in nitrox_aes_gcm_dec()
    263  rctx->srclen = areq->cryptlen + areq->assoclen;  in nitrox_aes_gcm_dec()
    449  aead_rctx->cryptlen = areq->cryptlen;  in nitrox_rfc4106_enc()
    451  aead_rctx->srclen = aead_rctx->assoclen + aead_rctx->cryptlen;  in nitrox_rfc4106_enc()
    481  aead_rctx->cryptlen = areq->cryptlen - aead->authsize;  in nitrox_rfc4106_dec()
    484  areq->cryptlen - GCM_RFC4106_IV_SIZE + areq->assoclen;  in nitrox_rfc4106_dec()
|
/linux/drivers/crypto/tegra/

tegra-se-aes.c
    56  unsigned int cryptlen;  [member]
    273  rctx->len = req->cryptlen;  in tegra_aes_do_one_req()
    443  if (!req->cryptlen)  in tegra_aes_crypt()
    1054  rctx->cryptlen, 0);  in tegra_ccm_compute_auth()
    1055  offset += rctx->cryptlen;  in tegra_ccm_compute_auth()
    1090  if (rctx->cryptlen) {  in tegra_ccm_do_ctr()
    1170  rctx->cryptlen = req->cryptlen;  in tegra_ccm_do_one_req()
    1182  rctx->cryptlen = req->cryptlen - ctx->authsize;  in tegra_ccm_do_one_req()
    1241  rctx->cryptlen = req->cryptlen;  in tegra_gcm_do_one_req()
    1243  rctx->cryptlen = req->cryptlen - ctx->authsize;  in tegra_gcm_do_one_req()
    [all …]
|
/linux/drivers/crypto/gemini/

sl3516-ce-cipher.c
    34  if (areq->cryptlen == 0 || areq->cryptlen % 16) {  in sl3516_ce_need_fallback()
    119  areq->cryptlen, areq->iv);  in sl3516_ce_cipher_fallback()
    147  areq->cryptlen,  in sl3516_ce_cipher()
    179  len = areq->cryptlen;  in sl3516_ce_cipher()
    189  areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo);  in sl3516_ce_cipher()
    196  dev_err(ce->dev, "remaining len %d/%u nr_sgs=%d\n", len, areq->cryptlen, nr_sgs);  in sl3516_ce_cipher()
    201  len = areq->cryptlen;  in sl3516_ce_cipher()
    211  areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo);  in sl3516_ce_cipher()
    234  ecb->cipher.algorithm_len = areq->cryptlen;  in sl3516_ce_cipher()
|
/linux/arch/arm64/crypto/

aes-glue.c
    292  if (req->cryptlen <= AES_BLOCK_SIZE) {  in cts_cbc_encrypt()
    293  if (req->cryptlen < AES_BLOCK_SIZE)  in cts_cbc_encrypt()
    308  if (req->cryptlen == AES_BLOCK_SIZE)  in cts_cbc_encrypt()
    314  subreq.cryptlen);  in cts_cbc_encrypt()
    349  if (req->cryptlen <= AES_BLOCK_SIZE) {  in cts_cbc_decrypt()
    350  if (req->cryptlen < AES_BLOCK_SIZE)  in cts_cbc_decrypt()
    365  if (req->cryptlen == AES_BLOCK_SIZE)  in cts_cbc_decrypt()
    371  subreq.cryptlen);  in cts_cbc_decrypt()
    546  int tail = req->cryptlen % AES_BLOCK_SIZE;  in xts_encrypt()
    552  if (req->cryptlen < AES_BLOCK_SIZE)  in xts_encrypt()
    [all …]
|
/linux/drivers/crypto/intel/keembay/

keembay-ocs-aes-core.c
    289  if (req->cryptlen < AES_BLOCK_SIZE)  in kmb_ocs_sk_validate_input()
    326  req->cryptlen, req->iv);  in kmb_ocs_sk_common()
    434  req->cryptlen, 0);  in kmb_ocs_sk_prepare_inplace()
    469  req->cryptlen, 0);  in kmb_ocs_sk_prepare_notinplace()
    484  req->cryptlen, 0);  in kmb_ocs_sk_prepare_notinplace()
    551  req->cryptlen, req->iv, iv_size);  in kmb_ocs_sk_run()
    571  req->cryptlen - iv_size,  in kmb_ocs_sk_run()
    580  req->cryptlen - iv_size,  in kmb_ocs_sk_run()
    652  req->cryptlen, req->iv);  in kmb_ocs_aead_common()
    745  in_size = req->cryptlen - tag_size;  in kmb_ocs_aead_dma_prepare()
    [all …]
|
/linux/drivers/crypto/xilinx/

zynqmp-aes-gcm.c
    93  dma_size = req->cryptlen + ZYNQMP_AES_KEY_SIZE  in zynqmp_aes_aead_cipher()
    96  dma_size = req->cryptlen + GCM_AES_IV_SIZE;  in zynqmp_aes_aead_cipher()
    109  data_size = req->cryptlen;  in zynqmp_aes_aead_cipher()
    110  scatterwalk_map_and_copy(kbuf, req->src, 0, req->cryptlen, 0);  in zynqmp_aes_aead_cipher()
    191  req->cryptlen < ZYNQMP_AES_MIN_INPUT_BLK_SIZE) {  in zynqmp_fallback_check()
    194  if ((req->cryptlen % ZYNQMP_AES_WORD_LEN) != 0)  in zynqmp_fallback_check()
    198  req->cryptlen <= ZYNQMP_AES_AUTH_SIZE) {  in zynqmp_fallback_check()
    224  areq->cryptlen, areq->iv);  in zynqmp_handle_aes_req()
|
/linux/drivers/crypto/aspeed/

aspeed-hace-crypto.c
    38  areq->cryptlen, areq->iv);  in aspeed_crypto_do_fallback()
    52  if (areq->cryptlen == 0)  in aspeed_crypto_need_fallback()
    56  !IS_ALIGNED(areq->cryptlen, DES_BLOCK_SIZE)  in aspeed_crypto_need_fallback()
    60  !IS_ALIGNED(areq->cryptlen, AES_BLOCK_SIZE)  in aspeed_crypto_need_fallback()
    168  "nbytes", nbytes, "cryptlen", req->cryptlen);  in aspeed_sk_transfer()
    193  crypto_engine->cipher_addr, req->cryptlen);  in aspeed_sk_start()
    202  "nbytes", nbytes, "cryptlen", req->cryptlen);  in aspeed_sk_start()
    269  total = req->cryptlen;  in aspeed_sk_start_sg()
    300  total = req->cryptlen;  in aspeed_sk_start_sg()
    417  if (!IS_ALIGNED(req->cryptlen, DES_BLOCK_SIZE))  in aspeed_des_crypt()
    [all …]
|