/linux-6.3-rc2/arch/arm64/crypto/ |
aes-glue.c
    134: u8 dg[AES_BLOCK_SIZE];
    465: u8 buf[AES_BLOCK_SIZE];  in xctr_encrypt()
    510: u8 buf[AES_BLOCK_SIZE];  in ctr_encrypt()
    712: .ivsize = AES_BLOCK_SIZE,
    727: .ivsize = AES_BLOCK_SIZE,
    743: .ivsize = AES_BLOCK_SIZE,
    852: u8 key[AES_BLOCK_SIZE];  in xcbc_setkey()
    900: in += AES_BLOCK_SIZE;  in mac_do_update()
    921: len %= AES_BLOCK_SIZE;  in mac_update()
    984: 2 * AES_BLOCK_SIZE,
    [all …]
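The mac_do_update()/mac_update() hits above ("in += AES_BLOCK_SIZE", "len %= AES_BLOCK_SIZE") are the usual buffer-and-remainder pattern for block-based MACs: top up a carried partial block, absorb full blocks, stash the tail. A minimal userspace sketch of that pattern, assuming a toy XOR step in place of the real AES transform; mac_ctx, mix_block and mac_update here are hypothetical names, not the kernel's:

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

struct mac_ctx {
    uint8_t dg[AES_BLOCK_SIZE];   /* running digest */
    uint8_t buf[AES_BLOCK_SIZE];  /* carried partial input block */
    size_t buflen;                /* valid bytes in buf */
};

/* Toy mixing step; the real code AES-encrypts dg ^ in under the MAC key. */
static void mix_block(uint8_t dg[AES_BLOCK_SIZE],
                      const uint8_t in[AES_BLOCK_SIZE])
{
    for (int i = 0; i < AES_BLOCK_SIZE; i++)
        dg[i] ^= in[i];
}

static void mac_update(struct mac_ctx *ctx, const uint8_t *in, size_t len)
{
    /* Top up a previously carried partial block first. */
    if (ctx->buflen) {
        size_t n = AES_BLOCK_SIZE - ctx->buflen;

        if (n > len)
            n = len;
        memcpy(ctx->buf + ctx->buflen, in, n);
        ctx->buflen += n;
        in += n;
        len -= n;
        if (ctx->buflen < AES_BLOCK_SIZE)
            return;               /* input exhausted; stays buffered */
        mix_block(ctx->dg, ctx->buf);
        ctx->buflen = 0;
    }
    /* Absorb full blocks straight from the input. */
    while (len >= AES_BLOCK_SIZE) {
        mix_block(ctx->dg, in);
        in += AES_BLOCK_SIZE;
        len -= AES_BLOCK_SIZE;
    }
    /* Keep the remainder, mirroring "len %= AES_BLOCK_SIZE". */
    memcpy(ctx->buf, in, len);
    ctx->buflen = len;
}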
aes-neonbs-glue.c
    61:  } __aligned(AES_BLOCK_SIZE);
    226: dst += blocks * AES_BLOCK_SIZE;  in ctr_encrypt()
    227: src += blocks * AES_BLOCK_SIZE;  in ctr_encrypt()
    286: AES_BLOCK_SIZE) - 2;  in __xts_crypt()
    322: in += blocks * AES_BLOCK_SIZE;  in __xts_crypt()
    391: .walksize = 8 * AES_BLOCK_SIZE,
    405: .walksize = 8 * AES_BLOCK_SIZE,
    406: .ivsize = AES_BLOCK_SIZE,
    420: .chunksize = AES_BLOCK_SIZE,
    422: .ivsize = AES_BLOCK_SIZE,
    [all …]
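The ctr_encrypt() hits above advance src and dst by blocks * AES_BLOCK_SIZE once the NEON helper has processed a batch. A minimal sketch of the per-block CTR loop behind that arithmetic, assuming an identity stub in place of the bit-sliced AES; ctr_inc mirrors the big-endian carry of the kernel's crypto_inc(), and all names here are hypothetical:

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* Identity stub; real code runs bit-sliced AES on eight blocks at a time. */
static void toy_encrypt_block(uint8_t out[AES_BLOCK_SIZE],
                              const uint8_t in[AES_BLOCK_SIZE])
{
    memcpy(out, in, AES_BLOCK_SIZE);
}

/* Big-endian increment of the whole block, like crypto_inc(). */
static void ctr_inc(uint8_t ctr[AES_BLOCK_SIZE])
{
    for (int i = AES_BLOCK_SIZE - 1; i >= 0; i--)
        if (++ctr[i])
            break;
}

static void ctr_encrypt_blocks(uint8_t *dst, const uint8_t *src,
                               size_t blocks, uint8_t ctr[AES_BLOCK_SIZE])
{
    uint8_t ks[AES_BLOCK_SIZE];

    for (size_t i = 0; i < blocks; i++) {
        toy_encrypt_block(ks, ctr);
        for (int j = 0; j < AES_BLOCK_SIZE; j++)
            dst[j] = src[j] ^ ks[j];
        ctr_inc(ctr);
        src += AES_BLOCK_SIZE;  /* cf. "src += blocks * AES_BLOCK_SIZE" */
        dst += AES_BLOCK_SIZE;
    }
}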
aes-ce-ccm-glue.c
    80:  memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);  in ccm_init_mac()
    151: u8 __aligned(8) mac[AES_BLOCK_SIZE];  in ccm_encrypt()
    152: u8 buf[AES_BLOCK_SIZE];  in ccm_encrypt()
    161: memcpy(buf, req->iv, AES_BLOCK_SIZE);  in ccm_encrypt()
    171: u32 tail = walk.nbytes % AES_BLOCK_SIZE;  in ccm_encrypt()
    205: u8 __aligned(8) mac[AES_BLOCK_SIZE];  in ccm_decrypt()
    206: u8 buf[AES_BLOCK_SIZE];  in ccm_decrypt()
    215: memcpy(buf, req->iv, AES_BLOCK_SIZE);  in ccm_decrypt()
    268: .ivsize = AES_BLOCK_SIZE,
    269: .chunksize = AES_BLOCK_SIZE,
    [all …]
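The "AES_BLOCK_SIZE - l" copy in ccm_init_mac() reflects the RFC 3610 block layout: iv[0] encodes L - 1, the nonce fills the middle, and the trailing L bytes hold the counter. A sketch of building the A0 counter block under that layout; ccm_a0_from_iv is a hypothetical name:

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

static int ccm_a0_from_iv(uint8_t a0[AES_BLOCK_SIZE],
                          const uint8_t iv[AES_BLOCK_SIZE])
{
    unsigned int l = iv[0] + 1;            /* width of the counter field */

    if (l < 2 || l > 8)                    /* RFC 3610 allows L = 2..8 */
        return -1;

    memcpy(a0, iv, AES_BLOCK_SIZE - l);    /* flags byte + nonce */
    memset(a0 + AES_BLOCK_SIZE - l, 0, l); /* counter starts at 0 for A0 */
    return 0;
}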
ghash-ce-glue.c
    341: u8 buf[AES_BLOCK_SIZE];  in gcm_encrypt()
    405: u8 otag[AES_BLOCK_SIZE];  in gcm_decrypt()
    406: u8 buf[AES_BLOCK_SIZE];  in gcm_decrypt()
    466: u8 iv[AES_BLOCK_SIZE];  in gcm_aes_encrypt()
    474: u8 iv[AES_BLOCK_SIZE];  in gcm_aes_decrypt()
    504: u8 iv[AES_BLOCK_SIZE];  in rfc4106_encrypt()
    517: u8 iv[AES_BLOCK_SIZE];  in rfc4106_decrypt()
    528: .chunksize = AES_BLOCK_SIZE,
    529: .maxauthsize = AES_BLOCK_SIZE,
    544: .chunksize = AES_BLOCK_SIZE,
    [all …]
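The u8 iv[AES_BLOCK_SIZE] locals in gcm_aes_encrypt() and rfc4106_encrypt() hold GCM's initial counter block. For the standard 96-bit IV, J0 is just the nonce followed by a 32-bit big-endian counter equal to 1; a sketch (gcm_j0 is a hypothetical name, GCM_AES_IV_SIZE matches include/crypto/gcm.h):

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE  16
#define GCM_AES_IV_SIZE 12

static void gcm_j0(uint8_t j0[AES_BLOCK_SIZE],
                   const uint8_t nonce[GCM_AES_IV_SIZE])
{
    memcpy(j0, nonce, GCM_AES_IV_SIZE);
    j0[12] = 0;
    j0[13] = 0;
    j0[14] = 0;
    j0[15] = 1;     /* 32-bit big-endian block counter = 1 */
}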
/linux-6.3-rc2/arch/arm/crypto/ |
aes-neonbs-glue.c
    49:  u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32] __aligned(AES_BLOCK_SIZE);
    240: u8 buf[AES_BLOCK_SIZE];  in ctr_encrypt()
    345: u8 buf[2 * AES_BLOCK_SIZE];  in __xts_crypt()
    391: AES_BLOCK_SIZE, 0);  in __xts_crypt()
    405: AES_BLOCK_SIZE + tail, 1);  in __xts_crypt()
    447: .ivsize = AES_BLOCK_SIZE,
    464: .chunksize = AES_BLOCK_SIZE,
    466: .ivsize = AES_BLOCK_SIZE,
    480: .chunksize = AES_BLOCK_SIZE,
    482: .ivsize = AES_BLOCK_SIZE,
    [all …]
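Between blocks, XTS code such as __xts_crypt() above steps the tweak by multiplying it by x in GF(2^128), using the little-endian byte order of IEEE 1619 (what the kernel's gf128mul_x_ble() does). A standalone sketch of that step; xts_next_tweak is a hypothetical name:

#include <stdint.h>

#define AES_BLOCK_SIZE 16

static void xts_next_tweak(uint8_t t[AES_BLOCK_SIZE])
{
    uint8_t carry = 0;

    for (int i = 0; i < AES_BLOCK_SIZE; i++) {
        uint8_t msb = t[i] >> 7;

        t[i] = (uint8_t)(t[i] << 1) | carry;
        carry = msb;
    }
    if (carry)
        t[0] ^= 0x87;   /* x^128 + x^7 + x^2 + x + 1 reduction */
}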
aes-ce-glue.c
    54:  u8 b[AES_BLOCK_SIZE];
    462: AES_BLOCK_SIZE) - 2;  in xts_encrypt()
    534: AES_BLOCK_SIZE) - 2;  in xts_decrypt()
    613: .ivsize = AES_BLOCK_SIZE,
    628: .ivsize = AES_BLOCK_SIZE,
    629: .walksize = 2 * AES_BLOCK_SIZE,
    644: .ivsize = AES_BLOCK_SIZE,
    645: .chunksize = AES_BLOCK_SIZE,
    659: .ivsize = AES_BLOCK_SIZE,
    660: .chunksize = AES_BLOCK_SIZE,
    [all …]
ghash-ce-glue.c
    495: u8 buf[AES_BLOCK_SIZE];  in gcm_encrypt()
    524: src += nblocks * AES_BLOCK_SIZE;  in gcm_encrypt()
    525: dst += nblocks * AES_BLOCK_SIZE;  in gcm_encrypt()
    586: u8 otag[AES_BLOCK_SIZE];  in gcm_decrypt()
    587: u8 buf[AES_BLOCK_SIZE];  in gcm_decrypt()
    620: src += nblocks * AES_BLOCK_SIZE;  in gcm_decrypt()
    621: dst += nblocks * AES_BLOCK_SIZE;  in gcm_decrypt()
    721: .chunksize = AES_BLOCK_SIZE,
    722: .maxauthsize = AES_BLOCK_SIZE,
    736: .chunksize = AES_BLOCK_SIZE,
    [all …]
/linux-6.3-rc2/net/mac80211/ |
fils_aead.c
    28:  u8 d[AES_BLOCK_SIZE], tmp[AES_BLOCK_SIZE] = {};  in aes_s2v()
    41:  crypto_xor(d, tmp, AES_BLOCK_SIZE);  in aes_s2v()
    46:  if (len[i] >= AES_BLOCK_SIZE) {  in aes_s2v()
    51:  AES_BLOCK_SIZE);  in aes_s2v()
    71:  u8 v[AES_BLOCK_SIZE];  in aes_siv_encrypt()
    106: memcpy(out, v, AES_BLOCK_SIZE);  in aes_siv_encrypt()
    155: u8 frame_iv[AES_BLOCK_SIZE], iv[AES_BLOCK_SIZE];  in aes_siv_decrypt()
    156: u8 check[AES_BLOCK_SIZE];  in aes_siv_decrypt()
    263: skb_put(skb, AES_BLOCK_SIZE);  in fils_encrypt_assoc_req()
    317: if (crypt_len < AES_BLOCK_SIZE) {  in fils_decrypt_assoc_resp()
    [all …]
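aes_s2v() implements S2V from RFC 5297 (AES-SIV), whose core is the dbl() doubling of a 128-bit big-endian value before each crypto_xor(). A standalone sketch of dbl; s2v_dbl is a hypothetical name:

#include <stdint.h>

#define AES_BLOCK_SIZE 16

static void s2v_dbl(uint8_t d[AES_BLOCK_SIZE])
{
    uint8_t msb = d[0] & 0x80;

    for (int i = 0; i < AES_BLOCK_SIZE - 1; i++)
        d[i] = (uint8_t)(d[i] << 1) | (d[i + 1] >> 7);
    d[AES_BLOCK_SIZE - 1] <<= 1;
    if (msb)
        d[AES_BLOCK_SIZE - 1] ^= 0x87;  /* conditional reduction */
}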
/linux-6.3-rc2/drivers/crypto/nx/ |
nx-aes-xcbc.c
    23:  u8 state[AES_BLOCK_SIZE];
    25:  u8 buffer[AES_BLOCK_SIZE];
    64:  u8 keys[2][AES_BLOCK_SIZE];  in nx_xcbc_empty()
    114: len = AES_BLOCK_SIZE;  in nx_xcbc_empty()
    118: if (len != AES_BLOCK_SIZE)  in nx_xcbc_empty()
    189: if (total <= AES_BLOCK_SIZE) {  in nx_xcbc_update()
    201: data_len = AES_BLOCK_SIZE;  in nx_xcbc_update()
    225: leftover = AES_BLOCK_SIZE;  in nx_xcbc_update()
    259: AES_BLOCK_SIZE);  in nx_xcbc_update()
    332: len = AES_BLOCK_SIZE;  in nx_xcbc_final()
    [all …]
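The u8 keys[2][AES_BLOCK_SIZE] in nx_xcbc_empty() holds XCBC-MAC's derived keys. Per RFC 3566, K1..K3 are the AES encryptions of the constant blocks 0x01.., 0x02.. and 0x03.. under the user key; a sketch with a placeholder cipher (aes_enc and xcbc_derive_keys are hypothetical names):

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* Placeholder for AES encryption under the user-supplied key. */
static void aes_enc(uint8_t out[AES_BLOCK_SIZE],
                    const uint8_t in[AES_BLOCK_SIZE])
{
    memcpy(out, in, AES_BLOCK_SIZE);
}

static void xcbc_derive_keys(uint8_t k1[AES_BLOCK_SIZE],
                             uint8_t k2[AES_BLOCK_SIZE],
                             uint8_t k3[AES_BLOCK_SIZE])
{
    uint8_t c[AES_BLOCK_SIZE];

    memset(c, 0x01, sizeof(c));
    aes_enc(k1, c);             /* K1 = E_K(0x01 x 16) */
    memset(c, 0x02, sizeof(c));
    aes_enc(k2, c);             /* K2 = E_K(0x02 x 16) */
    memset(c, 0x03, sizeof(c));
    aes_enc(k3, c);             /* K3 = E_K(0x03 x 16) */
}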
nx-aes-gcm.c
    112: if (nbytes <= AES_BLOCK_SIZE) {  in nx_gca()
    155: AES_BLOCK_SIZE);  in nx_gca()
    224: csbcpb->cpb.aes_gcm.out_pat_or_mac, AES_BLOCK_SIZE);  in gmac()
    226: csbcpb->cpb.aes_gcm.out_s0, AES_BLOCK_SIZE);  in gmac()
    248: char out[AES_BLOCK_SIZE];  in gcm_empty()
    266: len = AES_BLOCK_SIZE;  in gcm_empty()
    272: if (len != AES_BLOCK_SIZE)  in gcm_empty()
    378: csbcpb->cpb.aes_gcm.out_pat_or_mac, AES_BLOCK_SIZE);  in gcm_aes_nx_crypt()
    380: csbcpb->cpb.aes_gcm.out_s0, AES_BLOCK_SIZE);  in gcm_aes_nx_crypt()
    482: .maxauthsize = AES_BLOCK_SIZE,
    [all …]
/linux-6.3-rc2/arch/powerpc/crypto/ |
aes-spe-glue.c
    327: u8 b[2][AES_BLOCK_SIZE];  in ppc_xts_encrypt()
    330: if (req->cryptlen < AES_BLOCK_SIZE)  in ppc_xts_encrypt()
    365: u8 b[3][AES_BLOCK_SIZE];  in ppc_xts_decrypt()
    369: if (req->cryptlen < AES_BLOCK_SIZE)  in ppc_xts_decrypt()
    388: AES_BLOCK_SIZE);  in ppc_xts_decrypt()
    418: .cra_blocksize = AES_BLOCK_SIZE,
    438: .base.cra_blocksize = AES_BLOCK_SIZE,
    455: .ivsize = AES_BLOCK_SIZE,
    468: .ivsize = AES_BLOCK_SIZE,
    472: .chunksize = AES_BLOCK_SIZE,
    [all …]
/linux-6.3-rc2/arch/s390/crypto/ |
aes_s390.c
    66:  u8 buf[AES_BLOCK_SIZE];
    326: u8 iv[AES_BLOCK_SIZE];  in cbc_aes_crypt()
    372: .ivsize = AES_BLOCK_SIZE,
    523: .ivsize = AES_BLOCK_SIZE,
    558: memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);  in __ctrblk_init()
    559: crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);  in __ctrblk_init()
    560: ctrptr += AES_BLOCK_SIZE;  in __ctrblk_init()
    581: n = AES_BLOCK_SIZE;  in ctr_aes_crypt()
    590: AES_BLOCK_SIZE);  in ctr_aes_crypt()
    622: .ivsize = AES_BLOCK_SIZE,
    [all …]
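__ctrblk_init() above pre-expands the counter so the CPACF instruction can consume many counter blocks in one call: each block is copied forward and then incremented. A standalone sketch of the same fill; ctr_inc mirrors crypto_inc()'s big-endian carry, and both names are hypothetical:

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

static void ctr_inc(uint8_t ctr[AES_BLOCK_SIZE])
{
    for (int i = AES_BLOCK_SIZE - 1; i >= 0; i--)
        if (++ctr[i])
            break;
}

/* Fill ctrptr with nblocks consecutive counter values starting at iv. */
static void ctrblk_init(uint8_t *ctrptr, const uint8_t iv[AES_BLOCK_SIZE],
                        unsigned int nblocks)
{
    memcpy(ctrptr, iv, AES_BLOCK_SIZE);
    for (unsigned int i = 1; i < nblocks; i++) {
        memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
        ctr_inc(ctrptr + AES_BLOCK_SIZE);
        ctrptr += AES_BLOCK_SIZE;
    }
}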
paes_s390.c
    333: u8 iv[AES_BLOCK_SIZE];  in cbc_paes_crypt()
    388: .ivsize = AES_BLOCK_SIZE,
    565: .ivsize = AES_BLOCK_SIZE,
    631: memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);  in __ctrblk_init()
    632: crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);  in __ctrblk_init()
    633: ctrptr += AES_BLOCK_SIZE;  in __ctrblk_init()
    661: n = AES_BLOCK_SIZE;  in ctr_paes_crypt()
    670: AES_BLOCK_SIZE);  in ctr_paes_crypt()
    694: walk.iv) == AES_BLOCK_SIZE)  in ctr_paes_crypt()
    722: .ivsize = AES_BLOCK_SIZE,
    [all …]
/linux-6.3-rc2/drivers/crypto/ccp/ |
ccp-crypto.h
    109: u8 k1[AES_BLOCK_SIZE];
    110: u8 k2[AES_BLOCK_SIZE];
    115: u8 iv[AES_BLOCK_SIZE];
    118: u8 tag[AES_BLOCK_SIZE];
    122: u8 rfc3686_iv[AES_BLOCK_SIZE];
    142: u8 iv[AES_BLOCK_SIZE];
    146: u8 buf[AES_BLOCK_SIZE];
    150: u8 pad[AES_BLOCK_SIZE];
    158: u8 iv[AES_BLOCK_SIZE];
    161: u8 buf[AES_BLOCK_SIZE];
    [all …]
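The rfc3686_iv[AES_BLOCK_SIZE] field above corresponds to the RFC 3686 (CTR for ESP) counter block: a 4-byte key-derived nonce, the 8-byte per-packet IV, then a 32-bit big-endian counter that starts at 1. A sketch of assembling it; rfc3686_build_ctr is a hypothetical name, and the size constants match include/crypto/ctr.h:

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE         16
#define CTR_RFC3686_NONCE_SIZE 4
#define CTR_RFC3686_IV_SIZE    8

static void rfc3686_build_ctr(uint8_t ctrblk[AES_BLOCK_SIZE],
                              const uint8_t nonce[CTR_RFC3686_NONCE_SIZE],
                              const uint8_t iv[CTR_RFC3686_IV_SIZE])
{
    memcpy(ctrblk, nonce, CTR_RFC3686_NONCE_SIZE);
    memcpy(ctrblk + CTR_RFC3686_NONCE_SIZE, iv, CTR_RFC3686_IV_SIZE);
    ctrblk[12] = 0;
    ctrblk[13] = 0;
    ctrblk[14] = 0;
    ctrblk[15] = 1;     /* block counter starts at 1 */
}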
ccp-crypto-aes.c
    33:  memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);  in ccp_aes_complete()
    79:  (req->cryptlen & (AES_BLOCK_SIZE - 1)))  in ccp_aes_crypt()
    86:  memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE);  in ccp_aes_crypt()
    88:  iv_len = AES_BLOCK_SIZE;  in ccp_aes_crypt()
    216: .base.cra_blocksize = AES_BLOCK_SIZE,
    256: .blocksize = AES_BLOCK_SIZE,
    265: .blocksize = AES_BLOCK_SIZE,
    266: .ivsize = AES_BLOCK_SIZE,
    275: .ivsize = AES_BLOCK_SIZE,
    284: .ivsize = AES_BLOCK_SIZE,
    [all …]
/linux-6.3-rc2/drivers/crypto/vmx/ |
aes_ctr.c
    73:  u8 keystream[AES_BLOCK_SIZE];  in p8_aes_ctr_final()
    87:  crypto_inc(ctrblk, AES_BLOCK_SIZE);  in p8_aes_ctr_final()
    107: while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {  in p8_aes_ctr_crypt()
    113: nbytes / AES_BLOCK_SIZE,  in p8_aes_ctr_crypt()
    120: crypto_inc(walk.iv, AES_BLOCK_SIZE);  in p8_aes_ctr_crypt()
    121: } while ((nbytes -= AES_BLOCK_SIZE) >= AES_BLOCK_SIZE);  in p8_aes_ctr_crypt()
    147: .ivsize = AES_BLOCK_SIZE,
    148: .chunksize = AES_BLOCK_SIZE,
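p8_aes_ctr_final() handles the trailing partial block: the counter is encrypted into a keystream buffer, only the remaining nbytes are XORed, and the counter is still bumped. A standalone sketch with a placeholder cipher; ctr_final and toy_encrypt_block are hypothetical names:

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* Placeholder for AES; real code encrypts the counter under the key. */
static void toy_encrypt_block(uint8_t out[AES_BLOCK_SIZE],
                              const uint8_t in[AES_BLOCK_SIZE])
{
    memcpy(out, in, AES_BLOCK_SIZE);
}

static void ctr_final(uint8_t *dst, const uint8_t *src, size_t nbytes,
                      uint8_t ctrblk[AES_BLOCK_SIZE])
{
    uint8_t keystream[AES_BLOCK_SIZE];

    toy_encrypt_block(keystream, ctrblk);
    for (size_t i = 0; i < nbytes; i++)
        dst[i] = src[i] ^ keystream[i];
    /* Keep the IV state consistent, as crypto_inc() does in the driver. */
    for (int i = AES_BLOCK_SIZE - 1; i >= 0; i--)
        if (++ctrblk[i])
            break;
}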
aes_xts.c
    84:  u8 tweak[AES_BLOCK_SIZE];  in p8_aes_xts_crypt()
    87:  if (req->cryptlen < AES_BLOCK_SIZE)  in p8_aes_xts_crypt()
    120: round_down(nbytes, AES_BLOCK_SIZE),  in p8_aes_xts_crypt()
    125: round_down(nbytes, AES_BLOCK_SIZE),  in p8_aes_xts_crypt()
    131: ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);  in p8_aes_xts_crypt()
    152: .base.cra_blocksize = AES_BLOCK_SIZE,
    161: .ivsize = AES_BLOCK_SIZE
/linux-6.3-rc2/drivers/crypto/ |
padlock-aes.c
    329: .cra_blocksize = AES_BLOCK_SIZE,
    359: nbytes / AES_BLOCK_SIZE);  in ecb_aes_encrypt()
    360: nbytes &= AES_BLOCK_SIZE - 1;  in ecb_aes_encrypt()
    384: nbytes / AES_BLOCK_SIZE);  in ecb_aes_decrypt()
    385: nbytes &= AES_BLOCK_SIZE - 1;  in ecb_aes_decrypt()
    398: .base.cra_blocksize = AES_BLOCK_SIZE,
    425: nbytes / AES_BLOCK_SIZE);  in cbc_aes_encrypt()
    427: nbytes &= AES_BLOCK_SIZE - 1;  in cbc_aes_encrypt()
    451: nbytes / AES_BLOCK_SIZE);  in cbc_aes_decrypt()
    452: nbytes &= AES_BLOCK_SIZE - 1;  in cbc_aes_decrypt()
    [all …]
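The "nbytes &= AES_BLOCK_SIZE - 1" lines above compute the leftover byte count with a mask, which is equivalent to the modulo because AES_BLOCK_SIZE is a power of two. A minimal check of that identity:

#include <assert.h>

#define AES_BLOCK_SIZE 16

int main(void)
{
    for (unsigned int nbytes = 0; nbytes < 4096; nbytes++)
        assert((nbytes & (AES_BLOCK_SIZE - 1)) == nbytes % AES_BLOCK_SIZE);
    return 0;
}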
atmel-aes.c
    158:  u8 lastc[AES_BLOCK_SIZE];
    885:  case AES_BLOCK_SIZE:  in atmel_aes_dma_start()
    1325: .ivsize = AES_BLOCK_SIZE,
    1339: .ivsize = AES_BLOCK_SIZE,
    1353: .ivsize = AES_BLOCK_SIZE,
    1367: .ivsize = AES_BLOCK_SIZE,
    2237: .ivsize = AES_BLOCK_SIZE,
    2253: .ivsize = AES_BLOCK_SIZE,
    2269: .ivsize = AES_BLOCK_SIZE,
    2285: .ivsize = AES_BLOCK_SIZE,
    [all …]
/linux-6.3-rc2/arch/sparc/crypto/ |
aes_glue.c
    284: round_down(nbytes, AES_BLOCK_SIZE),  in cbc_encrypt()
    310: round_down(nbytes, AES_BLOCK_SIZE),  in cbc_decrypt()
    329: keystream, AES_BLOCK_SIZE);  in ctr_crypt_final()
    331: crypto_inc(ctrblk, AES_BLOCK_SIZE);  in ctr_crypt_final()
    350: round_down(nbytes, AES_BLOCK_SIZE),  in ctr_crypt()
    367: .cra_blocksize = AES_BLOCK_SIZE,
    387: .base.cra_blocksize = AES_BLOCK_SIZE,
    400: .base.cra_blocksize = AES_BLOCK_SIZE,
    406: .ivsize = AES_BLOCK_SIZE,
    420: .ivsize = AES_BLOCK_SIZE,
    [all …]
/linux-6.3-rc2/arch/x86/crypto/ |
aesni-intel_glue.c
    307:  nbytes &= AES_BLOCK_SIZE - 1;  in ecb_encrypt()
    329:  nbytes &= AES_BLOCK_SIZE - 1;  in ecb_decrypt()
    351:  nbytes &= AES_BLOCK_SIZE - 1;  in cbc_encrypt()
    514:  u8 keystream[AES_BLOCK_SIZE];  in ctr_crypt()
    563:  u8 keystream[AES_BLOCK_SIZE];  in xctr_crypt()
    1057: .ivsize = AES_BLOCK_SIZE,
    1073: .ivsize = AES_BLOCK_SIZE,
    1091: .ivsize = AES_BLOCK_SIZE,
    1109: .ivsize = AES_BLOCK_SIZE,
    1137: .ivsize = AES_BLOCK_SIZE,
    [all …]
/linux-6.3-rc2/drivers/crypto/keembay/ |
keembay-ocs-aes-core.c
    105:  u8 in_tag[AES_BLOCK_SIZE];
    108:  u8 out_tag[AES_BLOCK_SIZE];
    230:  u8 tmp_buf1[AES_BLOCK_SIZE], tmp_buf2[AES_BLOCK_SIZE];  in sg_swap_blocks()
    1320: .ivsize = AES_BLOCK_SIZE,
    1341: .ivsize = AES_BLOCK_SIZE,
    1363: .ivsize = AES_BLOCK_SIZE,
    1405: .ivsize = AES_BLOCK_SIZE,
    1425: .ivsize = AES_BLOCK_SIZE,
    1446: .ivsize = AES_BLOCK_SIZE,
    1494: .ivsize = AES_BLOCK_SIZE,
    [all …]
/linux-6.3-rc2/drivers/crypto/ccree/ |
cc_aead.h
    17: #define CCM_CONFIG_BUF_SIZE (AES_BLOCK_SIZE * 3)
    52: u8 ctr_iv[AES_BLOCK_SIZE] ____cacheline_aligned;
    55: u8 gcm_iv_inc1[AES_BLOCK_SIZE] ____cacheline_aligned;
    56: u8 gcm_iv_inc2[AES_BLOCK_SIZE] ____cacheline_aligned;
    57: u8 hkey[AES_BLOCK_SIZE] ____cacheline_aligned;
/linux-6.3-rc2/lib/crypto/ |
aesgcm.c
    49:  u8 kin[AES_BLOCK_SIZE] = {};  in aesgcm_expandkey()
    80:  u8 buf[AES_BLOCK_SIZE];  in aesgcm_mac()
    98:  u8 buf[AES_BLOCK_SIZE];  in aesgcm_crypt()
    111: crypto_xor_cpy(dst, src, buf, min(len, AES_BLOCK_SIZE));  in aesgcm_crypt()
    113: dst += AES_BLOCK_SIZE;  in aesgcm_crypt()
    114: src += AES_BLOCK_SIZE;  in aesgcm_crypt()
    115: len -= AES_BLOCK_SIZE;  in aesgcm_crypt()
    170: u8 tagbuf[AES_BLOCK_SIZE];  in aesgcm_decrypt()
    684: u8 tagbuf[AES_BLOCK_SIZE];  in libaesgcm_init()
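The aesgcm_crypt() hits above show GCM's CTR32 loop: each pass XORs min(len, AES_BLOCK_SIZE) keystream bytes, so the final partial block needs no special case, and only the low 32 bits of the counter block change between iterations. A standalone sketch with a placeholder cipher; gctr_crypt is a hypothetical name:

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* Placeholder for AES; real code encrypts ctr under the expanded key. */
static void toy_encrypt_block(uint8_t out[AES_BLOCK_SIZE],
                              const uint8_t in[AES_BLOCK_SIZE])
{
    memcpy(out, in, AES_BLOCK_SIZE);
}

static void gctr_crypt(uint8_t *dst, const uint8_t *src, size_t len,
                       uint8_t ctr[AES_BLOCK_SIZE])
{
    uint8_t buf[AES_BLOCK_SIZE];

    while (len > 0) {
        size_t n = len < AES_BLOCK_SIZE ? len : AES_BLOCK_SIZE;

        toy_encrypt_block(buf, ctr);
        /* cf. crypto_xor_cpy(dst, src, buf, min(len, AES_BLOCK_SIZE)) */
        for (size_t i = 0; i < n; i++)
            dst[i] = src[i] ^ buf[i];
        /* Increment only the trailing 32-bit big-endian counter word. */
        for (int i = AES_BLOCK_SIZE - 1; i >= AES_BLOCK_SIZE - 4; i--)
            if (++ctr[i])
                break;
        dst += n;
        src += n;
        len -= n;
    }
}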
/linux-6.3-rc2/drivers/crypto/qat/qat_common/ |
qat_algs.c
    482:  memcpy(key_reverse + AES_BLOCK_SIZE, key - AES_BLOCK_SIZE,  in qat_alg_xts_reverse_key()
    483:  AES_BLOCK_SIZE);  in qat_alg_xts_reverse_key()
    1283: .cra_blocksize = AES_BLOCK_SIZE,
    1292: .ivsize = AES_BLOCK_SIZE,
    1300: .cra_blocksize = AES_BLOCK_SIZE,
    1309: .ivsize = AES_BLOCK_SIZE,
    1317: .cra_blocksize = AES_BLOCK_SIZE,
    1326: .ivsize = AES_BLOCK_SIZE,
    1347: .ivsize = AES_BLOCK_SIZE,
    1365: .ivsize = AES_BLOCK_SIZE,
    [all …]