/linux/arch/arm64/crypto/

aes-glue.c
    130: u8 dg[AES_BLOCK_SIZE];
    460: u8 buf[AES_BLOCK_SIZE];        in ctr_encrypt()
    659: .ivsize = AES_BLOCK_SIZE,
    674: .ivsize = AES_BLOCK_SIZE,
    690: .ivsize = AES_BLOCK_SIZE,
    707: .ivsize = AES_BLOCK_SIZE,
    783: u8 key[AES_BLOCK_SIZE];        in xcbc_setkey()
    831: in += AES_BLOCK_SIZE;          in mac_do_update()
    852: len %= AES_BLOCK_SIZE;         in mac_update()
    915: 2 * AES_BLOCK_SIZE,
    [all …]
|
aes-neonbs-glue.c
    59: } __aligned(AES_BLOCK_SIZE);
    210: u8 buf[AES_BLOCK_SIZE];             in ctr_encrypt()
    292: AES_BLOCK_SIZE) - 2;                in __xts_crypt()
    333: in += blocks * AES_BLOCK_SIZE;      in __xts_crypt()
    396: .walksize = 8 * AES_BLOCK_SIZE,
    410: .walksize = 8 * AES_BLOCK_SIZE,
    411: .ivsize = AES_BLOCK_SIZE,
    425: .chunksize = AES_BLOCK_SIZE,
    426: .walksize = 8 * AES_BLOCK_SIZE,
    427: .ivsize = AES_BLOCK_SIZE,
    [all …]
|
aes-ce-ccm-glue.c
    80: memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);      in ccm_init_mac()
    151: u8 __aligned(8) mac[AES_BLOCK_SIZE];            in ccm_encrypt()
    152: u8 buf[AES_BLOCK_SIZE];                         in ccm_encrypt()
    161: memcpy(buf, req->iv, AES_BLOCK_SIZE);           in ccm_encrypt()
    173: u32 tail = walk.nbytes % AES_BLOCK_SIZE;        in ccm_encrypt()
    209: u8 __aligned(8) mac[AES_BLOCK_SIZE];            in ccm_decrypt()
    210: u8 buf[AES_BLOCK_SIZE];                         in ccm_decrypt()
    219: memcpy(buf, req->iv, AES_BLOCK_SIZE);           in ccm_decrypt()
    273: .ivsize = AES_BLOCK_SIZE,
    274: .chunksize = AES_BLOCK_SIZE,
    [all …]
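
The ccm_init_mac() and ccm_encrypt() hits above all revolve around one AES_BLOCK_SIZE-wide block: the CCM "B0" header that seeds the CBC-MAC, built from the request IV, plus a MAC/tail buffer of the same width. As rough orientation, here is a standalone sketch of how an RFC 3610 B0 block is laid out; the helper name build_ccm_b0() and its argument list are invented for the example and are not the driver's API.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/*
 * Illustrative only: lay out the RFC 3610 "B0" block (16 bytes, i.e. one
 * AES_BLOCK_SIZE).  l = 15 - nonce_len is the width of the length field.
 */
static int build_ccm_b0(const uint8_t *nonce, size_t nonce_len,
                        int have_assoc, size_t tag_len, uint64_t msg_len,
                        uint8_t b0[16])
{
    size_t l = 15 - nonce_len;
    size_t i;

    if (nonce_len < 7 || nonce_len > 13 ||
        tag_len < 4 || tag_len > 16 || (tag_len & 1))
        return -1;

    /* flags byte: Adata | ((M - 2) / 2) << 3 | (L - 1) */
    b0[0] = (have_assoc ? 0x40 : 0) |
            (((tag_len - 2) / 2) << 3) |
            (l - 1);
    memcpy(&b0[1], nonce, nonce_len);

    /* message length, big endian, in the last l bytes */
    for (i = 0; i < l; i++)
        b0[15 - i] = (uint8_t)(msg_len >> (8 * i));
    return 0;
}

The AES_BLOCK_SIZE - l copy at line 80 presumably grabs just that flags-plus-nonce prefix from the request IV, since the last l bytes of B0 carry the message length rather than counter bits.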
|
ghash-ce-glue.c
    347: u8 buf[AES_BLOCK_SIZE];                                  in gcm_encrypt()
    348: u8 iv[AES_BLOCK_SIZE];                                   in gcm_encrypt()
    376: nbytes &= ~(AES_BLOCK_SIZE - 1);                         in gcm_encrypt()
    389: if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE))     in gcm_encrypt()
    413: u8 otag[AES_BLOCK_SIZE];                                 in gcm_decrypt()
    414: u8 buf[AES_BLOCK_SIZE];                                  in gcm_decrypt()
    415: u8 iv[AES_BLOCK_SIZE];                                   in gcm_decrypt()
    448: nbytes &= ~(AES_BLOCK_SIZE - 1);                         in gcm_decrypt()
    461: if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE))     in gcm_decrypt()
    476: .chunksize = AES_BLOCK_SIZE,
    [all …]
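
A pattern that repeats in gcm_encrypt()/gcm_decrypt() and in several of the drivers below: because AES_BLOCK_SIZE is 16, a power of two, masking with AES_BLOCK_SIZE - 1 splits a byte count into whole blocks and a tail without a division. A minimal, self-contained illustration (plain C, nothing kernel-specific):

#include <assert.h>
#include <stddef.h>

#define AES_BLOCK_SIZE 16   /* value of the kernel constant */

int main(void)
{
    size_t nbytes = 100;

    /* bytes covered by full blocks, rounded down with a power-of-two mask */
    size_t full = nbytes & ~(size_t)(AES_BLOCK_SIZE - 1);   /* 96 */
    /* leftover tail that still needs a partial-block path */
    size_t tail = nbytes & (AES_BLOCK_SIZE - 1);             /* 4  */

    assert(full == 96 && tail == 4);
    assert(full + tail == nbytes);
    return 0;
}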
|
/linux/arch/arm/crypto/

aes-neonbs-glue.c
    49: u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32] __aligned(AES_BLOCK_SIZE);
    240: u8 buf[AES_BLOCK_SIZE];           in ctr_encrypt()
    352: u8 buf[2 * AES_BLOCK_SIZE];       in __xts_crypt()
    398: AES_BLOCK_SIZE, 0);               in __xts_crypt()
    412: AES_BLOCK_SIZE + tail, 1);        in __xts_crypt()
    454: .ivsize = AES_BLOCK_SIZE,
    471: .chunksize = AES_BLOCK_SIZE,
    473: .ivsize = AES_BLOCK_SIZE,
    487: .chunksize = AES_BLOCK_SIZE,
    489: .ivsize = AES_BLOCK_SIZE,
    [all …]
|
aes-ce-glue.c
    54: u8 b[AES_BLOCK_SIZE];
    462: AES_BLOCK_SIZE) - 2;              in xts_encrypt()
    534: AES_BLOCK_SIZE) - 2;              in xts_decrypt()
    613: .ivsize = AES_BLOCK_SIZE,
    628: .ivsize = AES_BLOCK_SIZE,
    629: .walksize = 2 * AES_BLOCK_SIZE,
    644: .ivsize = AES_BLOCK_SIZE,
    645: .chunksize = AES_BLOCK_SIZE,
    659: .ivsize = AES_BLOCK_SIZE,
    660: .chunksize = AES_BLOCK_SIZE,
    [all …]
|
/linux/net/mac80211/

fils_aead.c
    28: u8 d[AES_BLOCK_SIZE], tmp[AES_BLOCK_SIZE] = {};      in aes_s2v()
    41: crypto_xor(d, tmp, AES_BLOCK_SIZE);                  in aes_s2v()
    46: if (len[i] >= AES_BLOCK_SIZE) {                      in aes_s2v()
    51: AES_BLOCK_SIZE);                                     in aes_s2v()
    71: u8 v[AES_BLOCK_SIZE];                                in aes_siv_encrypt()
    106: memcpy(out, v, AES_BLOCK_SIZE);                     in aes_siv_encrypt()
    155: u8 frame_iv[AES_BLOCK_SIZE], iv[AES_BLOCK_SIZE];    in aes_siv_decrypt()
    156: u8 check[AES_BLOCK_SIZE];                           in aes_siv_decrypt()
    263: skb_put(skb, AES_BLOCK_SIZE);                       in fils_encrypt_assoc_req()
    317: if (crypt_len < AES_BLOCK_SIZE) {                   in fils_decrypt_assoc_resp()
    [all …]
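
The aes_s2v() hits are mac80211's FILS use of the S2V construction from RFC 5297 (AES-SIV): a block-wide accumulator d is repeatedly "doubled" in GF(2^128) and XORed (the crypto_xor() at line 41) with the CMAC of each input vector, and inputs shorter than AES_BLOCK_SIZE take the padded branch guarded at line 46. Below is a sketch of just the doubling step, assuming the usual most-significant-bit-first block representation; the helper name s2v_dbl() is made up for the example and the kernel code arranges this differently.

#include <stdint.h>

#define AES_BLOCK_SIZE 16

/*
 * RFC 5297 dbl(): multiply a 128-bit block by x in GF(2^128), reducing
 * with x^128 + x^7 + x^2 + x + 1 (the 0x87 constant).  Sketch only.
 */
static void s2v_dbl(uint8_t d[AES_BLOCK_SIZE])
{
    uint8_t carry = d[0] & 0x80;    /* remember the bit shifted out */
    int i;

    for (i = 0; i < AES_BLOCK_SIZE - 1; i++)
        d[i] = (uint8_t)((d[i] << 1) | (d[i + 1] >> 7));
    d[AES_BLOCK_SIZE - 1] <<= 1;
    if (carry)
        d[AES_BLOCK_SIZE - 1] ^= 0x87;
}

In RFC 5297 terms, d starts as CMAC(K, 0^128) and each associated-data vector Si updates it as d = dbl(d) XOR CMAC(K, Si) before the final vector is folded in.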
|
/linux/drivers/crypto/nx/

nx-aes-xcbc.c
    23: u8 state[AES_BLOCK_SIZE];
    25: u8 buffer[AES_BLOCK_SIZE];
    64: u8 keys[2][AES_BLOCK_SIZE];        in nx_xcbc_empty()
    114: len = AES_BLOCK_SIZE;             in nx_xcbc_empty()
    118: if (len != AES_BLOCK_SIZE)        in nx_xcbc_empty()
    189: if (total <= AES_BLOCK_SIZE) {    in nx_xcbc_update()
    201: data_len = AES_BLOCK_SIZE;        in nx_xcbc_update()
    225: leftover = AES_BLOCK_SIZE;        in nx_xcbc_update()
    259: AES_BLOCK_SIZE);                  in nx_xcbc_update()
    332: len = AES_BLOCK_SIZE;             in nx_xcbc_final()
    [all …]
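
The keys[2][AES_BLOCK_SIZE] hit reflects AES-XCBC-MAC's subkeys: RFC 3566 derives three block-sized subkeys by encrypting the constant blocks 0x01…01, 0x02…02 and 0x03…03 under the main key, and the two-element array above likely holds two of them. A hedged sketch of that derivation, with the block cipher abstracted behind a caller-supplied callback; none of these names are the nx driver's API.

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* caller supplies single-block AES encryption under the main key K */
typedef void (*aes_encrypt_block_fn)(void *ctx,
                                     uint8_t out[AES_BLOCK_SIZE],
                                     const uint8_t in[AES_BLOCK_SIZE]);

/* RFC 3566: K1 = E_K(0x01^16), K2 = E_K(0x02^16), K3 = E_K(0x03^16) */
static void xcbc_derive_subkeys(aes_encrypt_block_fn encrypt, void *ctx,
                                uint8_t k1[AES_BLOCK_SIZE],
                                uint8_t k2[AES_BLOCK_SIZE],
                                uint8_t k3[AES_BLOCK_SIZE])
{
    uint8_t c[AES_BLOCK_SIZE];

    memset(c, 0x01, sizeof(c));
    encrypt(ctx, k1, c);    /* K1 keys the CBC-MAC pass */
    memset(c, 0x02, sizeof(c));
    encrypt(ctx, k2, c);    /* K2 is XORed into a complete final block */
    memset(c, 0x03, sizeof(c));
    encrypt(ctx, k3, c);    /* K3 is XORed into a padded, incomplete final block */
}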
|
nx-aes-gcm.c
    112: if (nbytes <= AES_BLOCK_SIZE) {                          in nx_gca()
    155: AES_BLOCK_SIZE);                                         in nx_gca()
    224: csbcpb->cpb.aes_gcm.out_pat_or_mac, AES_BLOCK_SIZE);     in gmac()
    226: csbcpb->cpb.aes_gcm.out_s0, AES_BLOCK_SIZE);             in gmac()
    248: char out[AES_BLOCK_SIZE];                                in gcm_empty()
    266: len = AES_BLOCK_SIZE;                                    in gcm_empty()
    272: if (len != AES_BLOCK_SIZE)                               in gcm_empty()
    378: csbcpb->cpb.aes_gcm.out_pat_or_mac, AES_BLOCK_SIZE);     in gcm_aes_nx_crypt()
    380: csbcpb->cpb.aes_gcm.out_s0, AES_BLOCK_SIZE);             in gcm_aes_nx_crypt()
    482: .maxauthsize = AES_BLOCK_SIZE,
    [all …]
|
nx-aes-ccm.c
    311: AES_BLOCK_SIZE);                                         in generate_pat()
    324: memcpy(out, result, AES_BLOCK_SIZE);                     in generate_pat()
    384: memcpy(iv, csbcpb->cpb.aes_ccm.out_ctr, AES_BLOCK_SIZE); in ccm_nx_decrypt()
    386: csbcpb->cpb.aes_ccm.out_pat_or_mac, AES_BLOCK_SIZE);     in ccm_nx_decrypt()
    388: csbcpb->cpb.aes_ccm.out_s0, AES_BLOCK_SIZE);             in ccm_nx_decrypt()
    453: memcpy(iv, csbcpb->cpb.aes_ccm.out_ctr, AES_BLOCK_SIZE); in ccm_nx_encrypt()
    455: csbcpb->cpb.aes_ccm.out_pat_or_mac, AES_BLOCK_SIZE);     in ccm_nx_encrypt()
    457: csbcpb->cpb.aes_ccm.out_s0, AES_BLOCK_SIZE);             in ccm_nx_encrypt()
    540: .ivsize = AES_BLOCK_SIZE,
    541: .maxauthsize = AES_BLOCK_SIZE,
    [all …]
|
/linux/arch/powerpc/crypto/

aes-spe-glue.c
    327: u8 b[2][AES_BLOCK_SIZE];                  in ppc_xts_encrypt()
    330: if (req->cryptlen < AES_BLOCK_SIZE)       in ppc_xts_encrypt()
    365: u8 b[3][AES_BLOCK_SIZE];                  in ppc_xts_decrypt()
    369: if (req->cryptlen < AES_BLOCK_SIZE)       in ppc_xts_decrypt()
    388: AES_BLOCK_SIZE);                          in ppc_xts_decrypt()
    418: .cra_blocksize = AES_BLOCK_SIZE,
    438: .base.cra_blocksize = AES_BLOCK_SIZE,
    455: .ivsize = AES_BLOCK_SIZE,
    468: .ivsize = AES_BLOCK_SIZE,
    472: .chunksize = AES_BLOCK_SIZE,
    [all …]
|
/linux/arch/s390/crypto/

aes_s390.c
    66: u8 buf[AES_BLOCK_SIZE];
    326: u8 iv[AES_BLOCK_SIZE];                                       in cbc_aes_crypt()
    372: .ivsize = AES_BLOCK_SIZE,
    527: .ivsize = AES_BLOCK_SIZE,
    562: memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);     in __ctrblk_init()
    563: crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);         in __ctrblk_init()
    564: ctrptr += AES_BLOCK_SIZE;                                    in __ctrblk_init()
    585: n = AES_BLOCK_SIZE;                                          in ctr_aes_crypt()
    594: AES_BLOCK_SIZE);                                             in ctr_aes_crypt()
    626: .ivsize = AES_BLOCK_SIZE,
    [all …]
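
The __ctrblk_init() hits show how the s390 code pre-expands one counter value into a run of consecutive AES_BLOCK_SIZE counter blocks: copy the previous block forward, then increment the copy, with crypto_inc() treating the whole block as one big-endian integer. A standalone sketch of the same idea; ctr_inc_be() and ctrblk_fill() are stand-ins written for this example, not the kernel helpers.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* big-endian increment of a whole block, like the kernel's crypto_inc() */
static void ctr_inc_be(uint8_t *ctr, size_t size)
{
    size_t i = size;

    while (i--) {
        if (++ctr[i] != 0)
            break;      /* stop once there is no carry to propagate */
    }
}

/* expand iv into nblocks consecutive counter blocks in ctrbuf */
static void ctrblk_fill(uint8_t *ctrbuf, const uint8_t iv[AES_BLOCK_SIZE],
                        size_t nblocks)
{
    size_t i;

    memcpy(ctrbuf, iv, AES_BLOCK_SIZE);
    for (i = 1; i < nblocks; i++) {
        /* next block = previous block + 1 */
        memcpy(ctrbuf + i * AES_BLOCK_SIZE,
               ctrbuf + (i - 1) * AES_BLOCK_SIZE, AES_BLOCK_SIZE);
        ctr_inc_be(ctrbuf + i * AES_BLOCK_SIZE, AES_BLOCK_SIZE);
    }
}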
|
paes_s390.c
    333: u8 iv[AES_BLOCK_SIZE];                                       in cbc_paes_crypt()
    388: .ivsize = AES_BLOCK_SIZE,
    565: .ivsize = AES_BLOCK_SIZE,
    631: memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);     in __ctrblk_init()
    632: crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);         in __ctrblk_init()
    633: ctrptr += AES_BLOCK_SIZE;                                    in __ctrblk_init()
    661: n = AES_BLOCK_SIZE;                                          in ctr_paes_crypt()
    670: AES_BLOCK_SIZE);                                             in ctr_paes_crypt()
    694: walk.iv) == AES_BLOCK_SIZE)                                  in ctr_paes_crypt()
    722: .ivsize = AES_BLOCK_SIZE,
    [all …]
|
/linux/drivers/crypto/ccp/

ccp-crypto.h
    109: u8 k1[AES_BLOCK_SIZE];
    110: u8 k2[AES_BLOCK_SIZE];
    115: u8 iv[AES_BLOCK_SIZE];
    118: u8 tag[AES_BLOCK_SIZE];
    122: u8 rfc3686_iv[AES_BLOCK_SIZE];
    142: u8 iv[AES_BLOCK_SIZE];
    146: u8 buf[AES_BLOCK_SIZE];
    150: u8 pad[AES_BLOCK_SIZE];
    158: u8 iv[AES_BLOCK_SIZE];
    161: u8 buf[AES_BLOCK_SIZE];
    [all …]
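
The rfc3686_iv[AES_BLOCK_SIZE] field appears sized for the RFC 3686 CTR counter block, which is assembled from a 4-byte nonce carried with the key, the 8-byte per-request IV, and a 32-bit big-endian block counter that starts at 1. A small sketch of that layout; the constant names mirror the kernel's rfc3686 definitions, but the helper itself is illustrative, not the CCP driver's code.

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE          16
#define CTR_RFC3686_NONCE_SIZE  4
#define CTR_RFC3686_IV_SIZE     8

static void rfc3686_build_ctrblk(uint8_t blk[AES_BLOCK_SIZE],
                                 const uint8_t nonce[CTR_RFC3686_NONCE_SIZE],
                                 const uint8_t iv[CTR_RFC3686_IV_SIZE])
{
    /* nonce (from the key material) followed by the per-request IV */
    memcpy(blk, nonce, CTR_RFC3686_NONCE_SIZE);
    memcpy(blk + CTR_RFC3686_NONCE_SIZE, iv, CTR_RFC3686_IV_SIZE);

    /* 32-bit big-endian block counter, initialised to 1 */
    blk[12] = 0;
    blk[13] = 0;
    blk[14] = 0;
    blk[15] = 1;
}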
|
ccp-crypto-aes.c
    32: memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);      in ccp_aes_complete()
    79: (req->cryptlen & (AES_BLOCK_SIZE - 1)))         in ccp_aes_crypt()
    86: memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE);      in ccp_aes_crypt()
    88: iv_len = AES_BLOCK_SIZE;                        in ccp_aes_crypt()
    218: .base.cra_blocksize = AES_BLOCK_SIZE,
    258: .blocksize = AES_BLOCK_SIZE,
    267: .blocksize = AES_BLOCK_SIZE,
    268: .ivsize = AES_BLOCK_SIZE,
    277: .ivsize = AES_BLOCK_SIZE,
    286: .ivsize = AES_BLOCK_SIZE,
    [all …]
|
/linux/drivers/crypto/vmx/

aes_ctr.c
    73: u8 keystream[AES_BLOCK_SIZE];                              in p8_aes_ctr_final()
    87: crypto_inc(ctrblk, AES_BLOCK_SIZE);                        in p8_aes_ctr_final()
    107: while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {        in p8_aes_ctr_crypt()
    113: nbytes / AES_BLOCK_SIZE,                                  in p8_aes_ctr_crypt()
    120: crypto_inc(walk.iv, AES_BLOCK_SIZE);                      in p8_aes_ctr_crypt()
    121: } while ((nbytes -= AES_BLOCK_SIZE) >= AES_BLOCK_SIZE);   in p8_aes_ctr_crypt()
    147: .ivsize = AES_BLOCK_SIZE,
    148: .chunksize = AES_BLOCK_SIZE,
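
p8_aes_ctr_final() is the usual CTR tail handling: the loop at lines 107-121 processes whole blocks and bumps the counter with crypto_inc(), and whatever is left (fewer than AES_BLOCK_SIZE bytes) is finished by encrypting one more counter block into a keystream buffer and XORing only the remaining bytes. A self-contained sketch of that last step, with the block cipher again behind a caller-supplied callback rather than the VMX routine:

#include <stddef.h>
#include <stdint.h>

#define AES_BLOCK_SIZE 16

typedef void (*aes_encrypt_block_fn)(void *ctx,
                                     uint8_t out[AES_BLOCK_SIZE],
                                     const uint8_t in[AES_BLOCK_SIZE]);

/* finish a CTR stream whose final fragment is nbytes < AES_BLOCK_SIZE */
static void ctr_final(aes_encrypt_block_fn encrypt, void *ctx,
                      const uint8_t ctrblk[AES_BLOCK_SIZE],
                      uint8_t *dst, const uint8_t *src, size_t nbytes)
{
    uint8_t keystream[AES_BLOCK_SIZE];
    size_t i;

    encrypt(ctx, keystream, ctrblk);        /* E_K(counter) */
    for (i = 0; i < nbytes; i++)
        dst[i] = src[i] ^ keystream[i];     /* XOR only the tail bytes */
}

Because the full-block loop increments walk.iv after every block, the tail sees the next unused counter value.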
|
aes_xts.c
    84: u8 tweak[AES_BLOCK_SIZE];                                   in p8_aes_xts_crypt()
    87: if (req->cryptlen < AES_BLOCK_SIZE)                         in p8_aes_xts_crypt()
    120: round_down(nbytes, AES_BLOCK_SIZE),                        in p8_aes_xts_crypt()
    125: round_down(nbytes, AES_BLOCK_SIZE),                        in p8_aes_xts_crypt()
    131: ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);  in p8_aes_xts_crypt()
    152: .base.cra_blocksize = AES_BLOCK_SIZE,
    161: .ivsize = AES_BLOCK_SIZE
|
/linux/drivers/crypto/

padlock-aes.c
    329: .cra_blocksize = AES_BLOCK_SIZE,
    359: nbytes / AES_BLOCK_SIZE);             in ecb_aes_encrypt()
    360: nbytes &= AES_BLOCK_SIZE - 1;         in ecb_aes_encrypt()
    384: nbytes / AES_BLOCK_SIZE);             in ecb_aes_decrypt()
    385: nbytes &= AES_BLOCK_SIZE - 1;         in ecb_aes_decrypt()
    398: .base.cra_blocksize = AES_BLOCK_SIZE,
    425: nbytes / AES_BLOCK_SIZE);             in cbc_aes_encrypt()
    427: nbytes &= AES_BLOCK_SIZE - 1;         in cbc_aes_encrypt()
    451: nbytes / AES_BLOCK_SIZE);             in cbc_aes_decrypt()
    452: nbytes &= AES_BLOCK_SIZE - 1;         in cbc_aes_decrypt()
    [all …]
|
atmel-aes.c
    158: u8 lastc[AES_BLOCK_SIZE];
    885: case AES_BLOCK_SIZE:          in atmel_aes_dma_start()
    1326: .ivsize = AES_BLOCK_SIZE,
    1340: .ivsize = AES_BLOCK_SIZE,
    1354: .ivsize = AES_BLOCK_SIZE,
    1368: .ivsize = AES_BLOCK_SIZE,
    2241: .ivsize = AES_BLOCK_SIZE,
    2257: .ivsize = AES_BLOCK_SIZE,
    2273: .ivsize = AES_BLOCK_SIZE,
    2289: .ivsize = AES_BLOCK_SIZE,
    [all …]
|
/linux/arch/sparc/crypto/

aes_glue.c
    284: round_down(nbytes, AES_BLOCK_SIZE),   in cbc_encrypt()
    310: round_down(nbytes, AES_BLOCK_SIZE),   in cbc_decrypt()
    329: keystream, AES_BLOCK_SIZE);           in ctr_crypt_final()
    331: crypto_inc(ctrblk, AES_BLOCK_SIZE);   in ctr_crypt_final()
    350: round_down(nbytes, AES_BLOCK_SIZE),   in ctr_crypt()
    367: .cra_blocksize = AES_BLOCK_SIZE,
    387: .base.cra_blocksize = AES_BLOCK_SIZE,
    400: .base.cra_blocksize = AES_BLOCK_SIZE,
    406: .ivsize = AES_BLOCK_SIZE,
    420: .ivsize = AES_BLOCK_SIZE,
    [all …]
|
/linux/arch/x86/crypto/

aesni-intel_glue.c
    293: nbytes &= AES_BLOCK_SIZE - 1;     in ecb_encrypt()
    315: nbytes &= AES_BLOCK_SIZE - 1;     in ecb_decrypt()
    337: nbytes &= AES_BLOCK_SIZE - 1;     in cbc_encrypt()
    359: nbytes &= AES_BLOCK_SIZE - 1;     in cbc_decrypt()
    500: u8 keystream[AES_BLOCK_SIZE];     in ctr_crypt()
    990: .ivsize = AES_BLOCK_SIZE,
    1006: .ivsize = AES_BLOCK_SIZE,
    1007: .walksize = 2 * AES_BLOCK_SIZE,
    1024: .ivsize = AES_BLOCK_SIZE,
    1025: .chunksize = AES_BLOCK_SIZE,
    [all …]
|
/linux/drivers/crypto/keembay/

keembay-ocs-aes-core.c
    105: u8 in_tag[AES_BLOCK_SIZE];
    108: u8 out_tag[AES_BLOCK_SIZE];
    230: u8 tmp_buf1[AES_BLOCK_SIZE], tmp_buf2[AES_BLOCK_SIZE];   in sg_swap_blocks()
    1320: .ivsize = AES_BLOCK_SIZE,
    1341: .ivsize = AES_BLOCK_SIZE,
    1363: .ivsize = AES_BLOCK_SIZE,
    1405: .ivsize = AES_BLOCK_SIZE,
    1425: .ivsize = AES_BLOCK_SIZE,
    1446: .ivsize = AES_BLOCK_SIZE,
    1494: .ivsize = AES_BLOCK_SIZE,
    [all …]
|
/linux/drivers/crypto/ccree/

cc_aead.h
    17: #define CCM_CONFIG_BUF_SIZE (AES_BLOCK_SIZE * 3)
    52: u8 ctr_iv[AES_BLOCK_SIZE] ____cacheline_aligned;
    55: u8 gcm_iv_inc1[AES_BLOCK_SIZE] ____cacheline_aligned;
    56: u8 gcm_iv_inc2[AES_BLOCK_SIZE] ____cacheline_aligned;
    57: u8 hkey[AES_BLOCK_SIZE] ____cacheline_aligned;
|
cc_cipher.c
    115: if (size >= AES_BLOCK_SIZE)       in validate_data_size()
    1020: .ivsize = AES_BLOCK_SIZE,
    1038: .ivsize = AES_BLOCK_SIZE,
    1074: .ivsize = AES_BLOCK_SIZE,
    1092: .ivsize = AES_BLOCK_SIZE,
    1110: .ivsize = AES_BLOCK_SIZE,
    1128: .ivsize = AES_BLOCK_SIZE,
    1150: .ivsize = AES_BLOCK_SIZE,
    1167: .ivsize = AES_BLOCK_SIZE,
    1201: .ivsize = AES_BLOCK_SIZE,
    [all …]
|
/linux/drivers/crypto/qat/qat_common/

qat_algs.c
    494: memcpy(key_reverse + AES_BLOCK_SIZE, key - AES_BLOCK_SIZE,   in qat_alg_xts_reverse_key()
    495: AES_BLOCK_SIZE);                                             in qat_alg_xts_reverse_key()
    1438: .cra_blocksize = AES_BLOCK_SIZE,
    1447: .ivsize = AES_BLOCK_SIZE,
    1455: .cra_blocksize = AES_BLOCK_SIZE,
    1464: .ivsize = AES_BLOCK_SIZE,
    1472: .cra_blocksize = AES_BLOCK_SIZE,
    1481: .ivsize = AES_BLOCK_SIZE,
    1502: .ivsize = AES_BLOCK_SIZE,
    1520: .ivsize = AES_BLOCK_SIZE,
    [all …]
|