
Searched refs:AES_BLOCK_SIZE (Results 1 – 25 of 105) sorted by relevance


/linux/arch/arm64/crypto/
aes-glue.c
130 u8 dg[AES_BLOCK_SIZE];
460 u8 buf[AES_BLOCK_SIZE]; in ctr_encrypt()
659 .ivsize = AES_BLOCK_SIZE,
674 .ivsize = AES_BLOCK_SIZE,
690 .ivsize = AES_BLOCK_SIZE,
707 .ivsize = AES_BLOCK_SIZE,
783 u8 key[AES_BLOCK_SIZE]; in xcbc_setkey()
831 in += AES_BLOCK_SIZE; in mac_do_update()
852 len %= AES_BLOCK_SIZE; in mac_update()
915 2 * AES_BLOCK_SIZE,
[all …]
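
The glue code above walks its input one 16-byte block at a time and keeps any remainder for later ("in += AES_BLOCK_SIZE", "len %= AES_BLOCK_SIZE"). Below is a minimal userspace sketch of that pattern, not code from the file above; process_block() is a hypothetical stand-in for the real per-block cipher call.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define AES_BLOCK_SIZE 16

/* Hypothetical stand-in for the per-block cipher primitive. */
static void process_block(const uint8_t in[AES_BLOCK_SIZE])
{
	(void)in;
}

/* Consume whole blocks; return the partial tail the caller must buffer. */
static size_t walk_blocks(const uint8_t *in, size_t len)
{
	while (len >= AES_BLOCK_SIZE) {
		process_block(in);
		in += AES_BLOCK_SIZE;	/* same stride as the snippets above */
		len -= AES_BLOCK_SIZE;
	}
	return len;			/* equals len % AES_BLOCK_SIZE */
}

int main(void)
{
	uint8_t msg[37] = { 0 };

	printf("tail = %zu bytes\n", walk_blocks(msg, sizeof(msg))); /* 5 */
	return 0;
}

The returned tail is what an update path buffers until the next call delivers enough bytes to complete a block.
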
aes-neonbs-glue.c
59 } __aligned(AES_BLOCK_SIZE);
210 u8 buf[AES_BLOCK_SIZE]; in ctr_encrypt()
292 AES_BLOCK_SIZE) - 2; in __xts_crypt()
333 in += blocks * AES_BLOCK_SIZE; in __xts_crypt()
396 .walksize = 8 * AES_BLOCK_SIZE,
410 .walksize = 8 * AES_BLOCK_SIZE,
411 .ivsize = AES_BLOCK_SIZE,
425 .chunksize = AES_BLOCK_SIZE,
426 .walksize = 8 * AES_BLOCK_SIZE,
427 .ivsize = AES_BLOCK_SIZE,
[all …]
aes-ce-ccm-glue.c
80 memcpy(maciv, req->iv, AES_BLOCK_SIZE - l); in ccm_init_mac()
151 u8 __aligned(8) mac[AES_BLOCK_SIZE]; in ccm_encrypt()
152 u8 buf[AES_BLOCK_SIZE]; in ccm_encrypt()
161 memcpy(buf, req->iv, AES_BLOCK_SIZE); in ccm_encrypt()
173 u32 tail = walk.nbytes % AES_BLOCK_SIZE; in ccm_encrypt()
209 u8 __aligned(8) mac[AES_BLOCK_SIZE]; in ccm_decrypt()
210 u8 buf[AES_BLOCK_SIZE]; in ccm_decrypt()
219 memcpy(buf, req->iv, AES_BLOCK_SIZE); in ccm_decrypt()
273 .ivsize = AES_BLOCK_SIZE,
274 .chunksize = AES_BLOCK_SIZE,
[all …]
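
ccm_init_mac() above copies only AES_BLOCK_SIZE - l bytes of the IV because the last l bytes of a CCM block hold the block-counter/length field. A sketch of the initial counter-block layout, assuming the kernel convention iv[0] = l - 1 per RFC 3610 (this is an illustration, not code from the file above):

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* Build the initial CCM counter block: keep the flags byte and nonce,
 * zero the trailing l-byte block counter (l = iv[0] + 1, valid 2..8). */
static int ccm_ctr0(uint8_t ctr[AES_BLOCK_SIZE],
		    const uint8_t iv[AES_BLOCK_SIZE])
{
	unsigned int l = iv[0] + 1;

	if (l < 2 || l > 8)
		return -1;
	memcpy(ctr, iv, AES_BLOCK_SIZE - l);	/* flags + nonce */
	memset(ctr + AES_BLOCK_SIZE - l, 0, l);	/* counter starts at zero */
	return 0;
}

int main(void)
{
	uint8_t iv[AES_BLOCK_SIZE] = { 3 };	/* l = 4, so an 11-byte nonce */
	uint8_t ctr[AES_BLOCK_SIZE];

	return ccm_ctr0(ctr, iv);
}
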
ghash-ce-glue.c
347 u8 buf[AES_BLOCK_SIZE]; in gcm_encrypt()
348 u8 iv[AES_BLOCK_SIZE]; in gcm_encrypt()
376 nbytes &= ~(AES_BLOCK_SIZE - 1); in gcm_encrypt()
389 if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE)) in gcm_encrypt()
413 u8 otag[AES_BLOCK_SIZE]; in gcm_decrypt()
414 u8 buf[AES_BLOCK_SIZE]; in gcm_decrypt()
415 u8 iv[AES_BLOCK_SIZE]; in gcm_decrypt()
448 nbytes &= ~(AES_BLOCK_SIZE - 1); in gcm_decrypt()
461 if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE)) in gcm_decrypt()
476 .chunksize = AES_BLOCK_SIZE,
[all …]
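
The GCM code rounds nbytes down with a bit mask rather than a division; this works because AES_BLOCK_SIZE is a power of two. A small self-checking sketch of the two masks:

#include <assert.h>

#define AES_BLOCK_SIZE 16

int main(void)
{
	unsigned int nbytes = 100;

	/* round down to whole blocks: (100 / 16) * 16 == 96 */
	unsigned int full = nbytes & ~(AES_BLOCK_SIZE - 1);
	/* remainder: 100 % 16 == 4 */
	unsigned int tail = nbytes & (AES_BLOCK_SIZE - 1);

	assert(full == 96 && tail == 4 && full + tail == nbytes);
	return 0;
}
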
/linux/arch/arm/crypto/
aes-neonbs-glue.c
49 u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32] __aligned(AES_BLOCK_SIZE);
240 u8 buf[AES_BLOCK_SIZE]; in ctr_encrypt()
352 u8 buf[2 * AES_BLOCK_SIZE]; in __xts_crypt()
398 AES_BLOCK_SIZE, 0); in __xts_crypt()
412 AES_BLOCK_SIZE + tail, 1); in __xts_crypt()
454 .ivsize = AES_BLOCK_SIZE,
471 .chunksize = AES_BLOCK_SIZE,
473 .ivsize = AES_BLOCK_SIZE,
487 .chunksize = AES_BLOCK_SIZE,
489 .ivsize = AES_BLOCK_SIZE,
[all …]
aes-ce-glue.c
54 u8 b[AES_BLOCK_SIZE];
462 AES_BLOCK_SIZE) - 2; in xts_encrypt()
534 AES_BLOCK_SIZE) - 2; in xts_decrypt()
613 .ivsize = AES_BLOCK_SIZE,
628 .ivsize = AES_BLOCK_SIZE,
629 .walksize = 2 * AES_BLOCK_SIZE,
644 .ivsize = AES_BLOCK_SIZE,
645 .chunksize = AES_BLOCK_SIZE,
659 .ivsize = AES_BLOCK_SIZE,
660 .chunksize = AES_BLOCK_SIZE,
[all …]
/linux/net/mac80211/
fils_aead.c
28 u8 d[AES_BLOCK_SIZE], tmp[AES_BLOCK_SIZE] = {}; in aes_s2v()
41 crypto_xor(d, tmp, AES_BLOCK_SIZE); in aes_s2v()
46 if (len[i] >= AES_BLOCK_SIZE) { in aes_s2v()
51 AES_BLOCK_SIZE); in aes_s2v()
71 u8 v[AES_BLOCK_SIZE]; in aes_siv_encrypt()
106 memcpy(out, v, AES_BLOCK_SIZE); in aes_siv_encrypt()
155 u8 frame_iv[AES_BLOCK_SIZE], iv[AES_BLOCK_SIZE]; in aes_siv_decrypt()
156 u8 check[AES_BLOCK_SIZE]; in aes_siv_decrypt()
263 skb_put(skb, AES_BLOCK_SIZE); in fils_encrypt_assoc_req()
317 if (crypt_len < AES_BLOCK_SIZE) { in fils_decrypt_assoc_resp()
[all …]
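
aes_s2v() above folds blocks together with crypto_xor(). The sketch below is a simplified byte-wise stand-in for that helper (the kernel's version also works a word at a time where alignment allows); block_xor() is a name introduced here for illustration.

#include <stdint.h>
#include <stdio.h>

#define AES_BLOCK_SIZE 16

/* XOR src into dst in place, one byte at a time. */
static void block_xor(uint8_t *dst, const uint8_t *src, unsigned int len)
{
	while (len--)
		*dst++ ^= *src++;
}

int main(void)
{
	uint8_t d[AES_BLOCK_SIZE] = { 0xff };
	uint8_t tmp[AES_BLOCK_SIZE] = { 0x0f };

	block_xor(d, tmp, AES_BLOCK_SIZE);
	printf("%02x\n", d[0]);	/* prints f0 */
	return 0;
}
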
/linux/drivers/crypto/nx/
nx-aes-xcbc.c
23 u8 state[AES_BLOCK_SIZE];
25 u8 buffer[AES_BLOCK_SIZE];
64 u8 keys[2][AES_BLOCK_SIZE]; in nx_xcbc_empty()
114 len = AES_BLOCK_SIZE; in nx_xcbc_empty()
118 if (len != AES_BLOCK_SIZE) in nx_xcbc_empty()
189 if (total <= AES_BLOCK_SIZE) { in nx_xcbc_update()
201 data_len = AES_BLOCK_SIZE; in nx_xcbc_update()
225 leftover = AES_BLOCK_SIZE; in nx_xcbc_update()
259 AES_BLOCK_SIZE); in nx_xcbc_update()
332 len = AES_BLOCK_SIZE; in nx_xcbc_final()
[all …]
nx-aes-gcm.c
112 if (nbytes <= AES_BLOCK_SIZE) { in nx_gca()
155 AES_BLOCK_SIZE); in nx_gca()
224 csbcpb->cpb.aes_gcm.out_pat_or_mac, AES_BLOCK_SIZE); in gmac()
226 csbcpb->cpb.aes_gcm.out_s0, AES_BLOCK_SIZE); in gmac()
248 char out[AES_BLOCK_SIZE]; in gcm_empty()
266 len = AES_BLOCK_SIZE; in gcm_empty()
272 if (len != AES_BLOCK_SIZE) in gcm_empty()
378 csbcpb->cpb.aes_gcm.out_pat_or_mac, AES_BLOCK_SIZE); in gcm_aes_nx_crypt()
380 csbcpb->cpb.aes_gcm.out_s0, AES_BLOCK_SIZE); in gcm_aes_nx_crypt()
482 .maxauthsize = AES_BLOCK_SIZE,
[all …]
nx-aes-ccm.c
311 AES_BLOCK_SIZE); in generate_pat()
324 memcpy(out, result, AES_BLOCK_SIZE); in generate_pat()
384 memcpy(iv, csbcpb->cpb.aes_ccm.out_ctr, AES_BLOCK_SIZE); in ccm_nx_decrypt()
386 csbcpb->cpb.aes_ccm.out_pat_or_mac, AES_BLOCK_SIZE); in ccm_nx_decrypt()
388 csbcpb->cpb.aes_ccm.out_s0, AES_BLOCK_SIZE); in ccm_nx_decrypt()
453 memcpy(iv, csbcpb->cpb.aes_ccm.out_ctr, AES_BLOCK_SIZE); in ccm_nx_encrypt()
455 csbcpb->cpb.aes_ccm.out_pat_or_mac, AES_BLOCK_SIZE); in ccm_nx_encrypt()
457 csbcpb->cpb.aes_ccm.out_s0, AES_BLOCK_SIZE); in ccm_nx_encrypt()
540 .ivsize = AES_BLOCK_SIZE,
541 .maxauthsize = AES_BLOCK_SIZE,
[all …]
/linux/arch/powerpc/crypto/
aes-spe-glue.c
327 u8 b[2][AES_BLOCK_SIZE]; in ppc_xts_encrypt()
330 if (req->cryptlen < AES_BLOCK_SIZE) in ppc_xts_encrypt()
365 u8 b[3][AES_BLOCK_SIZE]; in ppc_xts_decrypt()
369 if (req->cryptlen < AES_BLOCK_SIZE) in ppc_xts_decrypt()
388 AES_BLOCK_SIZE); in ppc_xts_decrypt()
418 .cra_blocksize = AES_BLOCK_SIZE,
438 .base.cra_blocksize = AES_BLOCK_SIZE,
455 .ivsize = AES_BLOCK_SIZE,
468 .ivsize = AES_BLOCK_SIZE,
472 .chunksize = AES_BLOCK_SIZE,
[all …]
/linux/arch/s390/crypto/
aes_s390.c
66 u8 buf[AES_BLOCK_SIZE];
326 u8 iv[AES_BLOCK_SIZE]; in cbc_aes_crypt()
372 .ivsize = AES_BLOCK_SIZE,
527 .ivsize = AES_BLOCK_SIZE,
562 memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE); in __ctrblk_init()
563 crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE); in __ctrblk_init()
564 ctrptr += AES_BLOCK_SIZE; in __ctrblk_init()
585 n = AES_BLOCK_SIZE; in ctr_aes_crypt()
594 AES_BLOCK_SIZE); in ctr_aes_crypt()
626 .ivsize = AES_BLOCK_SIZE,
[all …]
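
__ctrblk_init() above fans one IV out into a list of consecutive counter blocks so the CPACF instruction can process several blocks per call. A userspace sketch of the same loop, with ctr_inc() as a simplified stand-in for the kernel's crypto_inc() (big-endian increment with carry):

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* Bump the last byte and carry toward the front. */
static void ctr_inc(uint8_t *b, unsigned int size)
{
	while (size--)
		if (++b[size])
			break;
}

/* Pre-compute nblocks consecutive counter blocks: copy the previous
 * block, then increment the copy. */
static void ctrblk_init(uint8_t *ctrptr, const uint8_t *iv,
			unsigned int nblocks)
{
	memcpy(ctrptr, iv, AES_BLOCK_SIZE);
	while (--nblocks) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		ctr_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
		ctrptr += AES_BLOCK_SIZE;
	}
}

int main(void)
{
	uint8_t iv[AES_BLOCK_SIZE] = { 0 };
	uint8_t blocks[4 * AES_BLOCK_SIZE];

	ctrblk_init(blocks, iv, 4);	/* counters 0, 1, 2, 3 */
	return blocks[3 * AES_BLOCK_SIZE + 15] == 3 ? 0 : 1;
}
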
paes_s390.c
333 u8 iv[AES_BLOCK_SIZE]; in cbc_paes_crypt()
388 .ivsize = AES_BLOCK_SIZE,
565 .ivsize = AES_BLOCK_SIZE,
631 memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE); in __ctrblk_init()
632 crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE); in __ctrblk_init()
633 ctrptr += AES_BLOCK_SIZE; in __ctrblk_init()
661 n = AES_BLOCK_SIZE; in ctr_paes_crypt()
670 AES_BLOCK_SIZE); in ctr_paes_crypt()
694 walk.iv) == AES_BLOCK_SIZE) in ctr_paes_crypt()
722 .ivsize = AES_BLOCK_SIZE,
[all …]
/linux/drivers/crypto/ccp/
ccp-crypto.h
109 u8 k1[AES_BLOCK_SIZE];
110 u8 k2[AES_BLOCK_SIZE];
115 u8 iv[AES_BLOCK_SIZE];
118 u8 tag[AES_BLOCK_SIZE];
122 u8 rfc3686_iv[AES_BLOCK_SIZE];
142 u8 iv[AES_BLOCK_SIZE];
146 u8 buf[AES_BLOCK_SIZE];
150 u8 pad[AES_BLOCK_SIZE];
158 u8 iv[AES_BLOCK_SIZE];
161 u8 buf[AES_BLOCK_SIZE];
[all …]
ccp-crypto-aes.c
32 memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE); in ccp_aes_complete()
79 (req->cryptlen & (AES_BLOCK_SIZE - 1))) in ccp_aes_crypt()
86 memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE); in ccp_aes_crypt()
88 iv_len = AES_BLOCK_SIZE; in ccp_aes_crypt()
218 .base.cra_blocksize = AES_BLOCK_SIZE,
258 .blocksize = AES_BLOCK_SIZE,
267 .blocksize = AES_BLOCK_SIZE,
268 .ivsize = AES_BLOCK_SIZE,
277 .ivsize = AES_BLOCK_SIZE,
286 .ivsize = AES_BLOCK_SIZE,
[all …]
/linux/drivers/crypto/vmx/
aes_ctr.c
73 u8 keystream[AES_BLOCK_SIZE]; in p8_aes_ctr_final()
87 crypto_inc(ctrblk, AES_BLOCK_SIZE); in p8_aes_ctr_final()
107 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) { in p8_aes_ctr_crypt()
113 nbytes / AES_BLOCK_SIZE, in p8_aes_ctr_crypt()
120 crypto_inc(walk.iv, AES_BLOCK_SIZE); in p8_aes_ctr_crypt()
121 } while ((nbytes -= AES_BLOCK_SIZE) >= AES_BLOCK_SIZE); in p8_aes_ctr_crypt()
147 .ivsize = AES_BLOCK_SIZE,
148 .chunksize = AES_BLOCK_SIZE,
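
p8_aes_ctr_final() above handles a trailing partial block by encrypting one keystream block from the counter and XORing only the bytes that remain. A sketch of that idea, not the driver's code; encrypt_block() is a placeholder for the real single-block AES primitive, not AES itself:

#include <stddef.h>
#include <stdint.h>

#define AES_BLOCK_SIZE 16

/* Placeholder cipher: real code runs AES on the counter block. */
static void encrypt_block(uint8_t out[AES_BLOCK_SIZE],
			  const uint8_t in[AES_BLOCK_SIZE])
{
	for (int i = 0; i < AES_BLOCK_SIZE; i++)
		out[i] = in[i] ^ 0xaa;
}

/* XOR the final nbytes (nbytes < AES_BLOCK_SIZE) with fresh keystream. */
static void ctr_final(uint8_t *dst, const uint8_t *src, size_t nbytes,
		      const uint8_t ctrblk[AES_BLOCK_SIZE])
{
	uint8_t keystream[AES_BLOCK_SIZE];

	encrypt_block(keystream, ctrblk);
	for (size_t i = 0; i < nbytes; i++)
		dst[i] = src[i] ^ keystream[i];
}

int main(void)
{
	uint8_t ctr[AES_BLOCK_SIZE] = { 0 };
	uint8_t in[5] = { 1, 2, 3, 4, 5 };
	uint8_t out[5];

	ctr_final(out, in, sizeof(in), ctr);
	return 0;
}

This is why CTR mode needs no padding: the unused tail of the keystream block is simply discarded.
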
aes_xts.c
84 u8 tweak[AES_BLOCK_SIZE]; in p8_aes_xts_crypt()
87 if (req->cryptlen < AES_BLOCK_SIZE) in p8_aes_xts_crypt()
120 round_down(nbytes, AES_BLOCK_SIZE), in p8_aes_xts_crypt()
125 round_down(nbytes, AES_BLOCK_SIZE), in p8_aes_xts_crypt()
131 ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in p8_aes_xts_crypt()
152 .base.cra_blocksize = AES_BLOCK_SIZE,
161 .ivsize = AES_BLOCK_SIZE,
/linux/drivers/crypto/
padlock-aes.c
329 .cra_blocksize = AES_BLOCK_SIZE,
359 nbytes / AES_BLOCK_SIZE); in ecb_aes_encrypt()
360 nbytes &= AES_BLOCK_SIZE - 1; in ecb_aes_encrypt()
384 nbytes / AES_BLOCK_SIZE); in ecb_aes_decrypt()
385 nbytes &= AES_BLOCK_SIZE - 1; in ecb_aes_decrypt()
398 .base.cra_blocksize = AES_BLOCK_SIZE,
425 nbytes / AES_BLOCK_SIZE); in cbc_aes_encrypt()
427 nbytes &= AES_BLOCK_SIZE - 1; in cbc_aes_encrypt()
451 nbytes / AES_BLOCK_SIZE); in cbc_aes_decrypt()
452 nbytes &= AES_BLOCK_SIZE - 1; in cbc_aes_decrypt()
[all …]
atmel-aes.c
158 u8 lastc[AES_BLOCK_SIZE];
885 case AES_BLOCK_SIZE: in atmel_aes_dma_start()
1326 .ivsize = AES_BLOCK_SIZE,
1340 .ivsize = AES_BLOCK_SIZE,
1354 .ivsize = AES_BLOCK_SIZE,
1368 .ivsize = AES_BLOCK_SIZE,
2241 .ivsize = AES_BLOCK_SIZE,
2257 .ivsize = AES_BLOCK_SIZE,
2273 .ivsize = AES_BLOCK_SIZE,
2289 .ivsize = AES_BLOCK_SIZE,
[all …]
/linux/arch/sparc/crypto/
aes_glue.c
284 round_down(nbytes, AES_BLOCK_SIZE), in cbc_encrypt()
310 round_down(nbytes, AES_BLOCK_SIZE), in cbc_decrypt()
329 keystream, AES_BLOCK_SIZE); in ctr_crypt_final()
331 crypto_inc(ctrblk, AES_BLOCK_SIZE); in ctr_crypt_final()
350 round_down(nbytes, AES_BLOCK_SIZE), in ctr_crypt()
367 .cra_blocksize = AES_BLOCK_SIZE,
387 .base.cra_blocksize = AES_BLOCK_SIZE,
400 .base.cra_blocksize = AES_BLOCK_SIZE,
406 .ivsize = AES_BLOCK_SIZE,
420 .ivsize = AES_BLOCK_SIZE,
[all …]
/linux/arch/x86/crypto/
aesni-intel_glue.c
293 nbytes &= AES_BLOCK_SIZE - 1; in ecb_encrypt()
315 nbytes &= AES_BLOCK_SIZE - 1; in ecb_decrypt()
337 nbytes &= AES_BLOCK_SIZE - 1; in cbc_encrypt()
359 nbytes &= AES_BLOCK_SIZE - 1; in cbc_decrypt()
500 u8 keystream[AES_BLOCK_SIZE]; in ctr_crypt()
990 .ivsize = AES_BLOCK_SIZE,
1006 .ivsize = AES_BLOCK_SIZE,
1007 .walksize = 2 * AES_BLOCK_SIZE,
1024 .ivsize = AES_BLOCK_SIZE,
1025 .chunksize = AES_BLOCK_SIZE,
[all …]
/linux/drivers/crypto/keembay/
keembay-ocs-aes-core.c
105 u8 in_tag[AES_BLOCK_SIZE];
108 u8 out_tag[AES_BLOCK_SIZE];
230 u8 tmp_buf1[AES_BLOCK_SIZE], tmp_buf2[AES_BLOCK_SIZE]; in sg_swap_blocks()
1320 .ivsize = AES_BLOCK_SIZE,
1341 .ivsize = AES_BLOCK_SIZE,
1363 .ivsize = AES_BLOCK_SIZE,
1405 .ivsize = AES_BLOCK_SIZE,
1425 .ivsize = AES_BLOCK_SIZE,
1446 .ivsize = AES_BLOCK_SIZE,
1494 .ivsize = AES_BLOCK_SIZE,
[all …]
/linux/drivers/crypto/ccree/
cc_aead.h
17 #define CCM_CONFIG_BUF_SIZE (AES_BLOCK_SIZE * 3)
52 u8 ctr_iv[AES_BLOCK_SIZE] ____cacheline_aligned;
55 u8 gcm_iv_inc1[AES_BLOCK_SIZE] ____cacheline_aligned;
56 u8 gcm_iv_inc2[AES_BLOCK_SIZE] ____cacheline_aligned;
57 u8 hkey[AES_BLOCK_SIZE] ____cacheline_aligned;
cc_cipher.c
115 if (size >= AES_BLOCK_SIZE) in validate_data_size()
1020 .ivsize = AES_BLOCK_SIZE,
1038 .ivsize = AES_BLOCK_SIZE,
1074 .ivsize = AES_BLOCK_SIZE,
1092 .ivsize = AES_BLOCK_SIZE,
1110 .ivsize = AES_BLOCK_SIZE,
1128 .ivsize = AES_BLOCK_SIZE,
1150 .ivsize = AES_BLOCK_SIZE,
1167 .ivsize = AES_BLOCK_SIZE,
1201 .ivsize = AES_BLOCK_SIZE,
[all …]
/linux/drivers/crypto/qat/qat_common/
qat_algs.c
494 memcpy(key_reverse + AES_BLOCK_SIZE, key - AES_BLOCK_SIZE, in qat_alg_xts_reverse_key()
495 AES_BLOCK_SIZE); in qat_alg_xts_reverse_key()
1438 .cra_blocksize = AES_BLOCK_SIZE,
1447 .ivsize = AES_BLOCK_SIZE,
1455 .cra_blocksize = AES_BLOCK_SIZE,
1464 .ivsize = AES_BLOCK_SIZE,
1472 .cra_blocksize = AES_BLOCK_SIZE,
1481 .ivsize = AES_BLOCK_SIZE,
1502 .ivsize = AES_BLOCK_SIZE,
1520 .ivsize = AES_BLOCK_SIZE,
[all …]
