/arch/arm64/crypto/

sm4-neon-glue.c
    22: unsigned int nblocks);
    24: u8 *iv, unsigned int nblocks);
    26: u8 *iv, unsigned int nblocks);
    47: unsigned int nblocks;  (in sm4_ecb_do_crypt(), local)
    50: if (nblocks) {  (in sm4_ecb_do_crypt())
    125: unsigned int nblocks;  (in sm4_cbc_decrypt(), local)
    128: if (nblocks) {  (in sm4_cbc_decrypt())
    132: walk.iv, nblocks);  (in sm4_cbc_decrypt())
    156: unsigned int nblocks;  (in sm4_ctr_crypt(), local)
    159: if (nblocks) {  (in sm4_ctr_crypt())
    [all …]
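
Read together, these hits give the standard skcipher glue shape: walk the request, convert each contiguous chunk into whole blocks, and run the NEON routine inside a kernel_neon_begin()/kernel_neon_end() window. A minimal sketch of that loop, with declarations and setup elided and an illustrative helper name sm4_ecb_crypt_neon() standing in for the real assembly entry point:

    err = skcipher_walk_virt(&walk, req, false);

    while ((nbytes = walk.nbytes) > 0) {
            unsigned int nblocks = nbytes / SM4_BLOCK_SIZE;

            if (nblocks) {
                    kernel_neon_begin();
                    sm4_ecb_crypt_neon(ctx->rkey, walk.dst.virt.addr,
                                       walk.src.virt.addr, nblocks);
                    kernel_neon_end();
            }
            /* hand the unprocessed remainder back to the walk */
            err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);
    }

skcipher_walk_done() receives the bytes left over after the whole blocks, so any partial block carries over into the next pass of the walk.
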
polyval-ce-glue.c
    42: const u8 *in, size_t nblocks, u8 *accumulator);
    46: const u8 *in, size_t nblocks, u8 *accumulator)  (in internal_polyval_update(), argument)
    49: pmull_polyval_update(keys, in, nblocks, accumulator);  (in internal_polyval_update())
    94: unsigned int nblocks;  (in polyval_arm64_update(), local)
    98: nblocks = min(srclen, 4096U) / POLYVAL_BLOCK_SIZE;  (in polyval_arm64_update())
    99: internal_polyval_update(tctx, src, nblocks, dctx->buffer);  (in polyval_arm64_update())
    100: srclen -= nblocks * POLYVAL_BLOCK_SIZE;  (in polyval_arm64_update())
    101: src += nblocks * POLYVAL_BLOCK_SIZE;  (in polyval_arm64_update())
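
Both polyval glue files (this one and polyval-clmulni_glue.c under /arch/x86/crypto/ below) cap each pass at 4096 bytes, a common idiom for bounding how long any single SIMD section runs. Reassembled from the polyval_arm64_update() hits above, with the loop condition stated as an assumption:

    while (srclen >= POLYVAL_BLOCK_SIZE) {
            /* at most 4 KiB per pass through the SIMD routine */
            nblocks = min(srclen, 4096U) / POLYVAL_BLOCK_SIZE;
            internal_polyval_update(tctx, src, nblocks, dctx->buffer);
            srclen -= nblocks * POLYVAL_BLOCK_SIZE;
            src += nblocks * POLYVAL_BLOCK_SIZE;
    }
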
sm4-ce-gcm-glue.c
    24: const u8 *src, unsigned int nblocks);
    96: unsigned int nblocks;  (in gcm_calculate_auth_mac(), local)
    109: nblocks = n / GHASH_BLOCK_SIZE;  (in gcm_calculate_auth_mac())
    110: if (nblocks) {  (in gcm_calculate_auth_mac())
    112: p, nblocks);  (in gcm_calculate_auth_mac())
    113: p += nblocks * GHASH_BLOCK_SIZE;  (in gcm_calculate_auth_mac())

sm4-ce-glue.c
    32: u8 *iv, unsigned int nblocks);
    34: u8 *iv, unsigned int nblocks);
    48: const u8 *src, unsigned int nblocks,
    166: unsigned int nblocks;  (in sm4_cbc_crypt(), local)
    168: nblocks = nbytes / SM4_BLOCK_SIZE;  (in sm4_cbc_crypt())
    169: if (nblocks) {  (in sm4_cbc_crypt())
    174: walk.iv, nblocks);  (in sm4_cbc_crypt())
    177: walk.iv, nblocks);  (in sm4_cbc_crypt())
    349: nblocks * SM4_BLOCK_SIZE, req->iv);  (in sm4_xts_crypt())
    600: unsigned int nblocks = len / SM4_BLOCK_SIZE;  (in sm4_mac_update(), local)
    [all …]

sm4-ce-ccm-glue.c
    22: const u8 *src, unsigned int nblocks);
    123: unsigned int l, nblocks;  (in ccm_calculate_auth_mac(), local)
    132: nblocks = n / SM4_BLOCK_SIZE;  (in ccm_calculate_auth_mac())
    134: mac, p, nblocks);  (in ccm_calculate_auth_mac())
    136: p += nblocks * SM4_BLOCK_SIZE;  (in ccm_calculate_auth_mac())

sm4-ce.h
    13: u8 *iv, unsigned int nblocks);

/arch/x86/crypto/

sm4_aesni_avx_glue.c
    23: const u8 *src, int nblocks);
    25: const u8 *src, int nblocks);
    61: dst += nblocks * SM4_BLOCK_SIZE;  (in ecb_do_crypt())
    62: src += nblocks * SM4_BLOCK_SIZE;  (in ecb_do_crypt())
    155: src, nblocks);  (in sm4_avx_cbc_decrypt())
    170: dst += nblocks * SM4_BLOCK_SIZE;  (in sm4_avx_cbc_decrypt())
    218: for (i = 0; i < nblocks; i++) {  (in sm4_avx_ctr_crypt())
    224: keystream, nblocks);  (in sm4_avx_ctr_crypt())
    227: nblocks * SM4_BLOCK_SIZE);  (in sm4_avx_ctr_crypt())
    228: dst += nblocks * SM4_BLOCK_SIZE;  (in sm4_avx_ctr_crypt())
    [all …]
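
The sm4_avx_ctr_crypt() hits outline how a group of counter blocks is processed: materialize nblocks counter values into a keystream buffer, encrypt the buffer in one SIMD call, then XOR it against the source. A hedged reconstruction; the buffer size and the sm4_avx_encrypt_blocks() name are illustrative assumptions, while crypto_inc() and crypto_xor_cpy() are the standard kernel helpers:

    u8 keystream[8 * SM4_BLOCK_SIZE];
    unsigned int i;

    /* one counter block per data block, big-endian increment */
    for (i = 0; i < nblocks; i++) {
            memcpy(keystream + i * SM4_BLOCK_SIZE, walk.iv, SM4_BLOCK_SIZE);
            crypto_inc(walk.iv, SM4_BLOCK_SIZE);
    }
    sm4_avx_encrypt_blocks(ctx->rkey_enc, keystream, keystream, nblocks);
    crypto_xor_cpy(dst, src, keystream, nblocks * SM4_BLOCK_SIZE);
    src += nblocks * SM4_BLOCK_SIZE;
    dst += nblocks * SM4_BLOCK_SIZE;
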
polyval-clmulni_glue.c
    47: const u8 *in, size_t nblocks, u8 *accumulator);
    56: const u8 *in, size_t nblocks, u8 *accumulator)  (in internal_polyval_update(), argument)
    59: clmul_polyval_update(keys, in, nblocks, accumulator);  (in internal_polyval_update())
    104: unsigned int nblocks;  (in polyval_x86_update(), local)
    108: nblocks = min(srclen, 4096U) / POLYVAL_BLOCK_SIZE;  (in polyval_x86_update())
    109: internal_polyval_update(tctx, src, nblocks, dctx->buffer);  (in polyval_x86_update())
    110: srclen -= nblocks * POLYVAL_BLOCK_SIZE;  (in polyval_x86_update())
    111: src += nblocks * POLYVAL_BLOCK_SIZE;  (in polyval_x86_update())

sm3_avx_glue.c
    20: const u8 *data, int nblocks);
aesni-intel_glue.c
    662: unsigned int nbytes, p1_nbytes, nblocks;  (in ctr_crypt(), local)
    677: nblocks = nbytes / AES_BLOCK_SIZE;  (in ctr_crypt())
    680: nblocks = DIV_ROUND_UP(nbytes, AES_BLOCK_SIZE);  (in ctr_crypt())
    682: ctr64 += nblocks;  (in ctr_crypt())
    685: if (likely(ctr64 >= nblocks)) {  (in ctr_crypt())
    697: (nblocks - ctr64) * AES_BLOCK_SIZE);  (in ctr_crypt())
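
ctr_crypt() detects 64-bit counter wraparound arithmetically: after ctr64 += nblocks, an unsigned result smaller than nblocks can only mean the addition overflowed, so the request is split at the wrap point and the second part restarts from the wrapped counter. In sketch form, with the variable roles inferred from the hits (p1_nbytes is the byte count of the pre-wrap part):

    ctr64 += nblocks;
    if (likely(ctr64 >= nblocks)) {
            /* no wrap: the whole request is one pass */
            p1_nbytes = nbytes;
    } else {
            /* wrapped: pass 1 covers only the blocks before the wrap */
            p1_nbytes = (nblocks - ctr64) * AES_BLOCK_SIZE;
    }

The riscv64 AES glue further down applies the same check to a 32-bit counter (ctr32), matching the 32-bit big-endian block counter used there.
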

/arch/arm/crypto/

blake2b-neon-glue.c
    18: const u8 *block, size_t nblocks, u32 inc);
    21: const u8 *block, size_t nblocks, u32 inc)  (in blake2b_compress_arch(), argument)
    24: const size_t blocks = min_t(size_t, nblocks,  (in blake2b_compress_arch())
    31: nblocks -= blocks;  (in blake2b_compress_arch())
    33: } while (nblocks);  (in blake2b_compress_arch())
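
These hits reconstruct to a do/while that feeds the NEON compression function bounded chunks. The 4 KiB cap (SZ_4K) and the kernel_neon_begin()/kernel_neon_end() guards are not visible in the truncated lines and are stated here as assumptions consistent with them:

    do {
            const size_t blocks = min_t(size_t, nblocks,
                                        SZ_4K / BLAKE2B_BLOCK_SIZE);

            kernel_neon_begin();
            blake2b_compress_neon(state, block, blocks, inc);
            kernel_neon_end();

            nblocks -= blocks;
            block += blocks * BLAKE2B_BLOCK_SIZE;
    } while (nblocks);
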
ghash-ce-glue.c
    327: int nblocks = walk.nbytes / AES_BLOCK_SIZE;  (in gcm_encrypt(), local)
    329: pmull_gcm_encrypt(nblocks, dg, src, ctx, dst, iv,  (in gcm_encrypt())
    331: counter += nblocks;  (in gcm_encrypt())
    334: src += nblocks * AES_BLOCK_SIZE;  (in gcm_encrypt())
    335: dst += nblocks * AES_BLOCK_SIZE;  (in gcm_encrypt())
    420: int nblocks = walk.nbytes / AES_BLOCK_SIZE;  (in gcm_decrypt(), local)
    422: pmull_gcm_decrypt(nblocks, dg, src, ctx, dst, iv,  (in gcm_decrypt())
    424: counter += nblocks;  (in gcm_decrypt())
    427: src += nblocks * AES_BLOCK_SIZE;  (in gcm_decrypt())
    428: dst += nblocks * AES_BLOCK_SIZE;  (in gcm_decrypt())

/arch/loongarch/vdso/

vgetrandom-chacha.S
    38: #define nblocks a3  (macro)
    220: PTR_ADDI nblocks, nblocks, -1
    221: bnez nblocks, .Lblock
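
All four vDSO getrandom implementations (this one plus the riscv, x86, and powerpc versions below) alias nblocks to an argument register so the assembly keeps a readable name, then run a decrement-and-branch loop, one iteration per 64-byte ChaCha20 output block. The C equivalent of that loop; chacha20_block() is a hypothetical stand-in for the inline block computation:

    while (nblocks) {
            chacha20_block(state, out);     /* emit one 64-byte block */
            out += 64;                      /* CHACHA_BLOCK_SIZE */
            nblocks--;
    }
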

/arch/riscv/kernel/vdso/

vgetrandom-chacha.S
    37: #define nblocks a3  (macro)
    218: addi nblocks, nblocks, -1
    219: bnez nblocks, .Lblock

/arch/riscv/crypto/

aes-riscv64-glue.c
    283: u32 ctr32, nblocks;  (in riscv64_aes_ctr_crypt(), local)
    294: nblocks = nbytes / AES_BLOCK_SIZE;  (in riscv64_aes_ctr_crypt())
    297: nblocks = DIV_ROUND_UP(nbytes, AES_BLOCK_SIZE);  (in riscv64_aes_ctr_crypt())
    299: ctr32 += nblocks;  (in riscv64_aes_ctr_crypt())
    302: if (ctr32 >= nblocks) {  (in riscv64_aes_ctr_crypt())
    315: (nblocks - ctr32) * AES_BLOCK_SIZE);  (in riscv64_aes_ctr_crypt())

/arch/x86/entry/vdso/

vgetrandom-chacha.S
    29: .set nblocks, %rcx  (define)
    162: decq nblocks

/arch/powerpc/kernel/vdso/

vgetrandom-chacha.S
    13: #define nblocks r6  (macro)
    179: mr idx_r0, nblocks