/arch/x86/crypto/
sm4_aesni_avx_glue.c
     42  unsigned int nbytes;                             in ecb_do_crypt() (local)
     47  while ((nbytes = walk.nbytes) > 0) {             in ecb_do_crypt()
    101  while ((nbytes = walk.nbytes) > 0) {             in sm4_cbc_encrypt()
    135  while ((nbytes = walk.nbytes) > 0) {             in sm4_avx_cbc_decrypt()
    145  nbytes -= bsize;                                 in sm4_avx_cbc_decrypt()
    200  while ((nbytes = walk.nbytes) > 0) {             in sm4_avx_ctr_crypt()
    210  nbytes -= bsize;                                 in sm4_avx_ctr_crypt()
    236  if (walk.nbytes == walk.total && nbytes > 0) {   in sm4_avx_ctr_crypt()
    245  dst += nbytes;                                   in sm4_avx_ctr_crypt()
    246  src += nbytes;                                   in sm4_avx_ctr_crypt()
    [all …]
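Every glue file in this directory is built around the same skcipher_walk pattern: map a chunk of the request, process the whole blocks in it, and tell the walker how many bytes are left over. A minimal sketch of that loop, assuming a hypothetical one-block primitive sm4_do_block() in place of the AVX assembly; the skcipher_walk_* calls and SM4_BLOCK_SIZE are the real kernel API:

```c
#include <crypto/internal/skcipher.h>
#include <crypto/sm4.h>

/* hypothetical single-block primitive standing in for the AVX asm */
void sm4_do_block(u8 *dst, const u8 *src);

static int ecb_do_crypt_sketch(struct skcipher_request *req)
{
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) > 0) {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;

                /* consume the whole blocks of this chunk */
                while (nbytes >= SM4_BLOCK_SIZE) {
                        sm4_do_block(dst, src);
                        src += SM4_BLOCK_SIZE;
                        dst += SM4_BLOCK_SIZE;
                        nbytes -= SM4_BLOCK_SIZE;
                }
                /* report the unprocessed remainder back to the walker */
                err = skcipher_walk_done(&walk, nbytes);
        }
        return err;
}
```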
des3_ede_glue.c
     70  unsigned int nbytes;                             in ecb_crypt() (local)
     75  while ((nbytes = walk.nbytes)) {                 in ecb_crypt()
    100  nbytes -= bsize;                                 in ecb_crypt()
    130  unsigned int nbytes = walk->nbytes;              in __cbc_encrypt() (local)
    142  nbytes -= bsize;                                 in __cbc_encrypt()
    146  return nbytes;                                   in __cbc_encrypt()
    154  unsigned int nbytes;                             in cbc_encrypt() (local)
    171  unsigned int nbytes = walk->nbytes;              in __cbc_decrypt() (local)
    198  nbytes -= bsize;                                 in __cbc_decrypt()
    212  nbytes -= bsize;                                 in __cbc_decrypt()
    [all …]
aria_gfni_avx512_glue.c
     80  unsigned int nbytes;                             in aria_avx512_ctr_encrypt() (local)
     85  while ((nbytes = walk.nbytes) > 0) {             in aria_avx512_ctr_encrypt()
    119  nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;        in aria_avx512_ctr_encrypt()
    122  while (nbytes >= ARIA_BLOCK_SIZE) {              in aria_avx512_ctr_encrypt()
    134  nbytes -= ARIA_BLOCK_SIZE;                       in aria_avx512_ctr_encrypt()
    137  if (walk.nbytes == walk.total && nbytes > 0) {   in aria_avx512_ctr_encrypt()
    146  nbytes);                                         in aria_avx512_ctr_encrypt()
    147  dst += nbytes;                                   in aria_avx512_ctr_encrypt()
    148  src += nbytes;                                   in aria_avx512_ctr_encrypt()
    149  nbytes = 0;                                      in aria_avx512_ctr_encrypt()
    [all …]
ecb_cbc_helpers.h
     20  while (walk.nbytes > 0) { \
     21  unsigned int nbytes = walk.nbytes; \
     23  nbytes >= __fpu_blocks * __bsize; \
     35  nbytes -= (blocks) * __bsize; \
     44  while (nbytes >= __blocks * __bsize) { \
     52  while (nbytes >= __bsize) { \
     67  while (nbytes >= __blocks * __bsize) { \
     80  err = skcipher_walk_done(&walk, nbytes); \
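This header is the generic skeleton the x86 glue files above instantiate. Pieced together from the fragments listed (lines 20 through 80), the macros expand to roughly the shape below. This is a sketch, not the actual header: BSIZE and BLOCKS stand in for the __bsize/__blocks macro parameters, the cipher_*way() routines for the function pointers the glue code passes in, and the FPU gating mirrors the __fpu_blocks test:

```c
#include <asm/fpu/api.h>
#include <crypto/internal/skcipher.h>

#define BSIZE   16      /* cipher block size (assumed) */
#define BLOCKS  8       /* width of the SIMD fast path (assumed) */

void cipher_8way(const void *ctx, u8 *dst, const u8 *src); /* hypothetical */
void cipher_1way(const void *ctx, u8 *dst, const u8 *src); /* hypothetical */

static int ecb_walk_sketch(struct skcipher_request *req, const void *ctx)
{
        struct skcipher_walk walk;
        int err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes > 0) {
                unsigned int nbytes = walk.nbytes;
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                bool fpu = nbytes >= BLOCKS * BSIZE;

                if (fpu)
                        kernel_fpu_begin();
                while (nbytes >= BLOCKS * BSIZE) {      /* wide SIMD path */
                        cipher_8way(ctx, dst, src);
                        src += BLOCKS * BSIZE;
                        dst += BLOCKS * BSIZE;
                        nbytes -= BLOCKS * BSIZE;
                }
                while (nbytes >= BSIZE) {               /* one block at a time */
                        cipher_1way(ctx, dst, src);
                        src += BSIZE;
                        dst += BSIZE;
                        nbytes -= BSIZE;
                }
                if (fpu)
                        kernel_fpu_end();
                err = skcipher_walk_done(&walk, nbytes);
        }
        return err;
}
```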
aria_aesni_avx2_glue.c
     94  unsigned int nbytes;                             in aria_avx2_ctr_encrypt() (local)
     99  while ((nbytes = walk.nbytes) > 0) {             in aria_avx2_ctr_encrypt()
    122  nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;        in aria_avx2_ctr_encrypt()
    125  while (nbytes >= ARIA_BLOCK_SIZE) {              in aria_avx2_ctr_encrypt()
    136  nbytes -= ARIA_BLOCK_SIZE;                       in aria_avx2_ctr_encrypt()
    139  if (walk.nbytes == walk.total && nbytes > 0) {   in aria_avx2_ctr_encrypt()
    148  nbytes);                                         in aria_avx2_ctr_encrypt()
    149  dst += nbytes;                                   in aria_avx2_ctr_encrypt()
    150  src += nbytes;                                   in aria_avx2_ctr_encrypt()
    151  nbytes = 0;                                      in aria_avx2_ctr_encrypt()
    [all …]
aria_aesni_avx_glue.c
     92  unsigned int nbytes;                             in aria_avx_ctr_encrypt() (local)
     97  while ((nbytes = walk.nbytes) > 0) {             in aria_avx_ctr_encrypt()
    109  nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;        in aria_avx_ctr_encrypt()
    112  while (nbytes >= ARIA_BLOCK_SIZE) {              in aria_avx_ctr_encrypt()
    123  nbytes -= ARIA_BLOCK_SIZE;                       in aria_avx_ctr_encrypt()
    126  if (walk.nbytes == walk.total && nbytes > 0) {   in aria_avx_ctr_encrypt()
    135  nbytes);                                         in aria_avx_ctr_encrypt()
    136  dst += nbytes;                                   in aria_avx_ctr_encrypt()
    137  src += nbytes;                                   in aria_avx_ctr_encrypt()
    138  nbytes = 0;                                      in aria_avx_ctr_encrypt()
    [all …]
aesni-intel_glue.c
    165  while ((nbytes = walk.nbytes)) {                 in ecb_encrypt()
    187  while ((nbytes = walk.nbytes)) {                 in ecb_decrypt()
    209  while ((nbytes = walk.nbytes)) {                 in cbc_encrypt()
    231  while ((nbytes = walk.nbytes)) {                 in cbc_decrypt()
    368  while ((nbytes = walk.nbytes) > 0) {             in ctr_crypt_aesni()
    376  if (walk.nbytes == walk.total && nbytes > 0) {   in ctr_crypt_aesni()
    673  while ((nbytes = walk.nbytes) != 0) {            in ctr_crypt()
    676  nbytes = round_down(nbytes, AES_BLOCK_SIZE);     in ctr_crypt()
    733  while ((nbytes = walk.nbytes) != 0) {            in xctr_crypt()
    735  nbytes = round_down(nbytes, AES_BLOCK_SIZE);     in xctr_crypt()
    [all …]
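The recurring `walk.nbytes == walk.total && nbytes > 0` test in the CTR paths (here and in the ARIA and SM4 entries above) is the final-partial-block case: CTR imposes no length restriction, so the tail is produced by encrypting one more counter block and XORing only nbytes of keystream. A self-contained sketch, using the generic AES library call aes_encrypt() where the real driver would call its AES-NI assembly; crypto_inc() and crypto_xor_cpy() are the real helpers from <crypto/algapi.h>:

```c
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>

static int ctr_crypt_sketch(struct skcipher_request *req,
                            const struct crypto_aes_ctx *ctx)
{
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) > 0) {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                u8 keystream[AES_BLOCK_SIZE];

                while (nbytes >= AES_BLOCK_SIZE) {
                        aes_encrypt(ctx, keystream, walk.iv);
                        crypto_inc(walk.iv, AES_BLOCK_SIZE);
                        crypto_xor_cpy(dst, src, keystream, AES_BLOCK_SIZE);
                        src += AES_BLOCK_SIZE;
                        dst += AES_BLOCK_SIZE;
                        nbytes -= AES_BLOCK_SIZE;
                }
                /* final partial block: only possible on the last chunk */
                if (walk.nbytes == walk.total && nbytes > 0) {
                        aes_encrypt(ctx, keystream, walk.iv);
                        crypto_inc(walk.iv, AES_BLOCK_SIZE);
                        crypto_xor_cpy(dst, src, keystream, nbytes);
                        nbytes = 0;
                }
                err = skcipher_walk_done(&walk, nbytes);
        }
        return err;
}
```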
/arch/arm64/crypto/
sm4-neon-glue.c
     39  unsigned int nbytes;                             in sm4_ecb_do_crypt() (local)
     44  while ((nbytes = walk.nbytes) > 0) {             in sm4_ecb_do_crypt()
     85  unsigned int nbytes;                             in sm4_cbc_encrypt() (local)
     90  while ((nbytes = walk.nbytes) > 0) {             in sm4_cbc_encrypt()
    101  nbytes -= SM4_BLOCK_SIZE;                        in sm4_cbc_encrypt()
    117  unsigned int nbytes;                             in sm4_cbc_decrypt() (local)
    122  while ((nbytes = walk.nbytes) > 0) {             in sm4_cbc_decrypt()
    148  unsigned int nbytes;                             in sm4_ctr_crypt() (local)
    153  while ((nbytes = walk.nbytes) > 0) {             in sm4_ctr_crypt()
    173  if (walk.nbytes == walk.total && nbytes > 0) {   in sm4_ctr_crypt()
    [all …]
aes-neonbs-glue.c
    217  while (walk.nbytes > 0) {                        in ctr_encrypt()
    219  int nbytes = walk.nbytes % (8 * AES_BLOCK_SIZE); in ctr_encrypt() (local)
    230  if (nbytes && walk.nbytes == walk.total) {       in ctr_encrypt()
    236  src, nbytes);                                    in ctr_encrypt()
    239  nbytes, walk.iv);                                in ctr_encrypt()
    244  nbytes = 0;                                      in ctr_encrypt()
    288  int nbytes, err;                                 in __xts_crypt() (local)
    321  nbytes = walk.nbytes;                            in __xts_crypt()
    338  if (walk.nbytes == walk.total && nbytes > 0) {   in __xts_crypt()
    347  nbytes = first = 0;                              in __xts_crypt()
    [all …]
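In the bit-sliced NEON driver, nbytes is the residue modulo eight blocks: the assembly only consumes multiples of 8 * AES_BLOCK_SIZE, so the leftover is either bounced through a padded stack buffer (on the final chunk) or handed back to the walker. A sketch of that split, with ctr8_blocks() as a hypothetical stand-in for the bit-sliced routine:

```c
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>
#include <linux/string.h>

/* hypothetical 8-way routine: len must be a multiple of 8 AES blocks */
void ctr8_blocks(u8 *dst, const u8 *src, int len, u8 *iv);

static int ctr_encrypt_sketch(struct skcipher_request *req)
{
        struct skcipher_walk walk;
        int err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes > 0) {
                /* residue the 8-way routine cannot consume; the walk
                 * stride guarantees this is zero for mid-stream chunks */
                int nbytes = walk.nbytes % (8 * AES_BLOCK_SIZE);
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;

                if (walk.nbytes - nbytes > 0) {
                        ctr8_blocks(dst, src, walk.nbytes - nbytes, walk.iv);
                        src += walk.nbytes - nbytes;
                        dst += walk.nbytes - nbytes;
                }
                if (nbytes && walk.nbytes == walk.total) {
                        /* final chunk: pad the tail through a stack buffer
                         * so the same 8-way routine can finish the job */
                        u8 buf[8 * AES_BLOCK_SIZE] = {};

                        memcpy(buf, src, nbytes);
                        ctr8_blocks(buf, buf, sizeof(buf), walk.iv);
                        memcpy(dst, buf, nbytes);
                        nbytes = 0;     /* nothing left for the walker */
                }
                err = skcipher_walk_done(&walk, nbytes);
        }
        return err;
}
```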
ghash-ce-glue.c
    320  int nbytes = walk.nbytes;                                in gcm_encrypt() (local)
    324  if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE)) {   in gcm_encrypt()
    326  src, nbytes);                                            in gcm_encrypt()
    341  if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE))     in gcm_encrypt()
    343  buf + sizeof(buf) - nbytes, nbytes);                     in gcm_encrypt()
    345  err = skcipher_walk_done(&walk, walk.nbytes - nbytes);   in gcm_encrypt()
    390  int nbytes = walk.nbytes;                                in gcm_decrypt() (local)
    394  if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE)) {   in gcm_decrypt()
    411  if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE))     in gcm_decrypt()
    413  buf + sizeof(buf) - nbytes, nbytes);                     in gcm_decrypt()
    [all …]
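The `buf + sizeof(buf) - nbytes` expression seen here is a sub-block bounce buffer: when fewer than 16 bytes remain, they are staged at the tail end of a block-sized stack buffer so the PMULL assembly keeps reading and writing whole blocks, and only the meaningful bytes are copied back out afterwards. A sketch of one walk step under that assumption; gcm_do_blocks() is a hypothetical stand-in for the CE assembly:

```c
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>
#include <linux/compiler.h>
#include <linux/string.h>

/* hypothetical stand-in for the PMULL/CE asm: whole blocks in, whole
 * blocks out (the real final call also absorbs a trailing partial block) */
void gcm_do_blocks(u8 *dst, const u8 *src, int nbytes);

static int gcm_walk_step_sketch(struct skcipher_walk *walk)
{
        int nbytes = walk->nbytes;
        const u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        u8 buf[AES_BLOCK_SIZE];

        if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE))
                /* stage the sub-block tail at the end of a full block */
                src = dst = memcpy(buf + sizeof(buf) - nbytes, src, nbytes);
        else if (nbytes < walk->total)
                /* mid-stream chunk: whole blocks only */
                nbytes &= ~(AES_BLOCK_SIZE - 1);

        gcm_do_blocks(dst, src, nbytes);

        if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE))
                /* copy back only the bytes that are really there */
                memcpy(walk->dst.virt.addr, buf + sizeof(buf) - nbytes, nbytes);

        /* nbytes were consumed; the rest goes back to the walker */
        return skcipher_walk_done(walk, walk->nbytes - nbytes);
}
```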
aes-glue.c
    463  unsigned int nbytes = walk.nbytes;                       in xctr_encrypt() (local)
    477  src, nbytes);                                            in xctr_encrypt()
    488  buf + sizeof(buf) - nbytes, nbytes);                     in xctr_encrypt()
    491  err = skcipher_walk_done(&walk, walk.nbytes - nbytes);   in xctr_encrypt()
    508  unsigned int nbytes = walk.nbytes;                       in ctr_encrypt() (local)
    533  buf + sizeof(buf) - nbytes, nbytes);                     in ctr_encrypt()
    535  err = skcipher_walk_done(&walk, walk.nbytes - nbytes);   in ctr_encrypt()
    577  int nbytes = walk.nbytes;                                in xts_encrypt() (local)
    587  err = skcipher_walk_done(&walk, walk.nbytes - nbytes);   in xts_encrypt()
    649  int nbytes = walk.nbytes;                                in xts_decrypt() (local)
    [all …]
sm4-ce-glue.c
     24  #define BYTES2BLKS(nbytes) ((nbytes) >> 4)           (argument)
    110  unsigned int nbytes;                                 in sm4_ecb_do_crypt() (local)
    115  while ((nbytes = walk.nbytes) > 0) {                 in sm4_ecb_do_crypt()
    156  unsigned int nbytes;                                 in sm4_cbc_crypt() (local)
    163  while ((nbytes = walk.nbytes) > 0) {                 in sm4_cbc_crypt()
    281  unsigned int nbytes;                                 in sm4_ctr_crypt() (local)
    286  while ((nbytes = walk.nbytes) > 0) {                 in sm4_ctr_crypt()
    302  if (walk.nbytes == walk.total && nbytes > 0) {       in sm4_ctr_crypt()
    308  nbytes = 0;                                          in sm4_ctr_crypt()
    358  while ((nbytes = walk.nbytes) >= SM4_BLOCK_SIZE) {   in sm4_xts_crypt()
    [all …]
aes-ce-ccm-glue.c
    196  u32 tail = walk.nbytes % AES_BLOCK_SIZE;         in ccm_encrypt()
    202  if (walk.nbytes == walk.total) {                 in ccm_encrypt()
    207  if (unlikely(walk.nbytes < AES_BLOCK_SIZE))      in ccm_encrypt()
    209  src, walk.nbytes);                               in ccm_encrypt()
    218  if (walk.nbytes) {                               in ccm_encrypt()
    221  } while (walk.nbytes);                           in ccm_encrypt()
    263  u32 tail = walk.nbytes % AES_BLOCK_SIZE;         in ccm_decrypt()
    269  if (walk.nbytes == walk.total) {                 in ccm_decrypt()
    276  src, walk.nbytes);                               in ccm_decrypt()
    285  if (walk.nbytes) {                               in ccm_decrypt()
    [all …]
/arch/powerpc/crypto/
aes-spe-glue.c
    190  while ((nbytes = walk.nbytes) != 0) {            in ppc_ecb_crypt()
    191  nbytes = min_t(unsigned int, nbytes, MAX_BYTES); in ppc_ecb_crypt()
    192  nbytes = round_down(nbytes, AES_BLOCK_SIZE);     in ppc_ecb_crypt()
    229  while ((nbytes = walk.nbytes) != 0) {            in ppc_cbc_crypt()
    230  nbytes = min_t(unsigned int, nbytes, MAX_BYTES); in ppc_cbc_crypt()
    231  nbytes = round_down(nbytes, AES_BLOCK_SIZE);     in ppc_cbc_crypt()
    270  while ((nbytes = walk.nbytes) != 0) {            in ppc_ctr_crypt()
    271  nbytes = min_t(unsigned int, nbytes, MAX_BYTES); in ppc_ctr_crypt()
    273  nbytes = round_down(nbytes, AES_BLOCK_SIZE);     in ppc_ctr_crypt()
    298  while ((nbytes = walk.nbytes) != 0) {            in ppc_xts_crypt()
    [all …]
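Each mode here clamps the chunk twice: min_t() bounds how much is processed while the SPE unit is held (the driver's MAX_BYTES constant), then round_down() trims the result to whole AES blocks, and the trimmed-off remainder is returned through skcipher_walk_done(). A sketch of the resulting loop; spe_do_blocks() is hypothetical and the exact MAX_BYTES value is assumed:

```c
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>
#include <linux/math.h>
#include <linux/minmax.h>

#define MAX_BYTES 768   /* assumed per-pass bound while the SPE unit is held */

/* hypothetical stand-in for the SPE assembly */
void spe_do_blocks(u8 *dst, const u8 *src, unsigned int nbytes);

static int spe_ecb_sketch(struct skcipher_request *req)
{
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) != 0) {
                /* bound the time spent with preemption disabled ... */
                nbytes = min_t(unsigned int, nbytes, MAX_BYTES);
                /* ... and only ever feed the asm whole blocks */
                nbytes = round_down(nbytes, AES_BLOCK_SIZE);

                spe_do_blocks(walk.dst.virt.addr, walk.src.virt.addr, nbytes);

                /* whatever we trimmed off goes back to the walker */
                err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        }
        return err;
}
```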
aes_ctr.c
     80  unsigned int nbytes = walk->nbytes;                        in p8_aes_ctr_final() (local)
     90  crypto_xor_cpy(dst, keystream, src, nbytes);               in p8_aes_ctr_final()
     99  unsigned int nbytes;                                       in p8_aes_ctr_crypt() (local)
    111  while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {         in p8_aes_ctr_crypt()
    117  nbytes / AES_BLOCK_SIZE,                                   in p8_aes_ctr_crypt()
    125  } while ((nbytes -= AES_BLOCK_SIZE) >= AES_BLOCK_SIZE);    in p8_aes_ctr_crypt()
    127  ret = skcipher_walk_done(&walk, nbytes);                   in p8_aes_ctr_crypt()
    129  if (nbytes) {                                              in p8_aes_ctr_crypt()
/arch/riscv/crypto/
aes-riscv64-glue.c
    140  unsigned int nbytes;                                     in riscv64_aes_ecb_crypt() (local)
    144  while ((nbytes = walk.nbytes) != 0) {                    in riscv64_aes_ecb_crypt()
    178  unsigned int nbytes;                                     in riscv64_aes_cbc_crypt() (local)
    182  while ((nbytes = walk.nbytes) != 0) {                    in riscv64_aes_cbc_crypt()
    290  while ((nbytes = walk.nbytes) != 0) {                    in riscv64_aes_ctr_crypt()
    293  nbytes = round_down(nbytes, AES_BLOCK_SIZE);             in riscv64_aes_ctr_crypt()
    331  err = skcipher_walk_done(&walk, walk.nbytes - nbytes);   in riscv64_aes_ctr_crypt()
    398  while (walk.nbytes) {                                    in riscv64_aes_xts_crypt()
    399  unsigned int nbytes = walk.nbytes;                       in riscv64_aes_xts_crypt() (local)
    402  nbytes = round_down(nbytes, AES_BLOCK_SIZE);             in riscv64_aes_xts_crypt()
    [all …]
/arch/x86/include/asm/
insn.h
     23  unsigned char nbytes;                                 (member)
     30  p->nbytes = n;                                        in insn_field_set()
     49  unsigned char nbytes;                                 (member)
     57  p->nbytes = n;                                        in insn_field_set()
    173  return insn->rex_prefix.nbytes == 2;                  in insn_is_rex2()
    192  return (insn->vex_prefix.nbytes == 4);                in insn_is_evex()
    202  if (insn->vex_prefix.nbytes == 2) /* 2 bytes VEX */   in insn_vex_m_bits()
    212  if (insn->vex_prefix.nbytes == 2) /* 2 bytes VEX */   in insn_vex_p_bits()
    220  if (insn->vex_prefix.nbytes < 3)                      in insn_vex_w_bit()
    240  return insn->prefixes.nbytes;                         in insn_offset_rex_prefix()
    [all …]
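These are the only non-crypto hits: in the x86 instruction decoder, nbytes is a per-field byte count, and the predicates above classify prefixes purely by the recorded length of the decoded prefix field (2-byte VEX 0xC5, 3-byte VEX 0xC4, 4-byte EVEX 0x62). An abbreviated, self-contained sketch of the struct and one such length test:

```c
#include <linux/types.h>

typedef s32 insn_value_t;       /* as in the header */
typedef u8 insn_byte_t;

/* abbreviated: every decoded field records its value and how many
 * instruction bytes it occupied */
struct insn_field {
        union {
                insn_value_t value;
                insn_byte_t bytes[4];
        };
        unsigned char got;      /* set once the field has been parsed */
        unsigned char nbytes;   /* byte count of this field */
};

/* the predicates need nothing but the recorded prefix length:
 * only the EVEX prefix occupies four bytes */
static inline int field_is_evex(const struct insn_field *vex_prefix)
{
        return vex_prefix->nbytes == 4;
}
```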
/arch/sparc/crypto/
aes_glue.c
    223  unsigned int nbytes;                                 in ecb_encrypt() (local)
    231  while ((nbytes = walk.nbytes) != 0) {                in ecb_encrypt()
    247  unsigned int nbytes;                                 in ecb_decrypt() (local)
    256  while ((nbytes = walk.nbytes) != 0) {                in ecb_decrypt()
    272  unsigned int nbytes;                                 in cbc_encrypt() (local)
    280  while ((nbytes = walk.nbytes) != 0) {                in cbc_encrypt()
    297  unsigned int nbytes;                                 in cbc_decrypt() (local)
    306  while ((nbytes = walk.nbytes) != 0) {                in cbc_decrypt()
    325  unsigned int nbytes = walk->nbytes;                  in ctr_crypt_final() (local)
    346  while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {   in ctr_crypt()
    [all …]
camellia_glue.c
     94  unsigned int nbytes;                                             in __ecb_crypt() (local)
    110  while ((nbytes = walk.nbytes) != 0) {                            in __ecb_crypt()
    112  round_down(nbytes, CAMELLIA_BLOCK_SIZE), key);                   in __ecb_crypt()
    113  err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);   in __ecb_crypt()
    144  unsigned int nbytes;                                             in cbc_encrypt() (local)
    157  while ((nbytes = walk.nbytes) != 0) {                            in cbc_encrypt()
    159  round_down(nbytes, CAMELLIA_BLOCK_SIZE), key, walk.iv);          in cbc_encrypt()
    160  err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);   in cbc_encrypt()
    173  unsigned int nbytes;                                             in cbc_decrypt() (local)
    186  while ((nbytes = walk.nbytes) != 0) {                            in cbc_decrypt()
    [all …]
des_glue.c
     99  unsigned int nbytes;                     in __ecb_crypt() (local)
    110  while ((nbytes = walk.nbytes) != 0) {    in __ecb_crypt()
    140  unsigned int nbytes;                     in __cbc_crypt() (local)
    151  while ((nbytes = walk.nbytes) != 0) {    in __cbc_crypt()
    155  round_down(nbytes,                       in __cbc_crypt()
    161  round_down(nbytes,                       in __cbc_crypt()
    249  unsigned int nbytes;                     in __ecb3_crypt() (local)
    261  while ((nbytes = walk.nbytes) != 0) {    in __ecb3_crypt()
    295  unsigned int nbytes;                     in __cbc3_crypt() (local)
    307  while ((nbytes = walk.nbytes) != 0) {    in __cbc3_crypt()
    [all …]
/arch/arm/crypto/
aes-ce-glue.c
    398  if (walk.nbytes) {                                       in ctr_encrypt()
    401  unsigned int nbytes = walk.nbytes;                       in ctr_encrypt() (local)
    413  crypto_xor_cpy(tdst, tsrc, tail, nbytes);                in ctr_encrypt()
    455  int nbytes = walk.nbytes;                                in xts_encrypt() (local)
    457  if (walk.nbytes < walk.total)                            in xts_encrypt()
    458  nbytes &= ~(AES_BLOCK_SIZE - 1);                         in xts_encrypt()
    465  err = skcipher_walk_done(&walk, walk.nbytes - nbytes);   in xts_encrypt()
    527  int nbytes = walk.nbytes;                                in xts_decrypt() (local)
    529  if (walk.nbytes < walk.total)                            in xts_decrypt()
    530  nbytes &= ~(AES_BLOCK_SIZE - 1);                         in xts_decrypt()
    [all …]
aes-neonbs-glue.c
     91  while (walk.nbytes >= AES_BLOCK_SIZE) {              in __ecb_crypt()
     94  if (walk.nbytes < walk.total)                        in __ecb_crypt()
    143  unsigned int nbytes;                                 in cbc_encrypt() (local)
    148  while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {   in cbc_encrypt()
    160  nbytes -= AES_BLOCK_SIZE;                            in cbc_encrypt()
    161  } while (nbytes >= AES_BLOCK_SIZE);                  in cbc_encrypt()
    180  if (walk.nbytes < walk.total)                        in cbc_decrypt()
    206  while (walk.nbytes > 0) {                            in ctr_encrypt()
    209  unsigned int bytes = walk.nbytes;                    in ctr_encrypt()
    214  else if (walk.nbytes < walk.total)                   in ctr_encrypt()
    [all …]
/arch/s390/crypto/
prng.c
    160  while (nbytes) {                   in generate_entropy()
    171  n = (nbytes < 64) ? nbytes : 64;   in generate_entropy()
    175  nbytes -= n;                       in generate_entropy()
    211  while (nbytes >= 8) {              in prng_tdes_seed()
    215  nbytes -= 8;                       in prng_tdes_seed()
    500  return nbytes;                     in prng_sha512_generate()
    521  while (nbytes) {                   in prng_tdes_read()
    578  nbytes -= chunk;                   in prng_tdes_read()
    604  while (nbytes) {                   in prng_sha512_read()
    636  n = nbytes;                        in prng_sha512_read()
    [all …]
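Outside the skcipher drivers, the PRNG code uses nbytes as a plain countdown: each read handler produces at most one buffer of output per iteration, copies it out, and subtracts what was delivered. A generic sketch of that shape; drbg_generate() is a hypothetical stand-in for the TDES/SHA-512 generators:

```c
#include <linux/minmax.h>
#include <linux/types.h>
#include <linux/uaccess.h>

/* hypothetical generator filling buf with len fresh bytes, 0 on success */
int drbg_generate(u8 *buf, size_t len);

static ssize_t prng_read_sketch(char __user *ubuf, size_t nbytes)
{
        u8 buf[64];
        ssize_t written = 0;

        while (nbytes) {
                size_t n = min_t(size_t, nbytes, sizeof(buf));
                int ret = drbg_generate(buf, n);

                if (ret)
                        return written ? written : ret;
                if (copy_to_user(ubuf, buf, n))
                        return -EFAULT;
                ubuf += n;
                nbytes -= n;
                written += n;
        }
        return written;
}
```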
des_s390.c
     94  unsigned int nbytes, n;                                                   in ecb_desall_crypt() (local)
     98  while ((nbytes = walk.nbytes) != 0) {                                     in ecb_desall_crypt()
    100  n = nbytes & ~(DES_BLOCK_SIZE - 1);                                       in ecb_desall_crypt()
    113  unsigned int nbytes, n;                                                   in cbc_desall_crypt() (local)
    125  while ((nbytes = walk.nbytes) != 0) {                                     in cbc_desall_crypt()
    127  n = nbytes & ~(DES_BLOCK_SIZE - 1);                                       in cbc_desall_crypt()
    308  n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(DES_BLOCK_SIZE - 1);    in __ctrblk_init()
    324  unsigned int n, nbytes;                                                   in ctr_desall_crypt() (local)
    330  while ((nbytes = walk.nbytes) >= DES_BLOCK_SIZE) {                        in ctr_desall_crypt()
    346  if (nbytes) {                                                             in ctr_desall_crypt()
    [all …]
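The s390 drivers do not loop over blocks in C at all: a single CPACF instruction takes the byte count directly, so each pass masks nbytes down to a whole number of blocks (for a power-of-two block size, the mask is exactly round_down) and returns the remainder to the walker. A sketch assuming the ECB function-code plumbing of this file; cpacf_km() is the real CPACF wrapper:

```c
#include <asm/cpacf.h>
#include <crypto/des.h>
#include <crypto/internal/skcipher.h>

static int ecb_desall_sketch(struct skcipher_request *req,
                             unsigned long fc, void *key)
{
        struct skcipher_walk walk;
        unsigned int nbytes, n;
        int err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) != 0) {
                /* complete blocks only; the mask equals round_down()
                 * because DES_BLOCK_SIZE is a power of two */
                n = nbytes & ~(DES_BLOCK_SIZE - 1);
                cpacf_km(fc, key, walk.dst.virt.addr, walk.src.virt.addr, n);
                err = skcipher_walk_done(&walk, nbytes - n);
        }
        return err;
}
```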
aes_s390.c
     73  unsigned int nbytes;                                 (member)
    238  while ((nbytes = walk.nbytes) != 0) {                in ecb_aes_crypt()
    342  while ((nbytes = walk.nbytes) != 0) {                in cbc_aes_crypt()
    468  while ((nbytes = walk.nbytes) != 0) {                in xts_aes_crypt()
    596  while ((nbytes = walk.nbytes) != 0) {                in fullxts_aes_crypt()
    686  while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {   in ctr_aes_crypt()
    705  if (nbytes) {                                        in ctr_aes_crypt()
    817  gw->nbytes = 0;                                      in gcm_in_walk_go()
    823  gw->nbytes = 0;                                      in gcm_in_walk_go()
    858  gw->nbytes = 0;                                      in gcm_out_walk_go()
    [all …]