/crypto/
cast5_generic.c
  327  t = l; l = r; r = t ^ F1(r, Km[0], Kr[0]);   in __cast5_encrypt()
  328  t = l; l = r; r = t ^ F2(r, Km[1], Kr[1]);   in __cast5_encrypt()
  329  t = l; l = r; r = t ^ F3(r, Km[2], Kr[2]);   in __cast5_encrypt()
  330  t = l; l = r; r = t ^ F1(r, Km[3], Kr[3]);   in __cast5_encrypt()
  331  t = l; l = r; r = t ^ F2(r, Km[4], Kr[4]);   in __cast5_encrypt()
  332  t = l; l = r; r = t ^ F3(r, Km[5], Kr[5]);   in __cast5_encrypt()
  333  t = l; l = r; r = t ^ F1(r, Km[6], Kr[6]);   in __cast5_encrypt()
  334  t = l; l = r; r = t ^ F2(r, Km[7], Kr[7]);   in __cast5_encrypt()
  335  t = l; l = r; r = t ^ F3(r, Km[8], Kr[8]);   in __cast5_encrypt()
  336  t = l; l = r; r = t ^ F1(r, Km[9], Kr[9]);   in __cast5_encrypt()
  [all …]
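
These matches are CAST5's Feistel ladder: each round swaps the two 32-bit halves and XORs the old left half with a keyed round function of the right half, cycling through F1/F2/F3. A minimal sketch of that swap-and-XOR pattern; round_fn below is a hypothetical placeholder, not RFC 2144's actual F1/F2/F3.

    #include <stdint.h>

    /* Hypothetical stand-in for CAST5's F1/F2/F3; the real round functions
     * (RFC 2144) mix the masking key Km and rotation key Kr with four
     * S-boxes, which is omitted here. */
    static uint32_t round_fn(uint32_t r, uint32_t km, uint8_t kr)
    {
        uint32_t x = km + r;
        unsigned int s = kr & 31;

        return (x << s) | (x >> ((32 - s) & 31));   /* rotate left by kr */
    }

    /* The swap-and-XOR ladder matched above: each round the halves swap and
     * the old left half is XORed with a keyed function of the right half. */
    static void feistel_rounds(uint32_t *l, uint32_t *r,
                               const uint32_t *Km, const uint8_t *Kr,
                               int rounds)
    {
        int i;

        for (i = 0; i < rounds; i++) {
            uint32_t t = *l;

            *l = *r;
            *r = t ^ round_fn(*r, Km[i], Kr[i]);
        }
    }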
|
aegis.h
  65   const u32 *t = crypto_ft_tab[0];   in crypto_aegis_aesenc() local
  68   d0 = t[s[ 0]] ^ rol32(t[s[ 5]], 8) ^ rol32(t[s[10]], 16) ^ rol32(t[s[15]], 24);   in crypto_aegis_aesenc()
  69   d1 = t[s[ 4]] ^ rol32(t[s[ 9]], 8) ^ rol32(t[s[14]], 16) ^ rol32(t[s[ 3]], 24);   in crypto_aegis_aesenc()
  70   d2 = t[s[ 8]] ^ rol32(t[s[13]], 8) ^ rol32(t[s[ 2]], 16) ^ rol32(t[s[ 7]], 24);   in crypto_aegis_aesenc()
  71   d3 = t[s[12]] ^ rol32(t[s[ 1]], 8) ^ rol32(t[s[ 6]], 16) ^ rol32(t[s[11]], 24);   in crypto_aegis_aesenc()
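
Here crypto_aegis_aesenc() does one AES encryption round with a single forward T-table: the table combines SubBytes with the MixColumns contribution for one byte position, the 8/16/24-bit rotations reuse it for the other three, and the s[] byte indices implement ShiftRows. A sketch of the same lookup pattern, assuming the caller supplies the 256-entry table (building crypto_ft_tab is not shown here).

    #include <stdint.h>

    /* Rotate left; callers below only use s = 8, 16, 24. */
    static inline uint32_t rol32(uint32_t v, unsigned int s)
    {
        return (v << s) | (v >> (32 - s));
    }

    /* One AES round in the T-table style matched above.  'ft' stands in for
     * crypto_ft_tab[0]: SubBytes plus the MixColumns coefficients for one
     * byte position; the rotations cover the other positions and the index
     * pattern (0,5,10,15 / 4,9,14,3 / ...) is ShiftRows. */
    static void aes_round_columns(uint32_t d[4], const uint8_t s[16],
                                  const uint32_t ft[256])
    {
        d[0] = ft[s[ 0]] ^ rol32(ft[s[ 5]], 8) ^ rol32(ft[s[10]], 16) ^ rol32(ft[s[15]], 24);
        d[1] = ft[s[ 4]] ^ rol32(ft[s[ 9]], 8) ^ rol32(ft[s[14]], 16) ^ rol32(ft[s[ 3]], 24);
        d[2] = ft[s[ 8]] ^ rol32(ft[s[13]], 8) ^ rol32(ft[s[ 2]], 16) ^ rol32(ft[s[ 7]], 24);
        d[3] = ft[s[12]] ^ rol32(ft[s[ 1]], 8) ^ rol32(ft[s[ 6]], 16) ^ rol32(ft[s[11]], 24);
    }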
|
sha3_generic.c
  49   u64 t[5], tt, bc[5];   in keccakf_round() local
  58   t[0] = bc[4] ^ rol64(bc[1], 1);   in keccakf_round()
  59   t[1] = bc[0] ^ rol64(bc[2], 1);   in keccakf_round()
  60   t[2] = bc[1] ^ rol64(bc[3], 1);   in keccakf_round()
  61   t[3] = bc[2] ^ rol64(bc[4], 1);   in keccakf_round()
  62   t[4] = bc[3] ^ rol64(bc[0], 1);   in keccakf_round()
  64   st[0] ^= t[0];   in keccakf_round()
  68   st[ 1] = rol64(st[ 6] ^ t[1], 44);   in keccakf_round()
  69   st[ 6] = rol64(st[ 9] ^ t[4], 20);   in keccakf_round()
  70   st[ 9] = rol64(st[22] ^ t[2], 61);   in keccakf_round()
  [all …]
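
The t[0]..t[4] and st[0] ^= t[0] lines are Keccak's theta step: bc[x] holds the parity of column x, and every lane in column x is XORed with bc[x-1] ^ rol64(bc[x+1], 1). The later st[...] assignments fuse in the rho rotations and pi permutation. A loop-based sketch of just the theta step (keccakf_round() keeps it fully unrolled):

    #include <stdint.h>

    static inline uint64_t rol64(uint64_t v, unsigned int s)
    {
        return (v << s) | (v >> (64 - s));   /* s in 1..63 here */
    }

    /* Keccak theta on the 5x5 lane state, laid out as st[x + 5*y]. */
    static void keccak_theta(uint64_t st[25])
    {
        uint64_t bc[5], t[5];
        int x, y;

        for (x = 0; x < 5; x++)   /* column parities */
            bc[x] = st[x] ^ st[x + 5] ^ st[x + 10] ^ st[x + 15] ^ st[x + 20];

        for (x = 0; x < 5; x++)   /* bc[x-1] ^ rol(bc[x+1], 1), indices mod 5 */
            t[x] = bc[(x + 4) % 5] ^ rol64(bc[(x + 1) % 5], 1);

        for (x = 0; x < 5; x++)
            for (y = 0; y < 25; y += 5)
                st[x + y] ^= t[x];
    }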
|
xts.c
  35   le128 t;   member
  90   le128 t = rctx->t;   in xts_xor_tweak() local
  113  rctx->t = t;   in xts_xor_tweak()
  114  gf128mul_x_ble(&t, &t);   in xts_xor_tweak()
  116  le128_xor(wdst, &t, wsrc);   in xts_xor_tweak()
  118  gf128mul_x_ble(&rctx->t, &t);   in xts_xor_tweak()
  123  le128_xor(wdst++, &t, wsrc++);   in xts_xor_tweak()
  124  gf128mul_x_ble(&t, &t);   in xts_xor_tweak()
  152  le128_xor(&b, &rctx->t, &b);   in xts_cts_done()
  178  le128_xor(b, &rctx->t, b);   in xts_cts_final()
  [all …]
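
xts_xor_tweak() carries the running XTS tweak: each 16-byte block is XORed with the current tweak, then the tweak is multiplied by x in GF(2^128) (gf128mul_x_ble) for the next block, and the value saved in rctx->t is what the ciphertext-stealing paths (xts_cts_done/xts_cts_final) reuse. A self-contained sketch of that per-block loop, using a hypothetical two-u64 tweak type rather than the kernel's le128:

    #include <stdint.h>
    #include <stddef.h>

    /* Hypothetical 128-bit value as two little-endian 64-bit halves
     * (lo = bytes 0..7, hi = bytes 8..15), standing in for le128. */
    struct u128_le {
        uint64_t lo, hi;
    };

    /* Multiply the tweak by x in GF(2^128) with the XTS (IEEE P1619)
     * little-endian convention: shift left one bit across both words and
     * fold the carry back in with 0x87.  Same role as gf128mul_x_ble(). */
    static void tweak_mul_x(struct u128_le *t)
    {
        uint64_t carry = t->hi >> 63;

        t->hi = (t->hi << 1) | (t->lo >> 63);
        t->lo = (t->lo << 1) ^ (carry ? 0x87 : 0);
    }

    /* The per-block pattern of xts_xor_tweak(): XOR the running tweak into
     * each 16-byte block, then advance the tweak for the next block. */
    static void xts_xor_blocks(struct u128_le *blocks, size_t nblocks,
                               struct u128_le t)
    {
        size_t i;

        for (i = 0; i < nblocks; i++) {
            blocks[i].lo ^= t.lo;
            blocks[i].hi ^= t.hi;
            tweak_mul_x(&t);
        }
    }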
|
lrw.c
  53   be128 t;   member
  146  be128 t = rctx->t;   in lrw_xor_tweak() local
  177  be128_xor(wdst++, &t, wsrc++);   in lrw_xor_tweak()
  181  be128_xor(&t, &t,   in lrw_xor_tweak()
  237  memcpy(&rctx->t, req->iv, sizeof(rctx->t));   in lrw_init_crypt()
  240  gf128mul_64k_bbe(&rctx->t, ctx->table);   in lrw_init_crypt()
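
In LRW the tweak for block index I is K2 multiplied by I in GF(2^128). lrw_init_crypt() gets the starting tweak by multiplying the IV by K2 through the precomputed 64k table (gf128mul_64k_bbe), and lrw_xor_tweak() then advances it per block with a single XOR against a precomputed increment instead of a full multiplication. A sketch of that incremental update, assuming a table laid out as mulinc[k] = K2 * (2^(k+1) - 1) and a 64-bit block counter; both are simplifications, not the kernel's actual structures.

    #include <stdint.h>

    /* Hypothetical big-endian-order 128-bit value, standing in for be128. */
    struct u128_be {
        uint64_t hi, lo;
    };

    static void u128_xor(struct u128_be *r, const struct u128_be *a,
                         const struct u128_be *b)
    {
        r->hi = a->hi ^ b->hi;
        r->lo = a->lo ^ b->lo;
    }

    /* Incremental LRW tweak update: stepping the block index from I to I+1
     * flips a trailing run of bits, so the new tweak is the old one XORed
     * with mulinc[k] = K2 * (2^(k+1) - 1), where k is the number of trailing
     * one bits of the old index.  lrw_xor_tweak() does the equivalent with
     * be128 values and a wider counter. */
    static void lrw_next_tweak(struct u128_be *t,
                               const struct u128_be mulinc[128],
                               uint64_t *counter)
    {
        unsigned int k = 0;

        while (k < 63 && ((*counter >> k) & 1))
            k++;

        u128_xor(t, t, &mulinc[k]);
        (*counter)++;
    }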
|
blake2b_generic.c
  42   S->t[0] += inc;   in blake2b_increment_counter()
  43   S->t[1] += (S->t[0] < inc);   in blake2b_increment_counter()
  87   v[12] = BLAKE2B_IV4 ^ S->t[0];   in blake2b_compress_one_generic()
  88   v[13] = BLAKE2B_IV5 ^ S->t[1];   in blake2b_compress_one_generic()
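
BLAKE2b's byte counter t is 128 bits wide, kept as two u64 words; after adding to the low word, the unsigned comparison (t[0] < inc) is exactly the carry into the high word, and both words are later XORed into v[12]/v[13] of the compression function's working vector. A minimal sketch of the carry trick:

    #include <stdint.h>

    /* Two-word 128-bit counter, mirroring the t[2] field used above. */
    struct counter128 {
        uint64_t t[2];
    };

    /* After the addition, (t[0] < inc) is 1 exactly when the low word
     * wrapped around, i.e. it is the carry to add into the high word. */
    static void counter_add(struct counter128 *c, uint64_t inc)
    {
        c->t[0] += inc;
        c->t[1] += (c->t[0] < inc);
    }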
|
ecc.c
  558  u64 t[ECC_MAX_DIGITS * 2];   in vli_mmod_special() local
  563  vli_umult(t, r + ndigits, c, ndigits);   in vli_mmod_special()
  565  vli_add(r, r, t, ndigits * 2);   in vli_mmod_special()
  567  vli_set(t, mod, ndigits);   in vli_mmod_special()
  568  vli_clear(t + ndigits, ndigits);   in vli_mmod_special()
  569  while (vli_cmp(r, t, ndigits * 2) >= 0)   in vli_mmod_special()
  570  vli_sub(r, r, t, ndigits * 2);   in vli_mmod_special()
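
vli_mmod_special() reduces a double-width product modulo a prime of the special form p = 2^k - c for small c: since 2^k is congruent to c mod p, the high half of the value can be folded back in with a cheap multiplication by c (the vli_umult/vli_add pair), and a few conditional subtractions finish the reduction. The same idea scaled down to a single word, as a sketch with k = 32 and a hypothetical helper name:

    #include <stdint.h>

    /* Reduce x modulo p = 2^32 - c for small nonzero c, without division:
     * hi*2^32 + lo is congruent to hi*c + lo (mod p), so keep folding the
     * high word back in, then subtract p a few times at most.  The kernel
     * does the same digit-by-digit on multi-word values. */
    static uint32_t mod_special32(uint64_t x, uint32_t c)
    {
        uint32_t p = (uint32_t)0 - c;          /* p = 2^32 - c */

        while (x >> 32)                        /* fold the high word back in */
            x = (x & 0xffffffffu) + (x >> 32) * (uint64_t)c;

        while (x >= p)                         /* final conditional subtractions */
            x -= p;

        return (uint32_t)x;
    }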
|
fcrypt.c
  60   u32 t = lo & ((1 << n) - 1); \
  62   hi = (hi >> n) | (t << (24-n)); \
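
These lines come from a rotate macro that treats hi/lo as one wider quantity: the n bits about to fall off the bottom of lo are saved in t and re-inserted at the top of hi, and the (24 - n) shift indicates hi carries 24 significant bits, i.e. a 56-bit rotate split across two u32 words as used by fcrypt's key schedule. A function-style sketch of that pattern; the widths and the lo update are reconstructed assumptions, not copied from the file.

    #include <stdint.h>

    /* Right-rotate a 56-bit quantity held as a 24-bit 'hi' word and a 32-bit
     * 'lo' word, valid for 1 <= n <= 23: bits falling off the bottom of lo
     * (saved in t) wrap to the top of hi's 24-bit field, and hi's low n bits
     * slide into the top of lo. */
    static void ror56(uint32_t *hi, uint32_t *lo, unsigned int n)
    {
        uint32_t t = *lo & ((1u << n) - 1);

        *lo = (*lo >> n) | ((*hi & ((1u << n) - 1)) << (32 - n));
        *hi = (*hi >> n) | (t << (24 - n));
    }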
|
Kconfig
  767  implementation currently can't handle a sectorsize which is not a
|
/crypto/asymmetric_keys/
Kconfig
  63   just the payload. If it isn't, adding the key will fail with an
|