| /crypto/ |
| ecc.c |
     724  tmp[0] = 0;  in vli_mmod_fast_192()
     729  tmp[0] = tmp[1] = product[5];  in vli_mmod_fast_192()
     730  tmp[2] = 0;  in vli_mmod_fast_192()
     750  tmp[0] = 0;  in vli_mmod_fast_256()
     754  carry = vli_lshift(tmp, tmp, 1, ndigits);  in vli_mmod_fast_256()
     761  carry += vli_lshift(tmp, tmp, 1, ndigits);  in vli_mmod_fast_256()
     767  tmp[2] = 0;  in vli_mmod_fast_256()
     781  tmp[2] = 0;  in vli_mmod_fast_256()
     788  tmp[2] = 0;  in vli_mmod_fast_256()
     839  carry = vli_lshift(tmp, tmp, 1, ndigits);  in vli_mmod_fast_384()
     [all …]
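The vli_mmod_fast_* hits above repeatedly shift a multi-digit temporary left and keep the carried-out bits (vli_lshift(tmp, tmp, 1, ndigits)). A minimal user-space sketch of that shift-with-carry pattern, fixed to a one-bit shift and using a hypothetical vli_lshift1() name (the kernel helper takes an arbitrary shift count):

```c
#include <stdint.h>
#include <stdio.h>

/* Shift a little-endian array of 64-bit digits left by one bit and
 * return the bit shifted out of the most significant digit, mirroring
 * how the carry is consumed in ecc.c's reduction routines. */
static uint64_t vli_lshift1(uint64_t *out, const uint64_t *in, unsigned int ndigits)
{
	uint64_t carry = 0;

	for (unsigned int i = 0; i < ndigits; i++) {
		uint64_t digit = in[i];

		out[i] = (digit << 1) | carry;
		carry = digit >> 63;
	}
	return carry;
}

int main(void)
{
	uint64_t tmp[3] = { 0x8000000000000001ULL, 0, 0x4000000000000000ULL };
	uint64_t carry = vli_lshift1(tmp, tmp, 3);

	printf("carry=%llu lsw=%#llx msw=%#llx\n",
	       (unsigned long long)carry,
	       (unsigned long long)tmp[0],
	       (unsigned long long)tmp[2]);
	return 0;
}
```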
|
| aegis128-core.c |
      63  union aegis_block tmp;  in crypto_aegis128_update() local
     156  union aegis_block tmp;  in crypto_aegis128_encrypt_chunk() local
     165  tmp = state->blocks[2];  in crypto_aegis128_encrypt_chunk()
     173  *dst_blk = tmp;  in crypto_aegis128_encrypt_chunk()
     181  tmp = state->blocks[2];  in crypto_aegis128_encrypt_chunk()
     201  tmp = state->blocks[2];  in crypto_aegis128_encrypt_chunk()
     217  union aegis_block tmp;  in crypto_aegis128_decrypt_chunk() local
     226  tmp = state->blocks[2];  in crypto_aegis128_decrypt_chunk()
     234  *dst_blk = tmp;  in crypto_aegis128_decrypt_chunk()
     262  tmp = state->blocks[2];  in crypto_aegis128_decrypt_chunk()
     [all …]
|
| authencesn.c |
      94  u32 tmp[2];  in crypto_authenc_esn_genicv_tail() local
      97  scatterwalk_map_and_copy(tmp, dst, 4, 4, 0);  in crypto_authenc_esn_genicv_tail()
      99  scatterwalk_map_and_copy(tmp, dst, 0, 8, 1);  in crypto_authenc_esn_genicv_tail()
     126  u32 tmp[2];  in crypto_authenc_esn_genicv() local
     132  scatterwalk_map_and_copy(tmp, dst, 0, 8, 0);  in crypto_authenc_esn_genicv()
     133  scatterwalk_map_and_copy(tmp, dst, 4, 4, 1);  in crypto_authenc_esn_genicv()
     209  u32 tmp[2];  in crypto_authenc_esn_decrypt_tail() local
     215  scatterwalk_map_and_copy(tmp, dst, 4, 4, 0);  in crypto_authenc_esn_decrypt_tail()
     217  scatterwalk_map_and_copy(tmp, dst, 0, 8, 1);  in crypto_authenc_esn_decrypt_tail()
     256  u32 tmp[2];  in crypto_authenc_esn_decrypt() local
     [all …]
|
| polyval-generic.c |
     116  u8 tmp[POLYVAL_BLOCK_SIZE];  in polyval_update() local
     119  copy_and_reverse(tmp, src);  in polyval_update()
     120  crypto_xor(dctx->buffer, tmp, POLYVAL_BLOCK_SIZE);  in polyval_update()
     135  u8 tmp[POLYVAL_BLOCK_SIZE] = {};  in polyval_finup() local
     137  memcpy(tmp, src, len);  in polyval_finup()
     138  polyval_update(desc, tmp, POLYVAL_BLOCK_SIZE);  in polyval_finup()
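polyval_finup() above copies a short trailing fragment into a zero-initialized temporary block and then processes it as a full block. A sketch of that zero-pad-then-process idiom; process_block() is a hypothetical stand-in for the real polyval_update() step:

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BLOCK_SIZE 16

/* Hypothetical stand-in for the real block-processing step. */
static void process_block(const uint8_t block[BLOCK_SIZE])
{
	for (int i = 0; i < BLOCK_SIZE; i++)
		printf("%02x", block[i]);
	printf("\n");
}

/* Final-block handling as in polyval_finup(): copy the short tail into
 * a zeroed temporary block, then process the whole block; the zero
 * padding fills the remainder. */
static void finup(const uint8_t *src, size_t len)
{
	uint8_t tmp[BLOCK_SIZE] = { 0 };

	if (len) {
		memcpy(tmp, src, len);	/* len < BLOCK_SIZE */
		process_block(tmp);
	}
}

int main(void)
{
	const uint8_t tail[] = { 0xde, 0xad, 0xbe, 0xef };

	finup(tail, sizeof(tail));
	return 0;
}
```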
|
| cipher.c |
      69  u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);  in cipher_crypt_one() local
      71  memcpy(tmp, src, bs);  in cipher_crypt_one()
      72  fn(crypto_cipher_tfm(tfm), tmp, tmp);  in cipher_crypt_one()
      73  memcpy(dst, tmp, bs);  in cipher_crypt_one()
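cipher_crypt_one() bounces the data through a stack buffer rounded up to the algorithm's alignment mask, runs the block function in place, and copies the result out. A portable sketch of that bounce-buffer pattern; ALIGN_UP and toy_encrypt_one() are illustrative stand-ins for the kernel's ALIGN() and the cipher callback:

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define ALIGN_UP(p, a)  (((uintptr_t)(p) + ((a) - 1)) & ~(uintptr_t)((a) - 1))
#define BLOCK_SIZE      16

/* Toy single-block transform standing in for the real cipher callback. */
static void toy_encrypt_one(uint8_t *dst, const uint8_t *src)
{
	for (int i = 0; i < BLOCK_SIZE; i++)
		dst[i] = src[i] ^ 0xaa;
}

/* Bounce-buffer pattern from cipher_crypt_one(): copy into an aligned
 * temporary, run the block function in place, copy back out. */
static void crypt_one_aligned(uint8_t *dst, const uint8_t *src,
			      unsigned int alignmask)
{
	uint8_t buffer[BLOCK_SIZE + 16];
	uint8_t *tmp = (uint8_t *)ALIGN_UP(buffer, alignmask + 1);

	memcpy(tmp, src, BLOCK_SIZE);
	toy_encrypt_one(tmp, tmp);
	memcpy(dst, tmp, BLOCK_SIZE);
}

int main(void)
{
	uint8_t in[BLOCK_SIZE] = "0123456789abcde";
	uint8_t out[BLOCK_SIZE];

	crypt_one_aligned(out, in, 15);	/* request 16-byte alignment */
	for (int i = 0; i < BLOCK_SIZE; i++)
		printf("%02x", out[i]);
	printf("\n");
	return 0;
}
```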
|
| jitterentropy-kcapi.c |
      88  __u64 tmp = 0;  in jent_get_nstime() local
      90  tmp = random_get_entropy();  in jent_get_nstime()
      97  if (tmp == 0)  in jent_get_nstime()
      98  tmp = ktime_get_ns();  in jent_get_nstime()
     100  *out = tmp;  in jent_get_nstime()
     101  jent_raw_hires_entropy_store(tmp);  in jent_get_nstime()
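jent_get_nstime() prefers a high-resolution cycle counter and falls back to a nanosecond clock when the counter reads as zero. A user-space sketch of that fallback, with read_cycle_counter() and read_ns_clock() standing in for the kernel's random_get_entropy() and ktime_get_ns():

```c
#include <stdint.h>
#include <stdio.h>
#include <time.h>

/* Stand-in for random_get_entropy(): a cycle-counter read that may
 * legitimately return 0 on hardware without one. */
static uint64_t read_cycle_counter(void)
{
#if defined(__x86_64__) || defined(__i386__)
	return __builtin_ia32_rdtsc();
#else
	return 0;	/* pretend no high-resolution counter exists */
#endif
}

/* Stand-in for ktime_get_ns(). */
static uint64_t read_ns_clock(void)
{
	struct timespec ts;

	clock_gettime(CLOCK_MONOTONIC, &ts);
	return (uint64_t)ts.tv_sec * 1000000000ull + ts.tv_nsec;
}

/* Mirrors jent_get_nstime(): use the cycle counter, fall back to the
 * nanosecond clock when the counter is unavailable. */
static void get_nstime(uint64_t *out)
{
	uint64_t tmp = read_cycle_counter();

	if (tmp == 0)
		tmp = read_ns_clock();
	*out = tmp;
}

int main(void)
{
	uint64_t t;

	get_nstime(&t);
	printf("%llu\n", (unsigned long long)t);
	return 0;
}
```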
|
| ansi_cprng.c |
      84  unsigned char tmp[DEFAULT_BLK_SZ];  in _get_more_prng_bytes() local
     106  memcpy(tmp, ctx->DT, DEFAULT_BLK_SZ);  in _get_more_prng_bytes()
     108  hexdump("tmp stage 0: ", tmp, DEFAULT_BLK_SZ);  in _get_more_prng_bytes()
     117  xor_vectors(ctx->I, ctx->V, tmp, DEFAULT_BLK_SZ);  in _get_more_prng_bytes()
     118  hexdump("tmp stage 1: ", tmp, DEFAULT_BLK_SZ);  in _get_more_prng_bytes()
     147  xor_vectors(ctx->rand_data, ctx->I, tmp,  in _get_more_prng_bytes()
     150  hexdump("tmp stage 2: ", tmp, DEFAULT_BLK_SZ);  in _get_more_prng_bytes()
     156  crypto_cipher_encrypt_one(ctx->tfm, output, tmp);  in _get_more_prng_bytes()
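_get_more_prng_bytes() builds each input block in tmp by XORing state vectors together (xor_vectors()) before encrypting it. A minimal sketch of such an XOR helper; only the call pattern comes from the hits above, the body is assumed:

```c
#include <stdint.h>
#include <stdio.h>

#define DEFAULT_BLK_SZ 16

/* XOR two equal-length byte vectors into a destination buffer, the way
 * _get_more_prng_bytes() combines I, V and rand_data into tmp. */
static void xor_vectors(const uint8_t *a, const uint8_t *b,
			uint8_t *dst, unsigned int len)
{
	for (unsigned int i = 0; i < len; i++)
		dst[i] = a[i] ^ b[i];
}

int main(void)
{
	uint8_t I[DEFAULT_BLK_SZ] = { 1, 2, 3 };
	uint8_t V[DEFAULT_BLK_SZ] = { 0xff, 0xff, 0xff };
	uint8_t tmp[DEFAULT_BLK_SZ];

	xor_vectors(I, V, tmp, DEFAULT_BLK_SZ);
	printf("%02x %02x %02x\n", tmp[0], tmp[1], tmp[2]);
	return 0;
}
```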
|
| camellia_generic.c |
      991  u32 tmp[4];  in camellia_encrypt() local
      993  tmp[0] = get_unaligned_be32(in);  in camellia_encrypt()
      994  tmp[1] = get_unaligned_be32(in + 4);  in camellia_encrypt()
      995  tmp[2] = get_unaligned_be32(in + 8);  in camellia_encrypt()
     1006  put_unaligned_be32(tmp[2], out);  in camellia_encrypt()
     1007  put_unaligned_be32(tmp[3], out + 4);  in camellia_encrypt()
     1008  put_unaligned_be32(tmp[0], out + 8);  in camellia_encrypt()
     1017  u32 tmp[4];  in camellia_decrypt() local
     1019  tmp[0] = get_unaligned_be32(in);  in camellia_decrypt()
     1020  tmp[1] = get_unaligned_be32(in + 4);  in camellia_decrypt()
     [all …]
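camellia_encrypt() loads the input block as four big-endian 32-bit words and writes the output the same way, using the unaligned accessors so the buffers need no particular alignment. A portable sketch of byte-at-a-time equivalents of get_unaligned_be32()/put_unaligned_be32() (the cipher rounds themselves are omitted):

```c
#include <stdint.h>
#include <stdio.h>

/* Assemble and scatter big-endian 32-bit words one byte at a time, so
 * misaligned input and output pointers are always safe. */
static uint32_t load_be32(const uint8_t *p)
{
	return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
	       ((uint32_t)p[2] << 8)  |  (uint32_t)p[3];
}

static void store_be32(uint32_t v, uint8_t *p)
{
	p[0] = v >> 24;
	p[1] = v >> 16;
	p[2] = v >> 8;
	p[3] = v;
}

int main(void)
{
	const uint8_t in[16] = { 0, 1, 2, 3, 4, 5, 6, 7,
				 8, 9, 10, 11, 12, 13, 14, 15 };
	uint8_t out[16];
	uint32_t tmp[4];

	/* Load the 128-bit block as four big-endian words. */
	for (int i = 0; i < 4; i++)
		tmp[i] = load_be32(in + 4 * i);

	/* Store them back out (here without the cipher rounds). */
	for (int i = 0; i < 4; i++)
		store_be32(tmp[i], out + 4 * i);

	printf("%02x %02x\n", out[0], out[15]);
	return 0;
}
```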
|
| jitterentropy-testing.c |
     184  u8 *tmp, *tmp_aligned;  in jent_testing_extract_user() local
     198  tmp = kmalloc(125 * sizeof(u64) + sizeof(u64), GFP_KERNEL);  in jent_testing_extract_user()
     199  if (!tmp)  in jent_testing_extract_user()
     202  tmp_aligned = PTR_ALIGN(tmp, sizeof(u64));  in jent_testing_extract_user()
     233  kfree_sensitive(tmp);  in jent_testing_extract_user()
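jent_testing_extract_user() over-allocates by one u64 and rounds the pointer up with PTR_ALIGN() so the buffer can be used as aligned 64-bit slots. A user-space sketch of that over-allocate-and-align idiom; align_ptr() stands in for PTR_ALIGN() and plain free() for kfree_sensitive():

```c
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Stand-in for PTR_ALIGN(): round a pointer up to 'align', which must
 * be a power of two. */
static void *align_ptr(void *p, size_t align)
{
	return (void *)(((uintptr_t)p + align - 1) & ~(uintptr_t)(align - 1));
}

int main(void)
{
	/* Over-allocate by one u64 so the aligned pointer still leaves
	 * room for all 125 slots. */
	uint8_t *tmp = malloc(125 * sizeof(uint64_t) + sizeof(uint64_t));
	uint64_t *tmp_aligned;

	if (!tmp)
		return 1;

	tmp_aligned = align_ptr(tmp, sizeof(uint64_t));
	tmp_aligned[0] = 0x1122334455667788ULL;	/* safe aligned access */
	printf("raw=%p aligned=%p\n", (void *)tmp, (void *)tmp_aligned);

	free(tmp);	/* the kernel frees with kfree_sensitive() to wipe first */
	return 0;
}
```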
|
| hkdf.c |
      83  u8 tmp[HASH_MAX_DIGESTSIZE] = {};  in hkdf_expand() local
     109  err = crypto_shash_finup(desc, &counter, 1, tmp);  in hkdf_expand()
     112  memcpy(&okm[i], tmp, okmlen - i);  in hkdf_expand()
     113  memzero_explicit(tmp, sizeof(tmp));  in hkdf_expand()
     127  memzero_explicit(tmp, HASH_MAX_DIGESTSIZE);  in hkdf_expand()
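hkdf_expand() produces output in digest-sized chunks, copies only the remaining bytes from tmp for the final partial chunk, and wipes tmp afterwards. A sketch of that expand-loop tail handling; prf_block() is a hypothetical stand-in for the keyed hash step the kernel drives through crypto_shash_finup(), and the feedback of T(i-1) and info is omitted:

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define DIGEST_SIZE 32

/* Hypothetical stand-in for the real PRF over (T(i-1) || info || counter). */
static void prf_block(uint8_t out[DIGEST_SIZE], uint8_t counter)
{
	memset(out, counter, DIGEST_SIZE);
}

/* Tail handling as in hkdf_expand(): full digests are copied whole,
 * the last (possibly partial) digest is copied only up to the
 * requested length, and the temporary is wiped afterwards. */
static void expand(uint8_t *okm, size_t okmlen)
{
	uint8_t tmp[DIGEST_SIZE] = { 0 };
	uint8_t counter = 0;

	for (size_t i = 0; i < okmlen; i += DIGEST_SIZE) {
		prf_block(tmp, ++counter);
		if (okmlen - i < DIGEST_SIZE) {
			memcpy(&okm[i], tmp, okmlen - i);
			break;
		}
		memcpy(&okm[i], tmp, DIGEST_SIZE);
	}
	memset(tmp, 0, sizeof(tmp));	/* memzero_explicit() in the kernel */
}

int main(void)
{
	uint8_t okm[40];

	expand(okm, sizeof(okm));
	printf("%02x %02x\n", okm[0], okm[39]);
	return 0;
}
```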
|
| twofish_common.c |
     470  tmp = poly_to_exp[key[i] - 1]; \
     471  (a) ^= exp_to_poly[tmp + (w)]; \
     472  (b) ^= exp_to_poly[tmp + (x)]; \
     473  (c) ^= exp_to_poly[tmp + (y)]; \
     474  (d) ^= exp_to_poly[tmp + (z)]; \
     583  u8 tmp;  in __twofish_setkey() local
|
| ctr.c |
     34  u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];  in crypto_ctr_crypt_final() local
     35  u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);  in crypto_ctr_crypt_final()
     82  u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];  in crypto_ctr_crypt_inplace() local
     83  u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);  in crypto_ctr_crypt_inplace()
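crypto_ctr_crypt_final() reserves a stack buffer of the maximum block size plus the worst-case alignment mask, aligns a keystream pointer into it, and XORs only the message bytes that remain. A sketch of that final-block step; PTR_ALIGN_UP and toy_encrypt_block() are stand-ins for the kernel's PTR_ALIGN() and crypto_cipher_encrypt_one(), and counter incrementing is omitted:

```c
#include <stdint.h>
#include <stdio.h>

#define BLOCKSIZE     16
#define MAX_ALIGNMASK 15

#define PTR_ALIGN_UP(p, a) \
	((uint8_t *)(((uintptr_t)(p) + ((a) - 1)) & ~(uintptr_t)((a) - 1)))

/* Toy block transform standing in for the real cipher. */
static void toy_encrypt_block(uint8_t *dst, const uint8_t *src)
{
	for (int i = 0; i < BLOCKSIZE; i++)
		dst[i] = src[i] ^ 0x5a;
}

/* Final-block handling: generate one keystream block into an aligned
 * temporary, then XOR only the bytes that remain in the message. */
static void ctr_crypt_final(uint8_t *dst, const uint8_t *src,
			    unsigned int nbytes, const uint8_t *ctrblk,
			    unsigned int alignmask)
{
	uint8_t tmp[BLOCKSIZE + MAX_ALIGNMASK];
	uint8_t *keystream = PTR_ALIGN_UP(tmp, alignmask + 1);

	toy_encrypt_block(keystream, ctrblk);
	for (unsigned int i = 0; i < nbytes; i++)
		dst[i] = src[i] ^ keystream[i];
}

int main(void)
{
	uint8_t ctr[BLOCKSIZE] = { 0 };
	uint8_t in[5] = { 1, 2, 3, 4, 5 }, out[5];

	ctr_crypt_final(out, in, sizeof(in), ctr, MAX_ALIGNMASK);
	printf("%02x %02x\n", out[0], out[4]);
	return 0;
}
```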
|
| scatterwalk.c |
     182  void *tmp = walk->page;  in skcipher_next_copy() local
     185  memcpy(tmp, walk->in.addr, walk->nbytes);  in skcipher_next_copy()
     192  walk->in.__addr = tmp;  in skcipher_next_copy()
     193  walk->out.__addr = tmp;  in skcipher_next_copy()
|
| xctr.c |
     82  u8 tmp[XCTR_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];  in crypto_xctr_crypt_inplace() local
     83  u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);  in crypto_xctr_crypt_inplace()
|
| lrw.c |
     75  be128 tmp = { 0 };  in lrw_setkey() local
     95  lrw_setbit128_bbe(&tmp, i);  in lrw_setkey()
     96  ctx->mulinc[i] = tmp;  in lrw_setkey()
|
| dh.c |
     340  u64 tmp = be64_to_cpu(dst[i - 1]);  in __add_u64_to_be() local
     342  tmp += val;  in __add_u64_to_be()
     343  val = tmp >= val ? 0 : 1;  in __add_u64_to_be()
     344  dst[i - 1] = cpu_to_be64(tmp);  in __add_u64_to_be()
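__add_u64_to_be() adds a 64-bit value to a big-endian multi-word integer: convert the word to CPU order, add, derive the carry from the wrap-around, store the word back, and continue toward the most significant word. A portable sketch of the same carry propagation over a byte array; load_be64()/store_be64() replace the kernel's be64_to_cpu()/cpu_to_be64():

```c
#include <stdint.h>
#include <stdio.h>

static uint64_t load_be64(const uint8_t *p)
{
	uint64_t v = 0;

	for (int i = 0; i < 8; i++)
		v = (v << 8) | p[i];
	return v;
}

static void store_be64(uint64_t v, uint8_t *p)
{
	for (int i = 7; i >= 0; i--) {
		p[i] = v & 0xff;
		v >>= 8;
	}
}

/* Add 'val' to a big-endian integer stored as nwords 64-bit words,
 * most significant word first, propagating the carry upward. */
static void add_u64_to_be(uint8_t *dst, unsigned int nwords, uint64_t val)
{
	for (unsigned int i = nwords; i > 0 && val; i--) {
		uint64_t tmp = load_be64(dst + (i - 1) * 8);

		tmp += val;
		val = tmp >= val ? 0 : 1;	/* carry out of this word */
		store_be64(tmp, dst + (i - 1) * 8);
	}
}

int main(void)
{
	/* Two-word number 0x00000000000000ff_ffffffffffffffff. */
	uint8_t n[16] = { [7] = 0xff, [8] = 0xff, [9] = 0xff, [10] = 0xff,
			  [11] = 0xff, [12] = 0xff, [13] = 0xff, [14] = 0xff,
			  [15] = 0xff };

	add_u64_to_be(n, 2, 1);		/* carries into the high word */
	printf("%02x %02x %02x\n", n[6], n[7], n[15]);
	return 0;
}
```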
|
| algif_aead.c |
      77  struct af_alg_tsgl *tsgl, *tmp;  in _aead_recvmsg() local
     158  list_for_each_entry_safe(tsgl, tmp, &ctx->tsgl_list, list) {  in _aead_recvmsg()
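The tmp here is the second cursor of list_for_each_entry_safe(): the loop releases the current entry, so its successor must be saved before the entry goes away. A minimal singly-linked sketch of why that extra cursor is needed:

```c
#include <stdio.h>
#include <stdlib.h>

struct node {
	int val;
	struct node *next;
};

/* Free every node: grab the successor into tmp before freeing the
 * current node, the same role tmp plays in list_for_each_entry_safe(). */
static void free_all(struct node *head)
{
	struct node *cur, *tmp;

	for (cur = head; cur; cur = tmp) {
		tmp = cur->next;	/* save the successor first */
		printf("freeing %d\n", cur->val);
		free(cur);		/* cur is now invalid */
	}
}

int main(void)
{
	struct node *head = NULL;

	for (int i = 0; i < 3; i++) {
		struct node *n = malloc(sizeof(*n));

		n->val = i;
		n->next = head;
		head = n;
	}
	free_all(head);
	return 0;
}
```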
|
| drbg.c |
     814  unsigned char *tmp = drbg->scratchpad + drbg_statelen(drbg);  in drbg_hash_df() local
     829  ret = drbg_kcapi_hash(drbg, tmp, entropylist);  in drbg_hash_df()
     836  memcpy(outval + len, tmp, blocklen);  in drbg_hash_df()
     841  memset(tmp, 0, drbg_blocklen(drbg));  in drbg_hash_df()
|
| af_alg.c |
     775  struct af_alg_rsgl *rsgl, *tmp;  in af_alg_free_areq_sgls() local
     780  list_for_each_entry_safe(rsgl, tmp, &areq->rsgl_list, list) {  in af_alg_free_areq_sgls()
|
| /crypto/asymmetric_keys/ |
| verify_pefile.c |
     249  unsigned *canon, tmp, loop, i, hashed_bytes;  in pefile_digest_pe_contents() local
     259  tmp = ctx->image_checksum_offset + sizeof(uint32_t);  in pefile_digest_pe_contents()
     260  ret = crypto_shash_update(desc, pebuf + tmp,  in pefile_digest_pe_contents()
     261  ctx->cert_dirent_offset - tmp);  in pefile_digest_pe_contents()
     265  tmp = ctx->cert_dirent_offset + sizeof(struct data_dirent);  in pefile_digest_pe_contents()
     266  ret = crypto_shash_update(desc, pebuf + tmp, ctx->header_size - tmp);  in pefile_digest_pe_contents()
     307  tmp = hashed_bytes + ctx->certs_size;  in pefile_digest_pe_contents()
     310  pelen - tmp);  in pefile_digest_pe_contents()
|
| /crypto/krb5/ |
| rfc6803_camellia.c |
     33  __be32 tmp;  in rfc6803_calc_KDF_FEEDBACK_CMAC() local
     73  tmp = htonl(k);  in rfc6803_calc_KDF_FEEDBACK_CMAC()
     74  memcpy(p, &tmp, 4);  in rfc6803_calc_KDF_FEEDBACK_CMAC()
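rfc6803_calc_KDF_FEEDBACK_CMAC() serializes the loop counter by converting it to big-endian in a temporary and memcpy()ing the four bytes, which avoids an unaligned 32-bit store into the middle of the buffer. A user-space sketch of the same idiom using htonl() from <arpa/inet.h>:

```c
#include <arpa/inet.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
	uint8_t buf[16] = { 0 };
	uint8_t *p = buf + 1;	/* deliberately misaligned destination */
	uint32_t k = 3;
	uint32_t tmp;

	/* Convert to big-endian first, then memcpy the 4 bytes: memcpy
	 * has no alignment requirement, unlike a direct 32-bit store. */
	tmp = htonl(k);
	memcpy(p, &tmp, 4);

	printf("%02x %02x %02x %02x\n", p[0], p[1], p[2], p[3]);
	return 0;
}
```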
|
| rfc8009_aes2.c |
     39  __be32 tmp;  in rfc8009_calc_KDF_HMAC_SHA2() local
     84  tmp = htonl(k);  in rfc8009_calc_KDF_HMAC_SHA2()
     85  memcpy(p, &tmp, 4);  in rfc8009_calc_KDF_HMAC_SHA2()
|
| /crypto/async_tx/ |
| async_xor.c |
     38  dma_addr_t tmp;  in do_async_xor() local
     62  tmp = src_list[0];  in do_async_xor()
     80  src_list[0] = tmp;  in do_async_xor()
|