/crypto/
aegis128-neon-inner.c
    223: unsigned int size)  in crypto_aegis128_encrypt_chunk_neon() argument
    231: while (size >= AEGIS_BLOCK_SIZE) {  in crypto_aegis128_encrypt_chunk_neon()
    239: size -= AEGIS_BLOCK_SIZE;  in crypto_aegis128_encrypt_chunk_neon()
    244: if (size > 0) {  in crypto_aegis128_encrypt_chunk_neon()
    252: in = out = memcpy(buf + AEGIS_BLOCK_SIZE - size, src, size);  in crypto_aegis128_encrypt_chunk_neon()
    263: memcpy(dst, out, size);  in crypto_aegis128_encrypt_chunk_neon()
    272: unsigned int size)  in crypto_aegis128_decrypt_chunk_neon() argument
    285: size -= AEGIS_BLOCK_SIZE;  in crypto_aegis128_decrypt_chunk_neon()
    290: if (size > 0) {  in crypto_aegis128_decrypt_chunk_neon()
    298: in = out = memcpy(buf + AEGIS_BLOCK_SIZE - size, src, size);  in crypto_aegis128_decrypt_chunk_neon()
    [all …]
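
These hits trace the chunk-walking shape shared by both AEGIS chunk routines: consume whole 16-byte blocks in a loop, then stage any short tail in a padded buffer so the block primitive never sees a partial input. Below is a minimal standalone sketch of that shape; BLOCK_SIZE, process_block and process_chunk are illustrative names, and the placeholder transform stands in for the real AEGIS state update.

    #include <stdint.h>
    #include <string.h>

    #define BLOCK_SIZE 16   /* stands in for AEGIS_BLOCK_SIZE */

    /* Placeholder per-block transform; the real primitive updates the
     * AEGIS state and produces keystream. */
    static void process_block(uint8_t *dst, const uint8_t *src)
    {
        for (int i = 0; i < BLOCK_SIZE; i++)
            dst[i] = src[i] ^ 0xAA;
    }

    /* Full blocks go straight through; a short tail is staged in a
     * zeroed block so process_block() only ever sees whole blocks.
     * (The NEON code right-aligns the tail inside a larger stack
     * buffer, as the memcpy at line 252 shows; this sketch uses the
     * simpler left-aligned form.) */
    static void process_chunk(uint8_t *dst, const uint8_t *src,
                              unsigned int size)
    {
        while (size >= BLOCK_SIZE) {
            process_block(dst, src);
            dst += BLOCK_SIZE;
            src += BLOCK_SIZE;
            size -= BLOCK_SIZE;
        }

        if (size > 0) {
            uint8_t buf[BLOCK_SIZE] = { 0 };

            memcpy(buf, src, size);
            process_block(buf, buf);
            memcpy(dst, buf, size);  /* copy back only the tail bytes */
        }
    }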
|
aegis128-core.c
    134: size -= AEGIS_BLOCK_SIZE;  in crypto_aegis128_ad()
    141: size -= AEGIS_BLOCK_SIZE;  in crypto_aegis128_ad()
    175: size -= AEGIS_BLOCK_SIZE;  in crypto_aegis128_encrypt_chunk()
    191: size -= AEGIS_BLOCK_SIZE;  in crypto_aegis128_encrypt_chunk()
    197: if (size > 0) {  in crypto_aegis128_encrypt_chunk()
    236: size -= AEGIS_BLOCK_SIZE;  in crypto_aegis128_decrypt_chunk()
    258: if (size > 0) {  in crypto_aegis128_decrypt_chunk()
    268: memset(msg.bytes + size, 0, AEGIS_BLOCK_SIZE - size);  in crypto_aegis128_decrypt_chunk()
    289: unsigned int left = size;  in crypto_aegis128_process_ad()
    309: assoclen -= size;  in crypto_aegis128_process_ad()
    [all …]
|
dh_helper.c
    15: static inline u8 *dh_pack_data(u8 *dst, u8 *end, const void *src, size_t size)  in dh_pack_data() argument
    17: if (!dst || size > end - dst)  in dh_pack_data()
    19: memcpy(dst, src, size);  in dh_pack_data()
    20: return dst + size;  in dh_pack_data()
    23: static inline const u8 *dh_unpack_data(void *dst, const void *src, size_t size)  in dh_unpack_data() argument
    25: memcpy(dst, src, size);  in dh_unpack_data()
    26: return src + size;  in dh_unpack_data()
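
The two helpers form a bounds-checked cursor serializer: the packer returns the advanced cursor, or NULL on overflow, and a NULL cursor propagates through subsequent calls, so a whole chain of appends needs only one final check. A userspace rendering of the same idea (pack_data mirrors dh_pack_data; pack_params is a hypothetical caller):

    #include <errno.h>
    #include <stdint.h>
    #include <string.h>

    /* Append src at the dst cursor, never writing past end.  A NULL
     * cursor propagates, so a chain of calls needs only one final
     * NULL check instead of one per field. */
    static inline uint8_t *pack_data(uint8_t *dst, uint8_t *end,
                                     const void *src, size_t size)
    {
        if (!dst || size > (size_t)(end - dst))
            return NULL;
        memcpy(dst, src, size);
        return dst + size;
    }

    /* Usage: serialize two fields, validate once at the end. */
    static int pack_params(uint8_t *buf, size_t buflen,
                           const uint8_t *key, uint16_t key_size)
    {
        uint8_t *end = buf + buflen;
        uint8_t *p = buf;

        p = pack_data(p, end, &key_size, sizeof(key_size));
        p = pack_data(p, end, key, key_size);
        return p ? 0 : -EINVAL;
    }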
|
aegis128-neon.c
    40: const u8 *src, unsigned int size)  in crypto_aegis128_encrypt_chunk_simd() argument
    43: crypto_aegis128_encrypt_chunk_neon(state, dst, src, size);  in crypto_aegis128_encrypt_chunk_simd()
    48: const u8 *src, unsigned int size)  in crypto_aegis128_decrypt_chunk_simd() argument
    51: crypto_aegis128_decrypt_chunk_neon(state, dst, src, size);  in crypto_aegis128_decrypt_chunk_simd()
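
These _simd wrappers are thin by design: in the kernel, NEON registers may only be touched between kernel_neon_begin() and kernel_neon_end(), which save and restore the task's FP/SIMD state. A sketch of the wrapper pattern, assuming the obvious prototype from aegis-neon.h (the exact bracketing in this file is inferred, not quoted by the hits):

    #include <asm/neon.h>
    #include <crypto/internal/aead.h>  /* u8 and friends via kernel headers */
    #include "aegis-neon.h"

    /* Bracket the intrinsics-based inner routine so FP/SIMD register
     * state is saved and restored around our use of it. */
    void crypto_aegis128_encrypt_chunk_simd(union aegis_block *state, u8 *dst,
                                            const u8 *src, unsigned int size)
    {
        kernel_neon_begin();
        crypto_aegis128_encrypt_chunk_neon(state, dst, src, size);
        kernel_neon_end();
    }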
|
rsassa-pkcs1.c
    95: size_t size;  member
    145: return len != p->data[p->size - 1];  in rsassa_pkcs1_invalid_hash_len()
    181: if (slen + hash_prefix->size > ctx->key_size - 11)  in rsassa_pkcs1_sign()
    184: pad_len = ctx->key_size - slen - hash_prefix->size - 1;  in rsassa_pkcs1_sign()
    188: memmove(in_buf + pad_len + hash_prefix->size, src, slen);  in rsassa_pkcs1_sign()
    189: memcpy(in_buf + pad_len, hash_prefix->data, hash_prefix->size);  in rsassa_pkcs1_sign()
    285: if (hash_prefix->size > dst_len - pos)  in rsassa_pkcs1_verify()
    287: if (crypto_memneq(out_buf + pos, hash_prefix->data, hash_prefix->size))  in rsassa_pkcs1_verify()
    289: pos += hash_prefix->size;  in rsassa_pkcs1_verify()
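
The sign-side arithmetic (the key_size - 11 bound, pad_len, the two copies) is the EMSA-PKCS1-v1_5 encoding of RFC 8017: three framing bytes plus a mandatory minimum of eight 0xff padding bytes give the "k - 11" capacity limit. A worked standalone sketch of the layout; pkcs1_v15_encode is an illustrative name, and the kernel builds the block in place rather than through a helper like this:

    #include <stdint.h>
    #include <string.h>

    /*
     * EMSA-PKCS1-v1_5: em = 0x00 || 0x01 || PS (0xff bytes) || 0x00 ||
     * DigestInfo || hash, where PS is at least eight bytes long.
     */
    static int pkcs1_v15_encode(uint8_t *em, size_t k,
                                const uint8_t *prefix, size_t plen,
                                const uint8_t *hash, size_t hlen)
    {
        size_t tlen = plen + hlen;   /* DigestInfo + digest */

        if (k < 11 || tlen > k - 11)
            return -1;               /* modulus too small for this hash */

        em[0] = 0x00;
        em[1] = 0x01;
        memset(em + 2, 0xff, k - tlen - 3);  /* PS, >= 8 bytes by the check */
        em[k - tlen - 1] = 0x00;
        memcpy(em + k - tlen, prefix, plen);
        memcpy(em + k - hlen, hash, hlen);
        return 0;
    }

The verify side, as the hits at lines 285-289 suggest, walks the recovered block and compares the prefix with crypto_memneq() so the comparison runs in constant time.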
|
aegis-neon.h
    9: unsigned int size);
    11: unsigned int size);
|
zstd.c
    138: outbuf.size = dcur;  in zstd_compress()
    153: inbuf.size = scur;  in zstd_compress()
    242: inbuf.size = scur;  in zstd_decompress()
    265: outbuf.size = dcur;  in zstd_decompress()
|
jitterentropy-kcapi.c
    228: int size, ret = 0;  in jent_kcapi_init() local
    248: size = sizeof(struct shash_desc) + crypto_shash_descsize(hash);  in jent_kcapi_init()
    249: sdesc = kmalloc(size, GFP_KERNEL);  in jent_kcapi_init()
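
Line 248 is the standard shash descriptor sizing: struct shash_desc is followed directly by algorithm-private state of crypto_shash_descsize() bytes, known only at runtime, so the two are allocated together. A kernel-style sketch of that pattern (alloc_shash_desc is a hypothetical helper name):

    #include <crypto/hash.h>
    #include <linux/slab.h>

    /* Allocate a shash descriptor with room for the algorithm's
     * runtime state trailing the struct. */
    static struct shash_desc *alloc_shash_desc(struct crypto_shash *tfm)
    {
        struct shash_desc *desc;

        desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
                       GFP_KERNEL);
        if (desc)
            desc->tfm = tfm;
        return desc;
    }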
|
aegis.h
    37: const u8 *src, unsigned int size);
    39: const u8 *src, unsigned int size);
|
algapi.c
    1003: static inline void crypto_inc_byte(u8 *a, unsigned int size)  in crypto_inc_byte() argument
    1005: u8 *b = (a + size);  in crypto_inc_byte()
    1008: for (; size; size--) {  in crypto_inc_byte()
    1016: void crypto_inc(u8 *a, unsigned int size)  in crypto_inc() argument
    1018: __be32 *b = (__be32 *)(a + size);  in crypto_inc()
    1023: for (; size >= 4; size -= 4) {  in crypto_inc()
    1030: crypto_inc_byte(a, size);  in crypto_inc()
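
crypto_inc() increments a big-endian counter spanning the whole buffer: it walks 32-bit big-endian words from the least significant end and stops as soon as a word does not carry out, falling back to crypto_inc_byte() for any unaligned remainder. A portable userspace sketch of the byte-level semantics (ctr_inc is an illustrative name):

    #include <stddef.h>
    #include <stdint.h>

    /* Big-endian multi-precision increment: add 1 at the last byte
     * and ripple the carry toward the front.  The kernel version
     * first consumes aligned 32-bit words for speed. */
    static void ctr_inc(uint8_t *ctr, size_t size)
    {
        while (size--)
            if (++ctr[size] != 0)   /* no carry out of this byte: done */
                break;
    }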
|
algif_skcipher.c
    38: size_t size)  in skcipher_sendmsg() argument
    47: return af_alg_sendmsg(sock, msg, size, ivsize);  in skcipher_sendmsg()
    301: size_t size)  in skcipher_sendmsg_nokey() argument
    309: return skcipher_sendmsg(sock, msg, size);  in skcipher_sendmsg_nokey()
|
algif_aead.c
    54: static int aead_sendmsg(struct socket *sock, struct msghdr *msg, size_t size)  in aead_sendmsg() argument
    63: return af_alg_sendmsg(sock, msg, size, ivsize);  in aead_sendmsg()
    382: size_t size)  in aead_sendmsg_nokey() argument
    390: return aead_sendmsg(sock, msg, size);  in aead_sendmsg_nokey()
|
deflate.c
    37: size_t size = max(zlib_inflate_workspacesize(),  in deflate_alloc_stream() local
    42: ctx = kvmalloc(sizeof(*ctx) + size, GFP_KERNEL);  in deflate_alloc_stream()
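
The allocation sizes one trailing workspace for both directions by taking the max of the inflate and deflate workspace requirements, then appends it to the context as a flexible array member. A sketch of the pattern; struct deflate_stream and deflate_stream_alloc are hypothetical names, and the window/level arguments are placeholders (the real file picks its own parameters):

    #include <linux/slab.h>
    #include <linux/zlib.h>

    /* One trailing workspace, big enough for whichever direction
     * (inflate or deflate) needs more room. */
    struct deflate_stream {
        struct z_stream_s stream;
        u8 workspace[];
    };

    static struct deflate_stream *deflate_stream_alloc(void)
    {
        size_t size = max(zlib_inflate_workspacesize(),
                          zlib_deflate_workspacesize(MAX_WBITS,
                                                     MAX_MEM_LEVEL));

        return kvmalloc(sizeof(struct deflate_stream) + size, GFP_KERNEL);
    }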
|
scatterwalk.c
    256: unsigned size;  in skcipher_copy_iv() local
    260: size = aligned_stride + ivsize +  in skcipher_copy_iv()
    263: walk->buffer = kmalloc(size, skcipher_walk_gfp(walk));  in skcipher_copy_iv()
|
af_alg.c
    938: int af_alg_sendmsg(struct socket *sock, struct msghdr *msg, size_t size,  in af_alg_sendmsg() argument
    993: while (size) {  in af_alg_sendmsg()
    995: size_t len = size;  in af_alg_sendmsg()
    1018: size -= len;  in af_alg_sendmsg()
    1060: size -= plen;  in af_alg_sendmsg()
    1090: size -= plen;  in af_alg_sendmsg()
    1097: if (!size)  in af_alg_sendmsg()
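
af_alg_sendmsg() is the kernel half of the AF_ALG user-space crypto interface: the loop consumes the sendmsg() payload into the socket's buffers while the operation type and IV arrive as control messages. A minimal userspace sketch of driving a cbc(aes) skcipher through this path; error handling is omitted, the key and IV are demo values, and it assumes a kernel built with the skcipher user API:

    #include <string.h>
    #include <unistd.h>
    #include <sys/socket.h>
    #include <linux/if_alg.h>

    #ifndef SOL_ALG
    #define SOL_ALG 279
    #endif

    int main(void)
    {
        struct sockaddr_alg sa = {
            .salg_family = AF_ALG,
            .salg_type   = "skcipher",
            .salg_name   = "cbc(aes)",
        };
        unsigned char key[16] = "0123456789abcdef";  /* demo key only */
        unsigned char pt[16]  = "fifteen bytes..";   /* one AES block */
        unsigned char ct[16];
        char cbuf[CMSG_SPACE(sizeof(unsigned int)) +
                  CMSG_SPACE(sizeof(struct af_alg_iv) + 16)] = { 0 };

        int tfm = socket(AF_ALG, SOCK_SEQPACKET, 0);
        bind(tfm, (struct sockaddr *)&sa, sizeof(sa));
        setsockopt(tfm, SOL_ALG, ALG_SET_KEY, key, sizeof(key));
        int op = accept(tfm, NULL, 0);

        /* Operation and IV travel as cmsgs; the payload goes through
         * the sendmsg() path that af_alg_sendmsg() implements. */
        struct iovec iov = { .iov_base = pt, .iov_len = sizeof(pt) };
        struct msghdr msg = {
            .msg_iov = &iov, .msg_iovlen = 1,
            .msg_control = cbuf, .msg_controllen = sizeof(cbuf),
        };

        struct cmsghdr *cmsg = CMSG_FIRSTHDR(&msg);
        cmsg->cmsg_level = SOL_ALG;
        cmsg->cmsg_type  = ALG_SET_OP;
        cmsg->cmsg_len   = CMSG_LEN(sizeof(unsigned int));
        *(unsigned int *)CMSG_DATA(cmsg) = ALG_OP_ENCRYPT;

        cmsg = CMSG_NXTHDR(&msg, cmsg);
        cmsg->cmsg_level = SOL_ALG;
        cmsg->cmsg_type  = ALG_SET_IV;
        cmsg->cmsg_len   = CMSG_LEN(sizeof(struct af_alg_iv) + 16);
        struct af_alg_iv *ivp = (struct af_alg_iv *)CMSG_DATA(cmsg);
        ivp->ivlen = 16;
        memset(ivp->iv, 0, 16);          /* all-zero IV, demo only */

        sendmsg(op, &msg, 0);
        read(op, ct, sizeof(ct));        /* ciphertext comes back */

        close(op);
        close(tfm);
        return 0;
    }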
|
ansi_cprng.c
    69: unsigned char *out, unsigned int size)  in xor_vectors() argument
    73: for (i = 0; i < size; i++)  in xor_vectors()
|
algif_hash.c
    330: size_t size)  in hash_sendmsg_nokey() argument
    338: return hash_sendmsg(sock, msg, size);  in hash_sendmsg_nokey()
|
Kconfig
    506: on 32-bit processors. Khazad uses an 128 bit key size.
    521: It is a 16 round block cipher with the key size of 128 bit.
    766: Use with aes-xts-plain, key size 256, 384 or 512 bits. This
    895: of any size between 1 and 64 bytes. The keyed hash is also implemented.
    1253: size fits into a cache (e.g. L1), only the memory access timing
    1256: obtained. Thus, if the memory size fits into the L1 cache, the
    1257: obtained entropy is less than if the memory size fits within
    1259: L1 + L2 + L3. Thus, by selecting a different memory size,
|
testmgr.c
    940: static void flip_random_bit(struct rnd_state *rng, u8 *buf, size_t size)  in flip_random_bit() argument
    944: bitpos = prandom_u32_below(rng, size * 8);  in flip_random_bit()
    949: static void flip_random_byte(struct rnd_state *rng, u8 *buf, size_t size)  in flip_random_byte() argument
    951: buf[prandom_u32_below(rng, size)] ^= 0xff;  in flip_random_byte()
    955: static void mutate_buffer(struct rnd_state *rng, u8 *buf, size_t size)  in mutate_buffer() argument
    963: size * 8);  in mutate_buffer()
    965: flip_random_bit(rng, buf, size);  in mutate_buffer()
    970: num_flips = min_t(size_t, 1 << prandom_u32_below(rng, 8), size);  in mutate_buffer()
    972: flip_random_byte(rng, buf, size);  in mutate_buffer()
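
These helpers are testmgr's fuzzing mutators: flip one random bit, or invert one random byte, with mutate_buffer() drawing how many of each. The 1 << random-shift draw biases runs toward few flips while occasionally mutating heavily. A loose userspace rendering of the idea (rand() stands in for the kernel's seeded rnd_state, and the mix of bit vs. byte flips is simplified):

    #include <stdint.h>
    #include <stdlib.h>

    /* Flip one randomly chosen bit in buf. */
    static void flip_random_bit(uint8_t *buf, size_t size)
    {
        size_t bitpos = (size_t)rand() % (size * 8);

        buf[bitpos / 8] ^= 1u << (bitpos % 8);
    }

    /* Invert one randomly chosen byte in buf. */
    static void flip_random_byte(uint8_t *buf, size_t size)
    {
        buf[rand() % size] ^= 0xff;
    }

    /* Apply a geometrically distributed number of mutations, capped
     * at the buffer size, choosing bit or byte flips at random. */
    static void mutate_buffer(uint8_t *buf, size_t size)
    {
        size_t flips = (size_t)1 << (rand() % 8);

        if (flips > size)
            flips = size;
        while (flips--) {
            if (rand() & 1)
                flip_random_bit(buf, size);
            else
                flip_random_byte(buf, size);
        }
    }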
|
/crypto/asymmetric_keys/
verify_pefile.c
    95: ctx->certs_size = ddir->certs.size;  in pefile_parse_binary()
    97: if (!ddir->certs.virtual_address || !ddir->certs.size) {  in pefile_parse_binary()
    103: ddir->certs.size);  in pefile_parse_binary()
    105: ctx->sig_len = ddir->certs.size;  in pefile_parse_binary()
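
Line 97 shows the gate on the PE security data directory: a zero virtual_address or size means the binary carries no embedded signature. Parsing such a directory safely also requires an overflow-safe bounds check before touching the region; a sketch of that check (region_ok is an illustrative helper, not the kernel's; pefile_parse_binary() performs the equivalent validation inline):

    #include <stddef.h>
    #include <stdint.h>

    /* Validate a (virtual_address, size) pair from a PE data directory
     * against the mapped file length.  Written so that va + size is
     * never computed directly, avoiding integer overflow. */
    static int region_ok(uint32_t va, uint32_t size, size_t file_len)
    {
        if (!va || !size)
            return 0;                 /* directory entry absent */
        if (va > file_len || size > file_len - va)
            return 0;                 /* region runs past end of file */
        return 1;
    }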
|