/lib/tests/

hashtable_test.c
     37  DEFINE_HASHTABLE(hash, 1);               in hashtable_test_hash_empty()
     43  hash_add(hash, &a.node, a.key);          in hashtable_test_hash_empty()
     52  DEFINE_HASHTABLE(hash, 4);               in hashtable_test_hash_hashed()
     56  hash_add(hash, &a.node, a.key);          in hashtable_test_hash_hashed()
     59  hash_add(hash, &b.node, b.key);          in hashtable_test_hash_hashed()
     69  DEFINE_HASHTABLE(hash, 3);               in hashtable_test_hash_add()
     98  DEFINE_HASHTABLE(hash, 6);               in hashtable_test_hash_del()
    128  DEFINE_HASHTABLE(hash, 3);               in hashtable_test_hash_for_each()
    158  DEFINE_HASHTABLE(hash, 3);               in hashtable_test_hash_for_each_safe()
    191  DEFINE_HASHTABLE(hash, 5);               in hashtable_test_hash_for_each_possible()
    [all …]

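These references come from the KUnit tests for the fixed-size hashtable in <linux/hashtable.h>: DEFINE_HASHTABLE() declares a 2^bits-bucket table and hash_add() files an entry's embedded hlist_node under a key. A minimal hedged sketch of that API as the tests exercise it; the entry struct and table name below are invented for illustration:

    #include <linux/hashtable.h>
    #include <linux/printk.h>

    /* Hypothetical entry type; the API only requires an embedded hlist_node. */
    struct demo_entry {
        int key;
        struct hlist_node node;
    };

    /* 2^3 = 8 buckets, matching the DEFINE_HASHTABLE(hash, 3) calls above. */
    static DEFINE_HASHTABLE(demo_table, 3);

    static void demo_hashtable(void)
    {
        struct demo_entry a = { .key = 1 };
        struct demo_entry *cur;

        hash_add(demo_table, &a.node, a.key);            /* insert under key */

        hash_for_each_possible(demo_table, cur, node, 1) /* walk key 1's bucket */
            if (cur->key == 1)
                pr_info("found key 1\n");

        hash_del(&a.node);                               /* unlink the entry */
    }

The key only selects a bucket, so lookups still compare the real key while walking the bucket, as the loop above does.
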
siphash_kunit.c
    113  #define chk(hash, vector, fmt...) \              argument
    114      KUNIT_EXPECT_EQ_MSG(test, hash, vector, fmt)

/lib/

oid_registry.c
     31  unsigned i, j, k, hash;                                    in look_up_OID() local
     35  hash = datasize - 1;                                       in look_up_OID()
     38  hash += octets[i] * 33;                                    in look_up_OID()
     39  hash = (hash >> 24) ^ (hash >> 16) ^ (hash >> 8) ^ hash;   in look_up_OID()
     40  hash &= 0xff;                                              in look_up_OID()
     51  xhash = oid_search_table[j].hash;                          in look_up_OID()
     52  if (xhash > hash) {                                        in look_up_OID()
     56  if (xhash < hash) {                                        in look_up_OID()

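The look_up_OID() lines show how the 8-bit key for the OID search table is derived: seed with the encoded length minus one, accumulate each octet times 33, then XOR-fold the 32-bit value down to a single byte before the binary search compares it against oid_search_table[].hash. A standalone restatement of that fold; the function name is a hypothetical stand-in:

    #include <stddef.h>
    #include <stdint.h>

    /* Fold an OID's encoded octets down to the one-byte hash used as a sort key. */
    static uint8_t oid_fold_hash(const uint8_t *octets, size_t datasize)
    {
        unsigned int hash = datasize - 1;
        size_t i;

        for (i = 0; i < datasize; i++)
            hash += octets[i] * 33;
        hash = (hash >> 24) ^ (hash >> 16) ^ (hash >> 8) ^ hash;
        return hash & 0xff;
    }

build_OID_registry (next entry) runs the same arithmetic at build time so the generated table can be pre-sorted by this byte.
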
build_OID_registry
    125  my $hash = $#octets;
    127  $hash += $_ * 33;
    130  $hash = ($hash >> 24) ^ ($hash >> 16) ^ ($hash >> 8) ^ ($hash);
    132  push @hash_values, $hash & 0xff;

rhashtable.c
    508  rht_for_each_from(head, rht_ptr(bkt, tbl, hash), tbl, hash) {   in rhashtable_lookup_one()
    572  head = rht_ptr(bkt, tbl, hash);                                 in rhashtable_insert_one()
    597  unsigned int hash;                                              in rhashtable_try_insert() local
    607  bkt = rht_bucket_var(tbl, hash);                                in rhashtable_try_insert()
    618  hash, key, obj);                                                in rhashtable_try_insert()
    620  hash, obj, data);                                               in rhashtable_try_insert()
   1194  unsigned int subhash = hash;                                    in __rht_bucket_nested()
   1204  tbl, hash);                                                     in __rht_bucket_nested()
   1237  hash >>= tbl->nest;                                             in rht_bucket_nested_insert()
   1244  hash >>= shift;                                                 in rht_bucket_nested_insert()
    [all …]

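These are internals of the resizable hash table: the hash selects a bucket via rht_bucket_var()/rht_ptr() and the chain is then walked or spliced. Callers normally stay on the *_fast() wrappers instead. A hedged sketch of that public API with an invented object layout and params; the on-stack object and trimmed error handling are for illustration only:

    #include <linux/rhashtable.h>
    #include <linux/bug.h>

    struct demo_obj {
        u32 key;
        struct rhash_head node;            /* table linkage */
    };

    static const struct rhashtable_params demo_params = {
        .key_len     = sizeof(u32),
        .key_offset  = offsetof(struct demo_obj, key),
        .head_offset = offsetof(struct demo_obj, node),
    };

    static int demo_rht(void)
    {
        struct rhashtable ht;
        struct demo_obj obj = { .key = 42 };
        u32 key = 42;
        int err;

        err = rhashtable_init(&ht, &demo_params);
        if (err)
            return err;

        err = rhashtable_insert_fast(&ht, &obj.node, demo_params);
        if (!err) {
            /* Lookup hashes the key with the same params and walks one bucket. */
            struct demo_obj *found = rhashtable_lookup_fast(&ht, &key, demo_params);

            WARN_ON(found != &obj);
            rhashtable_remove_fast(&ht, &obj.node, demo_params);
        }

        rhashtable_destroy(&ht);
        return err;
    }
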
stackdepot.c
    430  depot_alloc_stack(unsigned long *entries, unsigned int nr_entries, u32 hash, depot_flags_t flags, v…  in depot_alloc_stack() argument
    463  stack->hash = hash;                                                    in depot_alloc_stack()
    586  u32 hash, depot_flags_t flags)                                         in find_stack() argument
    602  if (stack->hash != hash || stack->size != size)                        in find_stack()
    646  u32 hash;                                                              in stack_depot_save_flags() local
    664  hash = hash_stack(entries, nr_entries);                                in stack_depot_save_flags()
    665  bucket = &stack_table[hash & stack_hash_mask];                         in stack_depot_save_flags()
    668  found = find_stack(bucket, entries, nr_entries, hash, depot_flags);    in stack_depot_save_flags()
    695  found = find_stack(bucket, entries, nr_entries, hash, depot_flags);    in stack_depot_save_flags()
    698  depot_alloc_stack(entries, nr_entries, hash, depot_flags, &prealloc);  in stack_depot_save_flags()

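stack_depot_save_flags() deduplicates stack traces: it hashes the array of return addresses, masks the hash to pick a bucket, and only allocates a new record when find_stack() misses (the references show the lookup repeated before depot_alloc_stack(), i.e. a re-check before allocating). A simplified, hedged restatement of that find-or-insert pattern in plain C; the record type, bucket count and the FNV-1a stand-in for hash_stack() are assumptions, not the depot's real internals:

    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    struct stack_record {
        struct stack_record *next;         /* bucket chain */
        uint32_t hash;
        uint32_t size;                     /* number of entries */
        unsigned long entries[];
    };

    #define STACK_BUCKETS 1024             /* power of two, so masking works */
    static struct stack_record *stack_table[STACK_BUCKETS];

    /* Stand-in for hash_stack(): FNV-1a over the raw bytes of the trace. */
    static uint32_t hash_stack(const unsigned long *entries, uint32_t n)
    {
        const uint8_t *p = (const uint8_t *)entries;
        uint32_t h = 2166136261u;
        size_t i;

        for (i = 0; i < n * sizeof(*entries); i++)
            h = (h ^ p[i]) * 16777619u;
        return h;
    }

    static struct stack_record *depot_find_or_add(const unsigned long *entries, uint32_t n)
    {
        uint32_t hash = hash_stack(entries, n);
        struct stack_record **bucket = &stack_table[hash & (STACK_BUCKETS - 1)];
        struct stack_record *rec;

        /* Cheap hash/size checks first, full memcmp() only on a likely match. */
        for (rec = *bucket; rec; rec = rec->next)
            if (rec->hash == hash && rec->size == n &&
                !memcmp(rec->entries, entries, n * sizeof(*entries)))
                return rec;

        rec = malloc(sizeof(*rec) + n * sizeof(*entries));
        if (!rec)
            return NULL;
        rec->hash = hash;
        rec->size = n;
        memcpy(rec->entries, entries, n * sizeof(*entries));
        rec->next = *bucket;               /* push onto the bucket chain */
        *bucket = rec;
        return rec;
    }
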
digsig.c
    205  unsigned char hash[SHA1_DIGEST_SIZE];    in digsig_verify() local
    244  crypto_shash_final(desc, hash);          in digsig_verify()
    250  hash, sizeof(hash));                     in digsig_verify()

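digsig_verify() digests the payload with the crypto API's synchronous hash ("shash") interface before checking it against the signature. A hedged sketch of that crypto_shash call pattern; the wrapper function and buffer names are illustrative:

    #include <crypto/hash.h>
    #include <crypto/sha1.h>
    #include <linux/err.h>

    static int demo_sha1(const void *data, unsigned int len, u8 out[SHA1_DIGEST_SIZE])
    {
        struct crypto_shash *tfm;
        int err;

        tfm = crypto_alloc_shash("sha1", 0, 0);
        if (IS_ERR(tfm))
            return PTR_ERR(tfm);

        {
            SHASH_DESC_ON_STACK(desc, tfm);      /* request descriptor on the stack */

            desc->tfm = tfm;
            err = crypto_shash_init(desc) ?:
                  crypto_shash_update(desc, data, len) ?:
                  crypto_shash_final(desc, out);
        }

        crypto_free_shash(tfm);
        return err;
    }
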
debugobjects.c
    588  unsigned long hash;                                                in get_bucket() local
    590  hash = hash_long((addr >> ODEBUG_CHUNK_SHIFT), ODEBUG_HASH_BITS);  in get_bucket()
    591  return &obj_hash[hash];                                            in get_bucket()

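get_bucket() discards the low chunk bits of the object address and runs the rest through hash_long() to select one of 2^ODEBUG_HASH_BITS buckets. A hedged illustration of that multiplicative bucket selection; the golden-ratio multiplier follows <linux/hash.h>, while the shift and bucket-count values here are stand-ins rather than the real ODEBUG_* macros:

    #include <stdint.h>

    #define GOLDEN_RATIO_64  0x61C8864680B583EBull   /* multiplier used by hash_64() */
    #define CHUNK_SHIFT      4                       /* stand-in for ODEBUG_CHUNK_SHIFT */
    #define HASH_BITS        14                      /* stand-in for ODEBUG_HASH_BITS */

    /* Multiply by the 64-bit golden ratio and keep the top HASH_BITS bits. */
    static unsigned int bucket_index(unsigned long addr)
    {
        uint64_t v = addr >> CHUNK_SHIFT;            /* objects in one chunk share a bucket */

        return (unsigned int)((v * GOLDEN_RATIO_64) >> (64 - HASH_BITS));
    }
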
Kconfig.debug
   2508  tristate "Perform selftest on resizable hash table"
   2684  tristate "KUnit Test for integer hash functions" if !KUNIT_ALL_TESTS
   2689  integer (<linux/hash.h>) hash functions on boot.
   2919  Enable this option to test the kernel's siphash (<linux/siphash.h>) hash

test_bpf.c
  14751  skb->hash = SKB_HASH;      in populate_skb()

/lib/crypto/mips/

sha1.h
     25  u64 *hash = (u64 *)&state->h[0];              in octeon_sha1_store_hash() local
     31  write_octeon_64bit_hash_dword(hash[0], 0);    in octeon_sha1_store_hash()
     32  write_octeon_64bit_hash_dword(hash[1], 1);    in octeon_sha1_store_hash()
     39  u64 *hash = (u64 *)&state->h[0];              in octeon_sha1_read_hash() local
     45  hash[0] = read_octeon_64bit_hash_dword(0);    in octeon_sha1_read_hash()
     46  hash[1] = read_octeon_64bit_hash_dword(1);    in octeon_sha1_read_hash()

/lib/zstd/compress/

zstd_ldm.c
     69  U64 hash = state->rolling;                                in ZSTD_ldm_gear_reset() local
     73  hash = (hash << 1) + ZSTD_ldm_gearTab[data[n] & 0xff]; \  in ZSTD_ldm_gear_reset()
    102  U64 hash, mask;                                           in ZSTD_ldm_gear_feed() local
    104  hash = state->rolling;                                    in ZSTD_ldm_gear_feed()
    109  hash = (hash << 1) + ZSTD_ldm_gearTab[data[n] & 0xff]; \  in ZSTD_ldm_gear_feed()
    111  if (UNLIKELY((hash & mask) == 0)) { \                     in ZSTD_ldm_gear_feed()
    132  state->rolling = hash;                                    in ZSTD_ldm_gear_feed()
    190  return ldmState->hashTable + (hash << bucketSizeLog);     in ZSTD_ldm_getBucket()
    199  BYTE* const pOffset = ldmState->bucketOffsets + hash;     in ZSTD_ldm_insertEntry()
    396  candidates[n].hash = hash;                                in ZSTD_ldm_generateSequences_internal()
    [all …]

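The long-distance matcher maintains a gear rolling hash: each input byte shifts the hash left by one bit and adds a byte-indexed random 64-bit constant, and a candidate split point is reported whenever the masked low bits are all zero. A hedged standalone sketch of that loop; the table initialisation and the mask_bits parameter are illustrative, not zstd's ZSTD_ldm_gearTab or its real mask:

    #include <stddef.h>
    #include <stdint.h>

    static uint64_t gear_tab[256];         /* one random 64-bit value per byte */

    static void gear_tab_init(uint64_t seed)
    {
        for (int i = 0; i < 256; i++) {
            /* Simple LCG just to fill the table; any decent PRNG works. */
            seed = seed * 6364136223846793005ull + 1442695040888963407ull;
            gear_tab[i] = seed;
        }
    }

    /*
     * Roll the hash over data[] and return the position one past the first
     * boundary, or size if none is found.  Checking mask_bits low bits gives
     * boundaries on average every 2^mask_bits bytes.
     */
    static size_t gear_find_split(const uint8_t *data, size_t size,
                                  unsigned mask_bits, uint64_t *rolling)
    {
        uint64_t hash = *rolling;
        uint64_t mask = (1ull << mask_bits) - 1;
        size_t n;

        for (n = 0; n < size; n++) {
            hash = (hash << 1) + gear_tab[data[n]];
            if ((hash & mask) == 0) {      /* split point */
                *rolling = hash;
                return n + 1;
            }
        }
        *rolling = hash;
        return size;
    }
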
zstd_compress_internal.h
    328  U32 hash;                                                                    member
    976  static U64 ZSTD_rollingHash_append(U64 hash, void const* buf, size_t size)   in ZSTD_rollingHash_append() argument
    981  hash *= prime8bytes;                                                         in ZSTD_rollingHash_append()
    982  hash += istart[pos] + ZSTD_ROLL_HASH_CHAR_OFFSET;                            in ZSTD_rollingHash_append()
    984  return hash;                                                                 in ZSTD_rollingHash_append()
   1009  hash -= (toRemove + ZSTD_ROLL_HASH_CHAR_OFFSET) * primePower;                in ZSTD_rollingHash_rotate()
   1010  hash *= prime8bytes;                                                         in ZSTD_rollingHash_rotate()
   1011  hash += toAdd + ZSTD_ROLL_HASH_CHAR_OFFSET;                                  in ZSTD_rollingHash_rotate()
   1012  return hash;                                                                 in ZSTD_rollingHash_rotate()
   1480  size_t const hash = hashAndTag >> ZSTD_SHORT_CACHE_TAG_BITS;                 in ZSTD_writeTaggedIndex() local
    [all …]

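ZSTD_rollingHash_append() and ZSTD_rollingHash_rotate() implement a polynomial rolling hash: appending multiplies the running hash by a prime and adds the offset-biased byte, and rotating a fixed-length window additionally subtracts the outgoing byte scaled by prime^(window-1) before the multiply. A hedged restatement; the prime and character offset below are assumptions standing in for prime8bytes and ZSTD_ROLL_HASH_CHAR_OFFSET:

    #include <stddef.h>
    #include <stdint.h>

    #define ROLL_PRIME        0xCF1BBCDCB7A56463ull  /* large odd 64-bit multiplier */
    #define ROLL_CHAR_OFFSET  10                     /* bias so zero bytes still contribute */

    /* Hash `size` bytes: H = ((b0+off)*P + (b1+off))*P + ... (mod 2^64). */
    static uint64_t roll_append(uint64_t hash, const uint8_t *buf, size_t size)
    {
        size_t pos;

        for (pos = 0; pos < size; pos++) {
            hash *= ROLL_PRIME;
            hash += buf[pos] + ROLL_CHAR_OFFSET;
        }
        return hash;
    }

    /* prime_power must be ROLL_PRIME^(window_length - 1), precomputed once. */
    static uint64_t roll_rotate(uint64_t hash, uint8_t to_add, uint8_t to_remove,
                                uint64_t prime_power)
    {
        hash -= (to_remove + ROLL_CHAR_OFFSET) * prime_power;  /* drop oldest byte */
        hash *= ROLL_PRIME;                                    /* advance the window */
        hash += to_add + ROLL_CHAR_OFFSET;                     /* add newest byte */
        return hash;
    }
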
zstd_lazy.c
    850  U32 const row = (hash >> ZSTD_ROW_HASH_TAG_BITS) << rowLog;                           in ZSTD_row_fillHashCache()
    852  ms->hashCache[idx & ZSTD_ROW_HASH_CACHE_MASK] = hash;                                 in ZSTD_row_fillHashCache()
    875  { U32 const hash = cache[idx & ZSTD_ROW_HASH_CACHE_MASK];                             in ZSTD_row_nextCachedHash() local
    877  return hash;                                                                          in ZSTD_row_nextCachedHash()
    900  U32 const relRow = (hash >> ZSTD_ROW_HASH_TAG_BITS) << rowLog;                        in ZSTD_row_update_internalImpl()
    906  tagRow[pos] = hash & ZSTD_ROW_HASH_TAG_MASK;                                          in ZSTD_row_update_internalImpl()
   1172  U32 hash;                                                                             in ZSTD_RowFindBestMatch() local
   1213  hash = (U32)ZSTD_hashPtrSalted(ip, hashLog + ZSTD_ROW_HASH_TAG_BITS, mls, hashSalt);  in ZSTD_RowFindBestMatch()
   1216  ms->hashSaltEntropy += hash; /* collect salt entropy */                               in ZSTD_RowFindBestMatch()
   1219  U32 const relRow = (hash >> ZSTD_ROW_HASH_TAG_BITS) << rowLog;                        in ZSTD_RowFindBestMatch()
    [all …]

zstd_fast.c
     82  size_t const hash = ZSTD_hashPtr(ip + p, hBits, mls);    in ZSTD_fillHashTableForCCtx() local
     83  if (hashTable[hash] == 0) { /* not yet filled */         in ZSTD_fillHashTableForCCtx()
     84  hashTable[hash] = curr + p;                              in ZSTD_fillHashTableForCCtx()

/lib/crypto/tests/

hash-test-template.h
    133  u8 hash[HASH_SIZE];                  in test_hash_all_lens_up_to_4096() local
    139  HASH(test_buf, len, hash);           in test_hash_all_lens_up_to_4096()
    142  HASH_FINAL(&ctx, hash);              in test_hash_all_lens_up_to_4096()
    211  u8 hash[HASH_SIZE];                  in test_hash_buffer_overruns() local
    214  HASH(buf_end - len, len, hash);      in test_hash_buffer_overruns()
    217  HASH_FINAL(&ctx, hash);              in test_hash_buffer_overruns()
    228  HASH_FINAL(guarded_ctx, hash);       in test_hash_buffer_overruns()
    240  u8 hash[HASH_SIZE];                  in test_hash_overlaps() local
    254  test, hash, ovl_hash, HASH_SIZE,     in test_hash_overlaps()
    658  u8 hash[HASH_SIZE];                  in benchmark_hash() local
    [all …]

Kconfig
     19  KUnit tests for the SHA-1 cryptographic hash function and its
     31  KUnit tests for the SHA-224 and SHA-256 cryptographic hash functions
     43  KUnit tests for the SHA-384 and SHA-512 cryptographic hash functions

/lib/xz/

xz_dec_stream.c
     92  struct xz_dec_hash hash;                        member
    114  struct xz_dec_hash hash;                        member
    265  s->block.hash.unpadded += 4;                    in dec_block()
    269  s->block.hash.crc32 = xz_crc32(                 in dec_block()
    270  (const uint8_t *)&s->block.hash,                in dec_block()
    271  sizeof(s->block.hash), s->block.hash.crc32);    in dec_block()
    328  s->index.hash.crc32 = xz_crc32(                 in dec_index()
    330  sizeof(s->index.hash),                          in dec_index()
    331  s->index.hash.crc32);                           in dec_index()
    695  if (!memeq(&s->block.hash, &s->index.hash,      in dec_main()
    [all …]

/lib/crypto/

blake2s-selftest.c
    552  u8 hash[BLAKE2S_HASH_SIZE];                                      in blake2s_digest_test() local
    568  blake2s(hash, buf, key + BLAKE2S_KEY_SIZE - keylen, outlen, i,   in blake2s_digest_test()
    570  if (memcmp(hash, blake2s_testvecs[i], outlen)) {                 in blake2s_digest_test()
    584  blake2s_final(&state, hash);                                     in blake2s_digest_test()
    585  if (memcmp(hash, blake2s_testvecs[i], outlen)) {                 in blake2s_digest_test()

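blake2s_digest_test() checks both the one-shot blake2s() helper and the incremental blake2s_init()/blake2s_update()/blake2s_final() path against the same test vectors. A hedged sketch of the two call patterns from <crypto/blake2s.h>; the message buffer and the consistency check are illustrative:

    #include <crypto/blake2s.h>
    #include <linux/bug.h>
    #include <linux/string.h>

    static void demo_blake2s(const u8 *msg, size_t len)
    {
        u8 oneshot[BLAKE2S_HASH_SIZE];
        u8 incremental[BLAKE2S_HASH_SIZE];
        struct blake2s_state state;

        /* Unkeyed one-shot form: out, in, key, outlen, inlen, keylen. */
        blake2s(oneshot, msg, NULL, BLAKE2S_HASH_SIZE, len, 0);

        /* Same digest computed incrementally. */
        blake2s_init(&state, BLAKE2S_HASH_SIZE);
        blake2s_update(&state, msg, len);
        blake2s_final(&state, incremental);

        WARN_ON(memcmp(oneshot, incremental, BLAKE2S_HASH_SIZE) != 0);
    }
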
/lib/crypto/x86/

Kconfig
      9  BLAKE2s cryptographic hash function (RFC 7693)

sha1-avx2-asm.S
    151  .macro UPDATE_HASH hash, val
    152  add \hash, \val
    153  mov \val, \hash

sha1-ssse3-and-avx.S
    243  .macro UPDATE_HASH hash, val
    244  add \hash, \val
    245  mov \val, \hash

/lib/crypto/arm/

Kconfig
      7  BLAKE2s cryptographic hash function (RFC 7693)

/lib/zstd/decompress/

zstd_decompress.c
     91  const U64 hash = xxh64(&dictID, sizeof(U32), 0);     in ZSTD_DDictHashSet_getIndex() local
     93  return hash & (hashSet->ddictPtrTableSize - 1);      in ZSTD_DDictHashSet_getIndex()