/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SHA-1 optimized for x86_64
 *
 * Copyright 2025 Google LLC
 */
#include <asm/fpu/api.h>
#include <linux/static_call.h>

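/*
 * The static call initially targets the portable C implementation,
 * sha1_blocks_generic(); sha1_mod_init_arch() retargets it once the CPU's
 * SHA-1 capabilities have been detected.
 */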
DEFINE_STATIC_CALL(sha1_blocks_x86, sha1_blocks_generic);

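/*
 * Each wrapper generated by this macro runs the assembly transform inside a
 * kernel_fpu_begin()/kernel_fpu_end() section and falls back to the generic
 * C implementation whenever the FPU/SIMD registers cannot be used (e.g. when
 * called from a hardirq).
 */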
#define DEFINE_X86_SHA1_FN(c_fn, asm_fn)				\
	asmlinkage void asm_fn(struct sha1_block_state *state,		\
			       const u8 *data, size_t nblocks);		\
	static void c_fn(struct sha1_block_state *state,		\
			 const u8 *data, size_t nblocks)		\
	{								\
		if (likely(irq_fpu_usable())) {				\
			kernel_fpu_begin();				\
			asm_fn(state, data, nblocks);			\
			kernel_fpu_end();				\
		} else {						\
			sha1_blocks_generic(state, data, nblocks);	\
		}							\
	}

DEFINE_X86_SHA1_FN(sha1_blocks_ssse3, sha1_transform_ssse3);
DEFINE_X86_SHA1_FN(sha1_blocks_avx, sha1_transform_avx);
DEFINE_X86_SHA1_FN(sha1_blocks_ni, sha1_ni_transform);

#define SHA1_AVX2_BLOCK_OPTSIZE	4 /* optimal 4*64 bytes of SHA1 blocks */

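/*
 * The AVX2 wrapper cannot use DEFINE_X86_SHA1_FN() because it selects
 * between two transforms: the AVX2 code only pays off once there are at
 * least SHA1_AVX2_BLOCK_OPTSIZE blocks to process, so shorter inputs are
 * handled by the AVX transform instead.
 */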
asmlinkage void sha1_transform_avx2(struct sha1_block_state *state,
				    const u8 *data, size_t nblocks);
static void sha1_blocks_avx2(struct sha1_block_state *state,
			     const u8 *data, size_t nblocks)
{
	if (likely(irq_fpu_usable())) {
		kernel_fpu_begin();
		/* Select the optimal transform based on the number of blocks */
		if (nblocks >= SHA1_AVX2_BLOCK_OPTSIZE)
			sha1_transform_avx2(state, data, nblocks);
		else
			sha1_transform_avx(state, data, nblocks);
		kernel_fpu_end();
	} else {
		sha1_blocks_generic(state, data, nblocks);
	}
}

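/* Dispatch to whichever implementation sha1_mod_init_arch() selected. */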
static void sha1_blocks(struct sha1_block_state *state,
			const u8 *data, size_t nblocks)
{
	static_call(sha1_blocks_x86)(state, data, nblocks);
}

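/*
 * Select the best available implementation in decreasing order of
 * preference: SHA-NI, then AVX2 (which additionally requires BMI1, BMI2,
 * and XSAVE support for the XMM and YMM state), then AVX, then SSSE3.
 * If none is supported, the static call keeps targeting
 * sha1_blocks_generic().
 */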
#define sha1_mod_init_arch sha1_mod_init_arch
static inline void sha1_mod_init_arch(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI)) {
		static_call_update(sha1_blocks_x86, sha1_blocks_ni);
	} else if (cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
				     NULL) &&
		   boot_cpu_has(X86_FEATURE_AVX)) {
		if (boot_cpu_has(X86_FEATURE_AVX2) &&
		    boot_cpu_has(X86_FEATURE_BMI1) &&
		    boot_cpu_has(X86_FEATURE_BMI2))
			static_call_update(sha1_blocks_x86, sha1_blocks_avx2);
		else
			static_call_update(sha1_blocks_x86, sha1_blocks_avx);
	} else if (boot_cpu_has(X86_FEATURE_SSSE3)) {
		static_call_update(sha1_blocks_x86, sha1_blocks_ssse3);
	}
}