// SPDX-License-Identifier: GPL-2.0-only
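/*
 * CRC32C for powerpc, accelerated with the POWER8 vector polynomial
 * multiply-sum (vpmsum) instructions. This is glue code only: the
 * vector inner loop is provided by __crc32c_vpmsum(), and everything
 * else falls back to the generic scalar implementation (crc32c_base).
 */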
#include <asm/switch_to.h>
#include <crypto/internal/simd.h>
#include <linux/cpufeature.h>
#include <linux/jump_label.h>
#include <linux/preempt.h>
#include <linux/uaccess.h>

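/* The vector routine is only run over 16-byte-aligned, 16-byte-multiple data */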
#define VMX_ALIGN		16
#define VMX_ALIGN_MASK		(VMX_ALIGN-1)

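/* Buffers shorter than this (plus alignment slack) stay on the scalar path */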
#define VECTOR_BREAKPOINT	512

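/* Set once at init time if the CPU has the POWER8 vector crypto features */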
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_vec_crypto);

#define crc32_le_arch crc32_le_base /* not implemented on this arch */
#define crc32_be_arch crc32_be_base /* not implemented on this arch */

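/* Vector inner loop; only called on aligned, 16-byte-multiple data (see crc32c_arch()) */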
u32 __crc32c_vpmsum(u32 crc, const u8 *p, size_t len);

static inline u32 crc32c_arch(u32 crc, const u8 *p, size_t len)
{
	unsigned int prealign;
	unsigned int tail;

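	/* Too short, no vector crypto, or vector registers unusable here */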
	if (len < (VECTOR_BREAKPOINT + VMX_ALIGN) ||
	    !static_branch_likely(&have_vec_crypto) || !crypto_simd_usable())
		return crc32c_base(crc, p, len);

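	/* Checksum leading bytes with the scalar code until p is 16-byte aligned */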
	if ((unsigned long)p & VMX_ALIGN_MASK) {
		prealign = VMX_ALIGN - ((unsigned long)p & VMX_ALIGN_MASK);
		crc = crc32c_base(crc, p, prealign);
		len -= prealign;
		p += prealign;
	}

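	/*
	 * Bulk of the buffer: vector registers may only be touched between
	 * enable_kernel_altivec() and disable_kernel_altivec(), with
	 * preemption and page faults disabled so the vector state cannot be
	 * switched out from under us.
	 */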
	if (len & ~VMX_ALIGN_MASK) {
		preempt_disable();
		pagefault_disable();
		enable_kernel_altivec();
		crc = __crc32c_vpmsum(crc, p, len & ~VMX_ALIGN_MASK);
		disable_kernel_altivec();
		pagefault_enable();
		preempt_enable();
	}

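	/* Checksum the remaining sub-16-byte tail with the scalar code */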
	tail = len & VMX_ALIGN_MASK;
	if (tail) {
		p += len & ~VMX_ALIGN_MASK;
		crc = crc32c_base(crc, p, tail);
	}

	return crc;
}

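/* Called once at init: enable the vector path on CPUs that support it */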
#define crc32_mod_init_arch crc32_mod_init_arch
static inline void crc32_mod_init_arch(void)
{
	if (cpu_has_feature(CPU_FTR_ARCH_207S) &&
	    (cur_cpu_spec->cpu_user_features2 & PPC_FEATURE2_VEC_CRYPTO))
		static_branch_enable(&have_vec_crypto);
}

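/* Report which CRC variants are accelerated here (CRC32C only) */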
static inline u32 crc32_optimizations_arch(void)
{
	if (static_key_enabled(&have_vec_crypto))
		return CRC32C_OPTIMIZATION;
	return 0;
}