#ifndef X86_EMULATE_H
#define X86_EMULATE_H

#include <assert.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>
/*
 * Use of SSE registers must be disabled prior to the definition of
 * always_inline functions that would use them (memcpy, memset, etc.),
 * so do this as early as possible, aiming to be before any such functions
 * are declared. Unfortunately this cannot be done prior to the inclusion
 * of <stdlib.h>, as functions such as atof() are declared there with an
 * SSE register return, so do it here, immediately after that.
 */
#if __GNUC__ >= 6
# pragma GCC target("no-sse")
#endif
/*
 * Attempt detection of unwanted prior inclusion of some headers known to use
 * always_inline with SSE registers in some library / compiler / optimization
 * combinations.
 */
#ifdef _STRING_H
# error "Must not include <string.h> before x86-emulate.h"
#endif
#include <string.h>

/* EOF is a standard macro defined in <stdio.h>, so use it for detection. */
#ifdef EOF
# error "Must not include <stdio.h> before x86-emulate.h"
#endif
#include <stdio.h>

#include <xen/xen.h>

#include <xen/asm/msr-index.h>
#include <xen/asm/x86-defns.h>
#include <xen/asm/x86-vendors.h>

#include <xen-tools/common-macros.h>

#define BUG() abort()
#define ASSERT assert
#define ASSERT_UNREACHABLE() assert(!__LINE__)

#define DEFINE_PER_CPU(type, var) type per_cpu_##var
#define this_cpu(var) per_cpu_##var
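
/*
 * The harness runs single-threaded, emulating on behalf of a single CPU, so
 * the per-CPU constructs above intentionally degenerate to plain globals.
 */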

#define __init
#define __maybe_unused __attribute__((__unused__))

#define likely(x)   __builtin_expect(!!(x), true)
#define unlikely(x) __builtin_expect(!!(x), false)

#define cf_check /* No Control Flow Integrity checking */

#ifdef __GCC_ASM_FLAG_OUTPUTS__
# define ASM_FLAG_OUT(yes, no) yes
#else
# define ASM_FLAG_OUT(yes, no) no
#endif

#define hweight32 __builtin_popcount
#define hweight64 __builtin_popcountll

#define is_canonical_address(x) (((int64_t)(x) >> 47) == ((int64_t)(x) >> 63))
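
/*
 * I.e. an address is canonical when bits 63:47 are all identical, making the
 * value the sign extension of its low 48 bits: e.g. 0x00007fffffffffff and
 * 0xffff800000000000 are canonical, while 0x0000800000000000 is not.
 */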

extern uint32_t mxcsr_mask;
extern struct cpu_policy cp;

#define MMAP_SZ 16384
bool emul_test_init(void);

/* Must save and restore FPU state between any call into libc. */
void emul_save_fpu_state(void);
void emul_restore_fpu_state(void);

struct x86_fxsr *get_fpu_save_area(void);

/*
 * To make the above usable in practice, wrap those library calls we use
 * which we think might access any of the FPU state in functions which
 * save/restore that state around the actual call.
 */
#ifndef WRAP
# define WRAP(x) typeof(x) __wrap_ ## x
#endif

WRAP(fwrite);
WRAP(memcmp);
WRAP(memcpy);
WRAP(memset);
WRAP(printf);
WRAP(putchar);
WRAP(puts);
WRAP(snprintf);
WRAP(strstr);
WRAP(vprintf);
WRAP(vsnprintf);

#undef WRAP
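
/*
 * A minimal sketch of one such wrapper, assuming the final link is performed
 * with the linker's --wrap=memcpy option (which redirects calls to
 * __wrap_memcpy and makes the original available as __real_memcpy):
 *
 *     void *__real_memcpy(void *dst, const void *src, size_t n);
 *
 *     void *__wrap_memcpy(void *dst, const void *src, size_t n)
 *     {
 *         void *ret;
 *
 *         emul_save_fpu_state();
 *         ret = __real_memcpy(dst, src, n);
 *         emul_restore_fpu_state();
 *
 *         return ret;
 *     }
 */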

#include "x86_emulate/x86_emulate.h"

void evex_disp8_test(void *instr, struct x86_emulate_ctxt *ctxt,
                     const struct x86_emulate_ops *ops);
void predicates_test(void *instr, struct x86_emulate_ctxt *ctxt,
                     int (*fetch)(unsigned long offset,
                                  void *p_data,
                                  unsigned int bytes,
                                  struct x86_emulate_ctxt *ctxt));

static inline uint64_t xgetbv(uint32_t xcr)
{
    uint32_t lo, hi;

    /* Raw encoding of the XGETBV instruction. */
    asm ( ".byte 0x0f, 0x01, 0xd0" : "=a" (lo), "=d" (hi) : "c" (xcr) );

    return ((uint64_t)hi << 32) | lo;
}

/*
 * Intentionally checking OSXSAVE here (CPUID leaf 1 ECX bit 27), rather than
 * the XSAVE feature bit, as XGETBV can be executed only with CR4.OSXSAVE set.
 */
#define cpu_has_xsave     (cp.basic.raw[1].c & (1u << 27))
static inline bool xcr0_mask(uint64_t mask)
{
    return cpu_has_xsave && ((xgetbv(0) & mask) == mask);
}
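
/*
 * The mask values used below follow the XCR0 bit layout: 6 covers the SSE
 * (XMM; bit 1) and AVX (YMM; bit 2) state components, while 0xe6 additionally
 * covers the AVX-512 opmask (bit 5), ZMM_Hi256 (bit 6), and Hi16_ZMM (bit 7)
 * components.
 */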

unsigned int rdpkru(void);
void wrpkru(unsigned int val);

/* CPUID reports the CLFLUSH line size in units of 8 bytes. */
#define cache_line_size() (cp.basic.clflush_size * 8)
#define cpu_has_fpu        cp.basic.fpu
#define cpu_has_mmx        cp.basic.mmx
#define cpu_has_fxsr       cp.basic.fxsr
#define cpu_has_sse        cp.basic.sse
#define cpu_has_sse2       cp.basic.sse2
#define cpu_has_sse3       cp.basic.sse3
#define cpu_has_pclmulqdq  cp.basic.pclmulqdq
#define cpu_has_ssse3      cp.basic.ssse3
#define cpu_has_fma       (cp.basic.fma && xcr0_mask(6))
#define cpu_has_sse4_1     cp.basic.sse4_1
#define cpu_has_sse4_2     cp.basic.sse4_2
#define cpu_has_popcnt     cp.basic.popcnt
#define cpu_has_aesni      cp.basic.aesni
#define cpu_has_avx       (cp.basic.avx  && xcr0_mask(6))
#define cpu_has_f16c      (cp.basic.f16c && xcr0_mask(6))

#define cpu_has_avx2      (cp.feat.avx2 && xcr0_mask(6))
#define cpu_has_bmi1       cp.feat.bmi1
#define cpu_has_bmi2       cp.feat.bmi2
#define cpu_has_avx512f   (cp.feat.avx512f  && xcr0_mask(0xe6))
#define cpu_has_avx512dq  (cp.feat.avx512dq && xcr0_mask(0xe6))
#define cpu_has_avx512_ifma (cp.feat.avx512_ifma && xcr0_mask(0xe6))
#define cpu_has_avx512er  (cp.feat.avx512er && xcr0_mask(0xe6))
#define cpu_has_avx512cd  (cp.feat.avx512cd && xcr0_mask(0xe6))
#define cpu_has_sha        cp.feat.sha
#define cpu_has_avx512bw  (cp.feat.avx512bw && xcr0_mask(0xe6))
#define cpu_has_avx512vl  (cp.feat.avx512vl && xcr0_mask(0xe6))
#define cpu_has_avx512_vbmi (cp.feat.avx512_vbmi && xcr0_mask(0xe6))
#define cpu_has_avx512_vbmi2 (cp.feat.avx512_vbmi2 && xcr0_mask(0xe6))
#define cpu_has_gfni       cp.feat.gfni
#define cpu_has_vaes      (cp.feat.vaes && xcr0_mask(6))
#define cpu_has_vpclmulqdq (cp.feat.vpclmulqdq && xcr0_mask(6))
#define cpu_has_avx512_vnni (cp.feat.avx512_vnni && xcr0_mask(0xe6))
#define cpu_has_avx512_bitalg (cp.feat.avx512_bitalg && xcr0_mask(0xe6))
#define cpu_has_avx512_vpopcntdq (cp.feat.avx512_vpopcntdq && xcr0_mask(0xe6))
#define cpu_has_movdiri    cp.feat.movdiri
#define cpu_has_movdir64b  cp.feat.movdir64b
#define cpu_has_avx512_4vnniw (cp.feat.avx512_4vnniw && xcr0_mask(0xe6))
#define cpu_has_avx512_4fmaps (cp.feat.avx512_4fmaps && xcr0_mask(0xe6))
#define cpu_has_avx512_vp2intersect (cp.feat.avx512_vp2intersect && xcr0_mask(0xe6))
#define cpu_has_serialize  cp.feat.serialize
#define cpu_has_avx512_fp16 (cp.feat.avx512_fp16 && xcr0_mask(0xe6))
#define cpu_has_sha512     (cp.feat.sha512 && xcr0_mask(6))
#define cpu_has_sm3        (cp.feat.sm3 && xcr0_mask(6))
#define cpu_has_sm4        (cp.feat.sm4 && xcr0_mask(6))
#define cpu_has_avx_vnni   (cp.feat.avx_vnni && xcr0_mask(6))
#define cpu_has_avx512_bf16 (cp.feat.avx512_bf16 && xcr0_mask(0xe6))
#define cpu_has_avx_ifma   (cp.feat.avx_ifma && xcr0_mask(6))
#define cpu_has_avx_vnni_int8 (cp.feat.avx_vnni_int8 && xcr0_mask(6))
#define cpu_has_avx_ne_convert (cp.feat.avx_ne_convert && xcr0_mask(6))
#define cpu_has_avx_vnni_int16 (cp.feat.avx_vnni_int16 && xcr0_mask(6))

#define cpu_has_xgetbv1   (cpu_has_xsave && cp.xstate.xgetbv1)

#define cpu_has_3dnow_ext  cp.extd._3dnowext
#define cpu_has_sse4a      cp.extd.sse4a
#define cpu_has_xop       (cp.extd.xop  && xcr0_mask(6))
#define cpu_has_fma4      (cp.extd.fma4 && xcr0_mask(6))
#define cpu_has_tbm        cp.extd.tbm

int emul_test_cpuid(
    uint32_t leaf,
    uint32_t subleaf,
    struct cpuid_leaf *res,
    struct x86_emulate_ctxt *ctxt);

int emul_test_read_cr(
    unsigned int reg,
    unsigned long *val,
    struct x86_emulate_ctxt *ctxt);

int emul_test_read_xcr(
    unsigned int reg,
    uint64_t *val,
    struct x86_emulate_ctxt *ctxt);

int emul_test_get_fpu(
    enum x86_emulate_fpu_type type,
    struct x86_emulate_ctxt *ctxt);

void emul_test_put_fpu(
    struct x86_emulate_ctxt *ctxt,
    enum x86_emulate_fpu_type backout,
    const struct x86_emul_fpu_aux *aux);

#endif /* X86_EMULATE_H */