#ifndef __X86_ASM_DEFNS_H__
#define __X86_ASM_DEFNS_H__

#ifndef COMPILE_OFFSETS
/* NB. Auto-generated from arch/.../asm-offsets.c */
#include <asm/asm-offsets.h>
#endif
#include <asm/bug.h>
#include <asm/processor.h>
#include <asm/percpu.h>
#include <xen/stringify.h>
#include <asm/cpufeature.h>
#include <asm/alternative.h>

#ifndef __ASSEMBLY__
void ret_from_intr(void);
#endif

#ifdef CONFIG_FRAME_POINTER
/* Indicate special exception stack frame by inverting the frame pointer. */
#define SETUP_EXCEPTION_FRAME_POINTER(offs) \
        leaq offs(%rsp),%rbp; \
        notq %rbp
#else
#define SETUP_EXCEPTION_FRAME_POINTER(offs)
#endif

#ifndef NDEBUG
#define ASSERT_INTERRUPT_STATUS(x, msg) \
        pushf; \
        testb $X86_EFLAGS_IF>>8,1(%rsp); \
        j##x 1f; \
        ASSERT_FAILED(msg); \
1:      addq $8,%rsp;
#else
#define ASSERT_INTERRUPT_STATUS(x, msg)
#endif

#define ASSERT_INTERRUPTS_ENABLED \
    ASSERT_INTERRUPT_STATUS(nz, "INTERRUPTS ENABLED")
#define ASSERT_INTERRUPTS_DISABLED \
    ASSERT_INTERRUPT_STATUS(z, "INTERRUPTS DISABLED")
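
/*
 * Illustrative use (hypothetical call site) from an assembly path, e.g.
 * asserting state before re-enabling interrupts:
 *
 *     ASSERT_INTERRUPTS_DISABLED
 *     sti
 *
 * The macro pushes EFLAGS, tests the saved EFLAGS.IF bit, and raises
 * ASSERT_FAILED(msg) if the expected condition does not hold.
 */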

/*
 * This flag is set in an exception frame when registers R12-R15 did not get
 * saved.
 */
#define _TRAP_regs_partial 16
#define TRAP_regs_partial (1 << _TRAP_regs_partial)
/*
 * This flag gets set in an exception frame when registers R12-R15 possibly
 * got modified from their originally saved values and hence need to be
 * restored even if the normal call flow would restore register values.
 *
 * When this flag is set, _TRAP_regs_partial must be unset. Restoring
 * R12-R15 thus is
 * - required when this flag is set,
 * - safe when _TRAP_regs_partial is unset.
 */
#define _TRAP_regs_dirty 17
#define TRAP_regs_dirty (1 << _TRAP_regs_dirty)

#define mark_regs_dirty(r) ({ \
    struct cpu_user_regs *r__ = (r); \
    ASSERT(!((r__)->entry_vector & TRAP_regs_partial)); \
    r__->entry_vector |= TRAP_regs_dirty; \
})
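
/*
 * Illustrative (hypothetical) C call site: code that modifies one of the
 * preserved registers in a saved frame marks the frame dirty so that
 * RESTORE_ALL reloads R12-R15 from it:
 *
 *     mark_regs_dirty(guest_cpu_user_regs());
 */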

#ifdef __ASSEMBLY__
# define _ASM_EX(p) p-.
#else
# define _ASM_EX(p) #p "-."
#endif

/* Exception table entry */
#ifdef __ASSEMBLY__
# define _ASM__EXTABLE(sfx, from, to) \
    .section .ex_table##sfx, "a" ; \
    .balign 4 ; \
    .long _ASM_EX(from), _ASM_EX(to) ; \
    .previous
#else
# define _ASM__EXTABLE(sfx, from, to) \
    " .section .ex_table" #sfx ",\"a\"\n" \
    " .balign 4\n" \
    " .long " _ASM_EX(from) ", " _ASM_EX(to) "\n" \
    " .previous\n"
#endif

#define _ASM_EXTABLE(from, to)     _ASM__EXTABLE(, from, to)
#define _ASM_PRE_EXTABLE(from, to) _ASM__EXTABLE(.pre, from, to)
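
/*
 * Illustrative (hypothetical) use from C inline assembly: record that a
 * fault on the instruction at label 1 should resume at label 2.
 *
 *     asm volatile ( "1: mov %[v], (%[p])\n"
 *                    "2:\n"
 *                    _ASM_EXTABLE(1b, 2b)
 *                    :: [v] "r" (val), [p] "r" (ptr) : "memory" );
 *
 * Entries are stored as 32-bit offsets relative to the table entry itself,
 * hence the "-." in _ASM_EX().
 */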

#ifdef __ASSEMBLY__

#ifdef HAVE_GAS_QUOTED_SYM
#define SUBSECTION_LBL(tag) \
    .ifndef .L.tag; \
    .equ .L.tag, 1; \
    .equ __stringify(__OBJECT_LABEL__.tag), .; \
    .endif
#else
#define SUBSECTION_LBL(tag) \
    .ifndef __OBJECT_LABEL__.tag; \
    __OBJECT_LABEL__.tag:; \
    .endif
#endif

#define UNLIKELY_START(cond, tag) \
    .Ldispatch.tag: \
    j##cond .Lunlikely.tag; \
    .subsection 1; \
    SUBSECTION_LBL(unlikely); \
    .Lunlikely.tag:

#define UNLIKELY_DISPATCH_LABEL(tag) \
    .Ldispatch.tag

#define UNLIKELY_DONE(cond, tag) \
    j##cond .Llikely.tag

#define __UNLIKELY_END(tag) \
    .subsection 0; \
    .Llikely.tag:

#define UNLIKELY_END(tag) \
    UNLIKELY_DONE(mp, tag); \
    __UNLIKELY_END(tag)
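
/*
 * Illustrative (hypothetical) .S usage: branch to an out-of-line slow path
 * kept in subsection 1, then rejoin the main flow:
 *
 *     cmpb $0, some_flag(%rip)
 *     UNLIKELY_START(ne, slowpath)
 *     call handle_slow_case
 *     UNLIKELY_END(slowpath)
 *
 * Both "some_flag" and "handle_slow_case" are made-up names for the example.
 */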

#define STACK_CPUINFO_FIELD(field) (1 - CPUINFO_sizeof + CPUINFO_##field)
#define GET_STACK_END(reg) \
    movl $STACK_SIZE-1, %e##reg; \
    orq %rsp, %r##reg

#define GET_CPUINFO_FIELD(field, reg) \
    GET_STACK_END(reg); \
    addq $STACK_CPUINFO_FIELD(field), %r##reg

#define __GET_CURRENT(reg) \
    movq STACK_CPUINFO_FIELD(current_vcpu)(%r##reg), %r##reg
#define GET_CURRENT(reg) \
    GET_STACK_END(reg); \
    __GET_CURRENT(reg)
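
/*
 * GET_STACK_END() rounds %rsp up to the last byte of the STACK_SIZE-aligned
 * stack; the struct cpu_info at the stack top is then addressed via the
 * negative offsets produced by STACK_CPUINFO_FIELD().  Illustrative
 * (hypothetical) use:
 *
 *     GET_CURRENT(bx)
 *
 * leaves the current vcpu pointer in %rbx.
 */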

#ifndef NDEBUG
#define ASSERT_NOT_IN_ATOMIC \
    sti; /* sometimes called with interrupts disabled: safe to enable */ \
    call ASSERT_NOT_IN_ATOMIC
#else
#define ASSERT_NOT_IN_ATOMIC
#endif

#define CPUINFO_FEATURE_OFFSET(feature) \
    (CPUINFO_features + (cpufeat_word(feature) * 4))

#else

#ifdef HAVE_GAS_QUOTED_SYM
#define SUBSECTION_LBL(tag) \
    ".ifndef .L." #tag "\n\t" \
    ".equ .L." #tag ", 1\n\t" \
    ".equ \"" __stringify(__OBJECT_LABEL__) "." #tag "\", .\n\t" \
    ".endif"
#else
#define SUBSECTION_LBL(tag) \
    ".ifndef " __stringify(__OBJECT_LABEL__) "." #tag "\n\t" \
    __stringify(__OBJECT_LABEL__) "." #tag ":\n\t" \
    ".endif"
#endif

#ifdef __clang__ /* clang's builtin assembler can't do .subsection */

#define UNLIKELY_START_SECTION ".pushsection .text.unlikely,\"ax\""
#define UNLIKELY_END_SECTION   ".popsection"

#else

#define UNLIKELY_START_SECTION ".subsection 1"
#define UNLIKELY_END_SECTION   ".subsection 0"

#endif

#define UNLIKELY_START(cond, tag) \
    "j" #cond " .Lunlikely." #tag ".%=;\n\t" \
    UNLIKELY_START_SECTION "\n\t" \
    SUBSECTION_LBL(unlikely) "\n" \
    ".Lunlikely." #tag ".%=:"

#define UNLIKELY_END(tag) \
    "jmp .Llikely." #tag ".%=;\n\t" \
    UNLIKELY_END_SECTION "\n" \
    ".Llikely." #tag ".%=:"

#endif

/* "Raw" instruction opcodes */
#define __ASM_CLAC .byte 0x0f,0x01,0xca
#define __ASM_STAC .byte 0x0f,0x01,0xcb

#ifdef __ASSEMBLY__
#define ASM_AC(op) \
    661: ASM_NOP3; \
    .pushsection .altinstr_replacement, "ax"; \
    662: __ASM_##op; \
    .popsection; \
    .pushsection .altinstructions, "a"; \
    altinstruction_entry 661b, 661b, X86_FEATURE_ALWAYS, 3, 0; \
    altinstruction_entry 661b, 662b, X86_FEATURE_XEN_SMAP, 3, 3; \
    .popsection

#define ASM_STAC ASM_AC(STAC)
#define ASM_CLAC ASM_AC(CLAC)

#define CR4_PV32_RESTORE \
    667: ASM_NOP5; \
    .pushsection .altinstr_replacement, "ax"; \
    668: call cr4_pv32_restore; \
    .section .altinstructions, "a"; \
    altinstruction_entry 667b, 667b, X86_FEATURE_ALWAYS, 5, 0; \
    altinstruction_entry 667b, 668b, X86_FEATURE_XEN_SMEP, 5, 5; \
    altinstruction_entry 667b, 668b, X86_FEATURE_XEN_SMAP, 5, 5; \
    .popsection

#else
static always_inline void clac(void)
{
    /* Note: a barrier is implicit in alternative() */
    alternative(ASM_NOP3, __stringify(__ASM_CLAC), X86_FEATURE_XEN_SMAP);
}

static always_inline void stac(void)
{
    /* Note: a barrier is implicit in alternative() */
    alternative(ASM_NOP3, __stringify(__ASM_STAC), X86_FEATURE_XEN_SMAP);
}
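
/*
 * Illustrative (hypothetical) pattern: a direct access to guest-controlled
 * memory is bracketed so that SMAP is only suspended for the access itself:
 *
 *     stac();
 *     ...access guest memory, with exception-table fixups as needed...
 *     clac();
 */
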
#endif

#ifdef __ASSEMBLY__
.macro SAVE_ALL op
.ifeqs "\op", "CLAC"
        ASM_CLAC
.else
.ifeqs "\op", "STAC"
        ASM_STAC
.else
.ifnb \op
        .err
.endif
.endif
.endif
        addq $-(UREGS_error_code-UREGS_r15), %rsp
        cld
        movq %rdi,UREGS_rdi(%rsp)
        movq %rsi,UREGS_rsi(%rsp)
        movq %rdx,UREGS_rdx(%rsp)
        movq %rcx,UREGS_rcx(%rsp)
        movq %rax,UREGS_rax(%rsp)
        movq %r8,UREGS_r8(%rsp)
        movq %r9,UREGS_r9(%rsp)
        movq %r10,UREGS_r10(%rsp)
        movq %r11,UREGS_r11(%rsp)
        movq %rbx,UREGS_rbx(%rsp)
        movq %rbp,UREGS_rbp(%rsp)
        SETUP_EXCEPTION_FRAME_POINTER(UREGS_rbp)
        movq %r12,UREGS_r12(%rsp)
        movq %r13,UREGS_r13(%rsp)
        movq %r14,UREGS_r14(%rsp)
        movq %r15,UREGS_r15(%rsp)
.endm
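
/*
 * Illustrative (hypothetical) entry stub: build a full struct cpu_user_regs
 * frame right after the hardware exception frame and error code have been
 * pushed, clearing EFLAGS.AC first so SMAP stays enforced while handling
 * the event:
 *
 *     ENTRY(hypothetical_entry)
 *             SAVE_ALL CLAC
 *             ...
 */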

/*
 * Save all registers not preserved by C code or used in entry/exit code. Mark
 * the frame as partial.
 *
 * @type: exception type
 * @compat: R8-R15 don't need saving, and the frame nevertheless is complete
 */
.macro SAVE_VOLATILE type compat=0
.if \compat
        movl $\type,UREGS_entry_vector-UREGS_error_code(%rsp)
.else
        movl $\type|TRAP_regs_partial,\
             UREGS_entry_vector-UREGS_error_code(%rsp)
.endif
        addq $-(UREGS_error_code-UREGS_r15),%rsp
        cld
        movq %rdi,UREGS_rdi(%rsp)
        movq %rsi,UREGS_rsi(%rsp)
        movq %rdx,UREGS_rdx(%rsp)
        movq %rcx,UREGS_rcx(%rsp)
        movq %rax,UREGS_rax(%rsp)
.if !\compat
        movq %r8,UREGS_r8(%rsp)
        movq %r9,UREGS_r9(%rsp)
        movq %r10,UREGS_r10(%rsp)
        movq %r11,UREGS_r11(%rsp)
.endif
        movq %rbx,UREGS_rbx(%rsp)
        movq %rbp,UREGS_rbp(%rsp)
        SETUP_EXCEPTION_FRAME_POINTER(UREGS_rbp)
.endm

/*
 * Complete a frame potentially only partially saved.
 */
.macro SAVE_PRESERVED
        btrl $_TRAP_regs_partial,UREGS_entry_vector(%rsp)
        jnc 987f
        movq %r12,UREGS_r12(%rsp)
        movq %r13,UREGS_r13(%rsp)
        movq %r14,UREGS_r14(%rsp)
        movq %r15,UREGS_r15(%rsp)
987:
.endm
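
/*
 * Illustrative (hypothetical) pairing: an entry point that initially saves
 * only the volatile registers completes the frame before reaching code that
 * may need (or modify) R12-R15:
 *
 *     SAVE_VOLATILE TRAP_syscall
 *     ...
 *     SAVE_PRESERVED
 */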

#define LOAD_ONE_REG(reg, compat) \
.if !(compat); \
        movq UREGS_r##reg(%rsp),%r##reg; \
.else; \
        movl UREGS_r##reg(%rsp),%e##reg; \
.endif

/*
 * Restore all previously saved registers.
 *
 * @adj: extra stack pointer adjustment to be folded into the adjustment done
 *       anyway at the end of the macro
 * @compat: R8-R15 don't need reloading, but they are clobbered for added
 *          safety against information leaks.
 */
.macro RESTORE_ALL adj=0 compat=0
.if !\compat
        testl $TRAP_regs_dirty,UREGS_entry_vector(%rsp)
        movq UREGS_r11(%rsp),%r11
        movq UREGS_r10(%rsp),%r10
        movq UREGS_r9(%rsp),%r9
        movq UREGS_r8(%rsp),%r8
.else
        xor %r11, %r11
        xor %r10, %r10
        xor %r9, %r9
        xor %r8, %r8
.endif
        LOAD_ONE_REG(ax, \compat)
        LOAD_ONE_REG(cx, \compat)
        LOAD_ONE_REG(dx, \compat)
        LOAD_ONE_REG(si, \compat)
        LOAD_ONE_REG(di, \compat)
.if !\compat
        jz 987f
        movq UREGS_r15(%rsp),%r15
        movq UREGS_r14(%rsp),%r14
        movq UREGS_r13(%rsp),%r13
        movq UREGS_r12(%rsp),%r12
#ifndef NDEBUG
        .subsection 1
987:    testl $TRAP_regs_partial,UREGS_entry_vector(%rsp)
        jnz 987f
        cmpq UREGS_r15(%rsp),%r15
        jne 789f
        cmpq UREGS_r14(%rsp),%r14
        jne 789f
        cmpq UREGS_r13(%rsp),%r13
        jne 789f
        cmpq UREGS_r12(%rsp),%r12
        je 987f
789:    BUG /* Corruption of partial register state. */
        .subsection 0
#endif
.else
        xor %r15, %r15
        xor %r14, %r14
        xor %r13, %r13
        xor %r12, %r12
.endif
987:
        LOAD_ONE_REG(bp, \compat)
        LOAD_ONE_REG(bx, \compat)
        subq $-(UREGS_error_code-UREGS_r15+\adj), %rsp
.endm
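
/*
 * Illustrative (hypothetical) exit path: undo a SAVE_ALL frame and return to
 * the interrupted context, folding the 8-byte error code into the final %rsp
 * adjustment:
 *
 *     RESTORE_ALL adj=8
 *     iretq
 */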

#endif

#ifdef CONFIG_PERF_COUNTERS
#define PERFC_INCR(_name,_idx,_cur) \
        pushq _cur; \
        movslq VCPU_processor(_cur),_cur; \
        pushq %rdx; \
        leaq __per_cpu_offset(%rip),%rdx; \
        movq (%rdx,_cur,8),_cur; \
        leaq per_cpu__perfcounters(%rip),%rdx; \
        addq %rdx,_cur; \
        popq %rdx; \
        incl ASM_PERFC_##_name*4(_cur,_idx,4); \
        popq _cur
#else
#define PERFC_INCR(_name,_idx,_cur)
#endif
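
/*
 * Illustrative (hypothetical) use: bump a per-CPU perf counter from assembly,
 * indexing by the value in %rax, with %rbx holding the current vcpu pointer
 * (it is used as scratch but preserved via push/pop):
 *
 *     PERFC_INCR(exceptions, %rax, %rbx)
 */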

/* Work around AMD erratum #88 */
#define safe_swapgs \
    "mfence; swapgs;"

#ifdef __sun__
#define REX64_PREFIX "rex64\\"
#elif defined(__clang__)
#define REX64_PREFIX ".byte 0x48; "
#else
#define REX64_PREFIX "rex64/"
#endif
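
/*
 * Illustrative (hypothetical) use: emit the REX.W prefix in a
 * toolchain-independent way from inline assembly, e.g. to get the 64-bit
 * form of fxsave:
 *
 *     asm volatile ( REX64_PREFIX "fxsave (%0)" :: "R" (fxsave_area) : "memory" );
 */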

#define ELFNOTE(name, type, desc) \
    .pushsection .note.name, "a", @note ; \
    .p2align 2 ; \
    .long 2f - 1f /* namesz */ ; \
    .long 4f - 3f /* descsz */ ; \
    .long type /* type */ ; \
1:  .asciz #name /* name */ ; \
2:  .p2align 2 ; \
3:  desc /* desc */ ; \
4:  .p2align 2 ; \
    .popsection
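
/*
 * Illustrative (hypothetical) use from an assembly file, emitting a note
 * whose descriptor is a string:
 *
 *     ELFNOTE(Xen, XEN_ELFNOTE_GUEST_OS, .asciz "xen")
 */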

#endif /* __X86_ASM_DEFNS_H__ */