1 /* SPDX-License-Identifier: GPL-2.0-only */
2 #ifndef X86_MSR_H
3 #define X86_MSR_H
4
5 #include <xen/errno.h>
6 #include <xen/percpu.h>
7
8 #include <asm/alternative.h>
9 #include <asm/asm_defns.h>
10 #include <asm/msr-index.h>
11
/*
 * Read MSR @msr, placing the low 32 bits in @val1 and the high 32 bits in
 * @val2.  Both @val1 and @val2 must be lvalues.  No fault handling: reading
 * a non-existent MSR raises #GP.
 */
#define rdmsr(msr,val1,val2) \
     __asm__ __volatile__("rdmsr" \
            : "=a" (val1), "=d" (val2) \
            : "c" (msr))
16
/*
 * Read MSR @msr, assigning the full 64-bit value to @val (which must be an
 * lvalue).  No fault handling: reading a non-existent MSR raises #GP.
 *
 * The temporaries keep the asm outputs out of @val until both halves have
 * been combined, so @val may safely be narrower than 64 bits.
 */
#define rdmsrl(msr,val) do { unsigned long a__,b__; \
       __asm__ __volatile__("rdmsr" \
            : "=a" (a__), "=d" (b__) \
            : "c" (msr)); \
       val = a__ | ((uint64_t)b__<<32); \
} while(0)
23
/*
 * Write the 64-bit value @val2:@val1 (high:low halves) to MSR @msr.
 * No fault handling: writing an invalid MSR or value raises #GP.
 */
#define wrmsr(msr,val1,val2) \
     __asm__ __volatile__("wrmsr" \
            : /* no outputs */ \
            : "c" (msr), "a" (val1), "d" (val2))
28
/* Write the 64-bit value @val to MSR @msr.  Faults raise #GP unhandled. */
static inline void wrmsrl(unsigned int msr, uint64_t val)
{
    /* Split into the EDX:EAX halves that the WRMSR instruction expects. */
    wrmsr(msr, (uint32_t)val, (uint32_t)(val >> 32));
}
35
/* Non-serialising WRMSR, when available. Falls back to a serialising WRMSR. */
static inline void wrmsrns(uint32_t msr, uint64_t val)
{
    uint32_t lo = val, hi = val >> 32;

    /*
     * WRMSR is 2 bytes. WRMSRNS is 3 bytes. Pad WRMSR with a redundant CS
     * prefix to avoid a trailing NOP.
     *
     * 0f 01 c6 is the WRMSRNS opcode; spelled as raw bytes so old
     * assemblers that don't know the mnemonic still build this.
     */
    alternative_input(".byte 0x2e; wrmsr",
                      ".byte 0x0f,0x01,0xc6", X86_FEATURE_WRMSRNS,
                      "c" (msr), "a" (lo), "d" (hi));
}
49
/*
 * rdmsr with exception handling.
 *
 * Reads MSR @msr into @val (an lvalue).  Evaluates to 0 on success.  If
 * the RDMSR faults, the fixup code zeroes both result halves (so @val
 * reads as 0) and the expression evaluates to -EFAULT instead.
 */
#define rdmsr_safe(msr,val) ({\
    int rc_; \
    uint64_t lo_, hi_; \
    __asm__ __volatile__( \
        "1: rdmsr\n2:\n" \
        ".section .fixup,\"ax\"\n" \
        "3: xorl %k0,%k0\n; xorl %k1,%k1\n" \
        " movl %5,%2\n; jmp 2b\n" \
        ".previous\n" \
        _ASM_EXTABLE(1b, 3b) \
        : "=a" (lo_), "=d" (hi_), "=&r" (rc_) \
        : "c" (msr), "2" (0), "i" (-EFAULT)); \
    val = lo_ | (hi_ << 32); \
    rc_; })
65
/*
 * wrmsr with exception handling.  Writes @val to MSR @msr, returning 0 on
 * success or -EFAULT if the WRMSR faults.  Uses asm goto so the failure
 * path is a direct branch to the local label via the extable entry.
 */
static inline int wrmsr_safe(unsigned int msr, uint64_t val)
{
    uint32_t lo = val, hi = val >> 32;

    asm_inline goto (
        "1: wrmsr\n\t"
        _ASM_EXTABLE(1b, %l[fault])
        :
        : "a" (lo), "c" (msr), "d" (hi)
        :
        : fault );

    return 0;

 fault:
    return -EFAULT;
}
84
85 DECLARE_PER_CPU(uint64_t, efer);
read_efer(void)86 static inline uint64_t read_efer(void)
87 {
88 return this_cpu(efer);
89 }
90
write_efer(uint64_t val)91 static inline void write_efer(uint64_t val)
92 {
93 this_cpu(efer) = val;
94 wrmsrl(MSR_EFER, val);
95 }
96
97 DECLARE_PER_CPU(uint32_t, tsc_aux);
98
99 /* Lazy update of MSR_TSC_AUX */
wrmsr_tsc_aux(uint32_t val)100 static inline void wrmsr_tsc_aux(uint32_t val)
101 {
102 uint32_t *this_tsc_aux = &this_cpu(tsc_aux);
103
104 if ( *this_tsc_aux != val )
105 {
106 wrmsr(MSR_TSC_AUX, val, 0);
107 *this_tsc_aux = val;
108 }
109 }
110
111 #endif /* X86_MSR_H */
112