
Searched refs:msr (Results 1 – 25 of 227) sorted by relevance


/arch/x86/kernel/cpu/
perfctr-watchdog.c
51 if (msr >= MSR_F15H_PERF_CTR) in nmi_perfctr_msr_to_bit()
53 return msr - MSR_K7_PERFCTR0; in nmi_perfctr_msr_to_bit()
60 return msr - MSR_P6_PERFCTR0; in nmi_perfctr_msr_to_bit()
62 return msr - MSR_KNC_PERFCTR0; in nmi_perfctr_msr_to_bit()
64 return msr - MSR_P4_BPU_PERFCTR0; in nmi_perfctr_msr_to_bit()
84 if (msr >= MSR_F15H_PERF_CTL) in nmi_evntsel_msr_to_bit()
86 return msr - MSR_K7_EVNTSEL0; in nmi_evntsel_msr_to_bit()
93 return msr - MSR_P6_EVNTSEL0; in nmi_evntsel_msr_to_bit()
95 return msr - MSR_KNC_EVNTSEL0; in nmi_evntsel_msr_to_bit()
97 return msr - MSR_P4_BSU_ESCR0; in nmi_evntsel_msr_to_bit()
[all …]
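The two converters above fold each CPU family's counter and event-select MSRs onto a shared bit index for the watchdog's reservation bitmap. A minimal sketch of the same mapping, assuming the MSR_* bases from <asm/msr-index.h>; the bounds are illustrative and the family dispatch is left to the caller:

static unsigned int perfctr_msr_to_bit(unsigned int msr)
{
        if (msr >= MSR_K7_PERFCTR0 && msr <= MSR_K7_PERFCTR3)
                return msr - MSR_K7_PERFCTR0;   /* AMD K7/K8: bits 0-3 */
        if (msr >= MSR_P6_PERFCTR0 && msr <= MSR_P6_PERFCTR1)
                return msr - MSR_P6_PERFCTR0;   /* Intel P6: bits 0-1 */
        return 0;                               /* unknown family */
}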
feat_ctl.c
120 u64 msr; in init_ia32_feat_ctl() local
122 if (rdmsrq_safe(MSR_IA32_FEAT_CTL, &msr)) { in init_ia32_feat_ctl()
142 if (msr & FEAT_CTL_LOCKED) in init_ia32_feat_ctl()
149 msr = FEAT_CTL_LOCKED; in init_ia32_feat_ctl()
157 msr |= FEAT_CTL_VMX_ENABLED_OUTSIDE_SMX; in init_ia32_feat_ctl()
160 msr |= FEAT_CTL_VMX_ENABLED_INSIDE_SMX; in init_ia32_feat_ctl()
164 msr |= FEAT_CTL_SGX_ENABLED; in init_ia32_feat_ctl()
166 msr |= FEAT_CTL_SGX_LC_ENABLED; in init_ia32_feat_ctl()
169 wrmsrq(MSR_IA32_FEAT_CTL, msr); in init_ia32_feat_ctl()
177 if ( (tboot && !(msr & FEAT_CTL_VMX_ENABLED_INSIDE_SMX)) || in init_ia32_feat_ctl()
[all …]
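The flow above is probe-then-lock: read MSR_IA32_FEAT_CTL with the faulting-safe accessor, leave it alone if firmware already set FEAT_CTL_LOCKED, otherwise write the wanted enable bits together with the lock bit. A hedged sketch of that shape (the choice of enable bit is illustrative):

static void enable_vmx_and_lock(void)
{
        u64 msr;

        if (rdmsrq_safe(MSR_IA32_FEAT_CTL, &msr))
                return;         /* MSR faulted: feature control absent */
        if (msr & FEAT_CTL_LOCKED)
                return;         /* firmware locked it; read-only until reset */
        msr |= FEAT_CTL_LOCKED | FEAT_CTL_VMX_ENABLED_OUTSIDE_SMX;
        wrmsrq(MSR_IA32_FEAT_CTL, msr);
}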
tsx.c
111 u64 msr; in tsx_clear_cpuid() local
119 rdmsrq(MSR_TSX_FORCE_ABORT, msr); in tsx_clear_cpuid()
120 msr |= MSR_TFA_TSX_CPUID_CLEAR; in tsx_clear_cpuid()
121 wrmsrq(MSR_TSX_FORCE_ABORT, msr); in tsx_clear_cpuid()
123 rdmsrq(MSR_IA32_TSX_CTRL, msr); in tsx_clear_cpuid()
124 msr |= TSX_CTRL_CPUID_CLEAR; in tsx_clear_cpuid()
125 wrmsrq(MSR_IA32_TSX_CTRL, msr); in tsx_clear_cpuid()
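Both branches above are the plain MSR read-modify-write. As a generic sketch (set_msr_bits() is a hypothetical name for illustration; the kernel's real helpers for single bits are msr_set_bit()/msr_clear_bit() in /arch/x86/lib/msr.c, listed below):

static void set_msr_bits(u32 reg, u64 bits)
{
        u64 val;

        rdmsrq(reg, val);       /* read the current value */
        val |= bits;            /* OR in the requested bits */
        wrmsrq(reg, val);       /* write the result back */
}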
/arch/x86/include/asm/
msr.h
20 struct msr reg;
21 struct msr __percpu *msrs;
95 return __rdmsr(msr); in native_rdmsrq()
102 __wrmsrq((msr), (val))
108 val = __rdmsr(msr); in native_read_msr()
125 : "c" (msr)); in native_read_msr_safe()
137 native_wrmsrq(msr, val); in native_write_msr()
194 #define rdmsrq(msr, val) \ argument
330 #define rdmsrl(msr, val) rdmsrq(msr, val) argument
331 #define wrmsrl(msr, val) wrmsrq(msr, val) argument
[all …]
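A usage sketch for the accessors above: rdmsrq()/wrmsrq() move a full u64 and #GP on an unimplemented MSR, the *_safe() variants return a nonzero error instead, and lines 330-331 keep rdmsrl()/wrmsrl() as plain aliases for the older names. MSR_IA32_TSC_ADJUST is only an example register:

static void nudge_tsc_adjust(void)
{
        u64 adj;

        if (rdmsrq_safe(MSR_IA32_TSC_ADJUST, &adj))
                return;                         /* nonzero: read faulted */
        wrmsrq(MSR_IA32_TSC_ADJUST, adj + 1);   /* unchecked write */
}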
msr-trace.h
3 #define TRACE_SYSTEM msr
6 #define TRACE_INCLUDE_FILE msr-trace
22 TP_PROTO(unsigned msr, u64 val, int failed),
23 TP_ARGS(msr, val, failed),
25 __field( unsigned, msr )
30 __entry->msr = msr;
35 __entry->msr,
41 TP_PROTO(unsigned msr, u64 val, int failed),
42 TP_ARGS(msr, val, failed)
47 TP_ARGS(msr, val, failed)
[all …]
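The event class defined above is stamped out once per direction; lines 41-47 are those instantiations. A sketch of one of them, assuming the class is named msr_trace_class as in mainline:

DEFINE_EVENT(msr_trace_class, read_msr,
        TP_PROTO(unsigned msr, u64 val, int failed),
        TP_ARGS(msr, val, failed)
);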
/arch/x86/lib/
msr.c
9 struct msr __percpu *msrs_alloc(void) in msrs_alloc()
11 struct msr __percpu *msrs = NULL; in msrs_alloc()
13 msrs = alloc_percpu(struct msr); in msrs_alloc()
39 static int msr_read(u32 msr, struct msr *m) in msr_read() argument
44 err = rdmsrq_safe(msr, &val); in msr_read()
59 static int msr_write(u32 msr, struct msr *m) in msr_write() argument
61 return wrmsrq_safe(msr, m->q); in msr_write()
66 struct msr m, m1; in __flip_bit()
72 err = msr_read(msr, &m); in __flip_bit()
85 err = msr_write(msr, &m1); in __flip_bit()
[all …]
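__flip_bit() above is the engine behind msr_set_bit()/msr_clear_bit(): read the MSR, flip one bit, and write back only if the value changed. A caller sketch; the bit position is illustrative (bit 0 of MSR_TSX_FORCE_ABORT is RTM_FORCE_ABORT):

static void force_rtm_abort_sketch(void)
{
        /* <0: access faulted; 0: bit already set; >0: bit was flipped */
        int err = msr_set_bit(MSR_TSX_FORCE_ABORT, 0);

        if (err < 0)
                pr_warn("msr_set_bit failed: %d\n", err);
}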
msr-smp.c
11 struct msr *reg; in __rdmsr_on_cpu()
24 struct msr *reg; in __wrmsr_on_cpu()
98 struct msr __percpu *msrs, in __rwmsr_on_cpus()
125 void rdmsr_on_cpus(const struct cpumask *mask, u32 msr_no, struct msr __percpu *msrs) in rdmsr_on_cpus()
139 void wrmsr_on_cpus(const struct cpumask *mask, u32 msr_no, struct msr __percpu *msrs) in wrmsr_on_cpus()
146 struct msr_info msr; member
156 rv->msr.err = rdmsr_safe(rv->msr.msr_no, &rv->msr.reg.l, &rv->msr.reg.h); in __rdmsr_safe_on_cpu()
177 rv.msr.msr_no = msr_no; in rdmsr_safe_on_cpu()
182 err = rv.msr.err; in rdmsr_safe_on_cpu()
184 *l = rv.msr.reg.l; in rdmsr_safe_on_cpu()
[all …]
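The *_on_cpu() helpers above bounce the access to the target CPU via smp_call_function_single() and copy the result back, returning the remote rdmsr_safe() error. Sketch of a safe cross-CPU read (MSR_IA32_TSC_ADJUST is illustrative):

static int read_tsc_adjust_on(unsigned int cpu, u64 *val)
{
        u32 lo, hi;
        int err = rdmsr_safe_on_cpu(cpu, MSR_IA32_TSC_ADJUST, &lo, &hi);

        if (!err)
                *val = ((u64)hi << 32) | lo;    /* recombine the halves */
        return err;
}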
/arch/powerpc/kvm/
book3s_hv_tm.c
19 u64 msr = vcpu->arch.shregs.msr; in emulate_tx_failure() local
25 if (msr & MSR_PR) { in emulate_tx_failure()
45 u64 msr = vcpu->arch.shregs.msr; in kvmhv_p9_tm_emulation() local
111 msr = (msr & ~MSR_TS_MASK) | MSR_TS_T; in kvmhv_p9_tm_emulation()
112 vcpu->arch.shregs.msr = msr; in kvmhv_p9_tm_emulation()
147 if (!(msr & MSR_TM)) { in kvmhv_p9_tm_emulation()
161 msr = (msr & ~MSR_TS_MASK) | MSR_TS_T; in kvmhv_p9_tm_emulation()
164 msr = (msr & ~MSR_TS_MASK) | MSR_TS_S; in kvmhv_p9_tm_emulation()
166 vcpu->arch.shregs.msr = msr; in kvmhv_p9_tm_emulation()
179 if (!(msr & MSR_TM)) { in kvmhv_p9_tm_emulation()
[all …]
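Note the change of meaning: on powerpc, msr is the architected Machine State Register, and the emulation above rewrites its two transaction-state (TS) bits as a field rather than OR-ing them in. Sketch of that update:

/* Replace MSR[TS] wholesale so suspended (S) and transactional (T)
 * can never be set at the same time. */
static u64 msr_set_ts(u64 msr, u64 ts)          /* ts: MSR_TS_T or MSR_TS_S */
{
        return (msr & ~MSR_TS_MASK) | ts;
}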
book3s_hv_tm_builtin.c
23 u64 newmsr, msr, bescr; in kvmhv_p9_tm_emulation_early() local
45 vcpu->arch.shregs.msr = newmsr; in kvmhv_p9_tm_emulation_early()
52 msr = vcpu->arch.shregs.msr; in kvmhv_p9_tm_emulation_early()
57 ((msr & MSR_PR) && !(mfspr(SPRN_FSCR) & FSCR_EBB))) in kvmhv_p9_tm_emulation_early()
67 msr = (msr & ~MSR_TS_MASK) | MSR_TS_T; in kvmhv_p9_tm_emulation_early()
68 vcpu->arch.shregs.msr = msr; in kvmhv_p9_tm_emulation_early()
77 msr = vcpu->arch.shregs.msr; in kvmhv_p9_tm_emulation_early()
82 newmsr = (newmsr & ~MSR_LE) | (msr & MSR_LE); in kvmhv_p9_tm_emulation_early()
84 vcpu->arch.shregs.msr = newmsr; in kvmhv_p9_tm_emulation_early()
90 msr = vcpu->arch.shregs.msr; in kvmhv_p9_tm_emulation_early()
[all …]
/arch/x86/kvm/
mtrr.c
28 switch (msr) { in find_mtrr()
30 index = msr - MTRRphysBase_MSR(0); in find_mtrr()
36 index = msr - MSR_MTRRfix16K_80000; in find_mtrr()
46 index = msr - MSR_MTRRfix4K_C0000; in find_mtrr()
66 if (msr == MSR_MTRRdefType) { in kvm_mtrr_valid()
70 } else if (msr >= MSR_MTRRfix64K_00000 && msr <= MSR_MTRRfix4K_F8000) { in kvm_mtrr_valid()
83 if ((msr & 1) == 0) { in kvm_mtrr_valid()
100 mtrr = find_mtrr(vcpu, msr); in kvm_mtrr_set_msr()
104 if (!kvm_mtrr_valid(vcpu, msr, data)) in kvm_mtrr_set_msr()
116 if (msr == MSR_MTRRcap) { in kvm_mtrr_get_msr()
[all …]
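The subtractions above turn an MTRR MSR address into an array index. For the variable ranges, MTRRphysBase_MSR(n) expands to 0x200 + 2n, so base and mask registers interleave; a sketch of the decode:

static void decode_var_mtrr(u32 msr, unsigned int *range, bool *is_mask)
{
        u32 off = msr - MTRRphysBase_MSR(0);

        *range = off / 2;       /* which variable-range pair */
        *is_mask = off & 1;     /* even: PhysBase, odd: PhysMask */
}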
/arch/m68k/bvme6000/
config.c
166 unsigned char msr; in bvme6000_timer_int() local
169 msr = rtc->msr & 0xc0; in bvme6000_timer_int()
170 rtc->msr = msr | 0x20; /* Ack the interrupt */ in bvme6000_timer_int()
191 unsigned char msr = rtc->msr & 0xc0; in bvme6000_sched_init() local
211 rtc->msr = msr; in bvme6000_sched_init()
233 unsigned char msr, msb; in bvme6000_read_clk() local
239 msr = rtc->msr & 0xc0; in bvme6000_read_clk()
244 t1int = rtc->msr & 0x20; in bvme6000_read_clk()
259 rtc->msr = msr; in bvme6000_read_clk()
286 unsigned char msr = rtc->msr & 0xc0; in bvme6000_hwclk() local
[all …]
rtc.c
42 unsigned char msr; in rtc_ioctl() local
52 msr = rtc->msr & 0xc0; in rtc_ioctl()
53 rtc->msr = 0x40; in rtc_ioctl()
66 rtc->msr = msr; in rtc_ioctl()
108 msr = rtc->msr & 0xc0; in rtc_ioctl()
109 rtc->msr = 0x40; in rtc_ioctl()
123 rtc->msr = msr; in rtc_ioctl()
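Different meaning again: in the two BVME6000 files above, msr is the mode/status register of the board's memory-mapped RTC chip. The recurring idiom preserves the two bank-select bits in 0xc0 while writing the rest; a sketch of the interrupt ack from bvme6000_timer_int(), reusing the RtcPtr_t/BVME_RTC_BASE definitions those files rely on:

static void rtc_ack_timer1(void)
{
        volatile RtcPtr_t rtc = (RtcPtr_t)BVME_RTC_BASE;
        unsigned char msr = rtc->msr & 0xc0;    /* keep bank-select bits */

        rtc->msr = msr | 0x20;                  /* ack the T1 interrupt */
}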
/arch/x86/events/
probe.c
21 perf_msr_probe(struct perf_msr *msr, int cnt, bool zero, void *data) in perf_msr_probe() argument
31 if (!msr[bit].no_check) { in perf_msr_probe()
32 struct attribute_group *grp = msr[bit].grp; in perf_msr_probe()
42 if (!msr[bit].msr) in perf_msr_probe()
45 if (msr[bit].test && !msr[bit].test(bit, data)) in perf_msr_probe()
48 if (rdmsrq_safe(msr[bit].msr, &val)) in perf_msr_probe()
51 mask = msr[bit].mask; in perf_msr_probe()
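perf_msr_probe() above walks a table of struct perf_msr entries, read-tests each MSR with rdmsrq_safe() (optionally rejecting all-zero values), and returns a bitmask of the usable slots. A hedged caller sketch; the table, attribute group, and MSR choice are all hypothetical:

static struct attribute_group pkg_events_group; /* hypothetical sysfs group */

static struct perf_msr example_msrs[] = {
        { .msr = MSR_PKG_ENERGY_STATUS, .grp = &pkg_events_group },
};

static bool example_probe(void)
{
        unsigned long avail = perf_msr_probe(example_msrs,
                                             ARRAY_SIZE(example_msrs),
                                             false, NULL);

        return avail & BIT(0);  /* did entry 0 probe successfully? */
}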
/arch/microblaze/kernel/
process.c
72 local_save_flags(childregs->msr); in copy_thread()
73 ti->cpu_context.msr = childregs->msr & ~MSR_IE; in copy_thread()
83 childregs->msr |= MSR_UMS; in copy_thread()
95 childregs->msr &= ~MSR_EIP; in copy_thread()
96 childregs->msr |= MSR_IE; in copy_thread()
97 childregs->msr &= ~MSR_VM; in copy_thread()
98 childregs->msr |= MSR_VMS; in copy_thread()
101 ti->cpu_context.msr = (childregs->msr|MSR_VM); in copy_thread()
103 ti->cpu_context.msr &= ~MSR_IE; in copy_thread()
128 regs->msr |= MSR_UMS; in start_thread()
[all …]
/arch/x86/kvm/svm/
pmu.c
47 switch (msr) { in get_gp_pmc_amd()
56 if (!(msr & 0x1) != (type == PMU_TYPE_EVNTSEL)) in get_gp_pmc_amd()
62 idx = msr - MSR_K7_EVNTSEL0; in get_gp_pmc_amd()
67 idx = msr - MSR_K7_PERFCTR0; in get_gp_pmc_amd()
98 pmc = get_gp_pmc_amd(pmu, msr, PMU_TYPE_COUNTER); in amd_msr_idx_to_pmc()
108 switch (msr) { in amd_is_valid_msr()
118 if (msr > MSR_F15H_PERF_CTR5 && in amd_is_valid_msr()
124 return amd_msr_idx_to_pmc(vcpu, msr); in amd_is_valid_msr()
131 u32 msr = msr_info->index; in amd_pmu_get_msr() local
134 pmc = get_gp_pmc_amd(pmu, msr, PMU_TYPE_COUNTER); in amd_pmu_get_msr()
[all …]
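The parity test at line 56 above works because the F15h PMU MSRs interleave: PERF_CTL0 is 0xc0010200, PERF_CTR0 is 0xc0010201, and so on upward. Equivalent sketch:

static bool f15h_msr_is_counter(u32 msr)
{
        /* even addresses are PERF_CTLn (event select), odd are PERF_CTRn */
        return msr & 0x1;
}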
/arch/powerpc/kernel/
signal_64.c
130 unsigned long msr = regs->msr; in __unsafe_setup_sigcontext() local
147 msr |= MSR_VEC; in __unsafe_setup_sigcontext()
163 msr &= ~MSR_VSX; in __unsafe_setup_sigcontext()
176 msr |= MSR_VSX; in __unsafe_setup_sigcontext()
237 msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX); in setup_tm_sigcontexts()
263 msr |= MSR_VEC; in setup_tm_sigcontexts()
285 if (msr & MSR_FP) in setup_tm_sigcontexts()
310 msr |= MSR_VSX; in setup_tm_sigcontexts()
576 regs_set_return_msr(regs, regs->msr | (msr & MSR_TS_MASK)); in restore_tm_sigcontexts()
865 unsigned long msr = regs->msr; in handle_rt_signal64() local
[all …]
signal_32.c
271 unsigned long msr = regs->msr; in __unsafe_save_user_regs() local
283 msr |= MSR_VEC; in __unsafe_save_user_regs()
302 msr &= ~MSR_VSX; in __unsafe_save_user_regs()
312 msr |= MSR_VSX; in __unsafe_save_user_regs()
322 msr |= MSR_SPE; in __unsafe_save_user_regs()
400 msr |= MSR_VEC; in save_tm_user_regs_unsafe()
418 if (msr & MSR_FP) in save_tm_user_regs_unsafe()
436 msr |= MSR_VSX; in save_tm_user_regs_unsafe()
596 regs_set_return_msr(regs, (regs->msr & ~MSR_LE) | (msr & MSR_LE)); in restore_tm_user_regs()
746 unsigned long msr = regs->msr; in handle_rt_signal32() local
[all …]
cpu_setup_power.c
19 u64 msr; in init_hvmode_206() local
21 msr = mfmsr(); in init_hvmode_206()
22 if (msr & MSR_HV) in init_hvmode_206()
153 u64 msr; in __restore_cpu_power7() local
155 msr = mfmsr(); in __restore_cpu_power7()
185 u64 msr; in __restore_cpu_power8() local
191 msr = mfmsr(); in __restore_cpu_power8()
225 u64 msr; in __restore_cpu_power9() local
230 msr = mfmsr(); in __restore_cpu_power9()
268 u64 msr; in __restore_cpu_power10() local
[all …]
process.c
153 unsigned long msr; in __giveup_fpu() local
156 msr = tsk->thread.regs->msr; in __giveup_fpu()
159 msr &= ~MSR_VSX; in __giveup_fpu()
239 msr = tsk->thread.regs->msr; in __giveup_altivec()
240 msr &= ~MSR_VEC; in __giveup_altivec()
242 msr &= ~MSR_VSX; in __giveup_altivec()
302 unsigned long msr = tsk->thread.regs->msr; in __giveup_vsx() local
308 WARN_ON((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC))); in __giveup_vsx()
311 if (msr & MSR_FP) in __giveup_vsx()
520 msr = regs->msr; in restore_math()
[all …]
/arch/x86/xen/
pmu.c
132 static bool is_amd_pmu_msr(u32 msr) in is_amd_pmu_msr() argument
138 if ((msr >= MSR_F15H_PERF_CTL && in is_amd_pmu_msr()
140 (msr >= MSR_K7_EVNTSEL0 && in is_amd_pmu_msr()
213 switch (msr) { in xen_intel_pmu_emulate()
273 ((msr >= MSR_K7_EVNTSEL0) && (msr <= MSR_K7_PERFCTR3))) in xen_amd_pmu_emulate()
274 msr = get_fam15h_addr(msr); in xen_amd_pmu_emulate()
305 if (is_amd_pmu_msr(msr)) in pmu_msr_chk_emulated()
322 u32 msr; in xen_amd_read_pmc() local
326 native_read_msr_safe(msr, &val); in xen_amd_read_pmc()
344 u32 msr; in xen_intel_read_pmc() local
[all …]
/arch/arm64/kvm/hyp/nvhe/
hyp-init.S
100 msr mair_el2, x1
112 msr tpidr_el2, x0
122 msr tpidr_el2, x1
125 msr vttbr_el2, x1
128 msr vtcr_el2, x1
135 msr ttbr0_el2, x2
138 msr tcr_el2, x0
165 msr vbar_el2, x0
237 msr elr_el2, x1
239 msr spsr_el2, x0
[all …]
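Yet another meaning, here and in the remaining arm64 listings: msr is the A64 "move to system register" instruction, copying a general-purpose register into a system register. From C the kernel emits it through write_sysreg() (<asm/sysreg.h>); a minimal sketch:

static void install_el1_vectors(unsigned long vectors)
{
        write_sysreg(vectors, vbar_el1);        /* emits: msr vbar_el1, x<n> */
        isb();                                  /* synchronize the update */
}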
/arch/arm64/kernel/
hyp-stub.S
51 msr vbar_el2, x1
107 msr tpidr_el2, x0
111 msr cpacr_el1, x0
113 msr vbar_el1, x0
119 msr mdcr_el2, x0
123 msr tcr_el1, x0
125 msr ttbr0_el1, x0
127 msr ttbr1_el1, x0
129 msr mair_el1, x0
143 msr REG_PIR_EL1, x0
[all …]
/arch/powerpc/kernel/ptrace/
ptrace-tm.c
39 task->thread.ckpt_regs.msr &= ~MSR_DEBUGCHANGE; in set_user_ckpt_msr()
40 task->thread.ckpt_regs.msr |= msr & MSR_DEBUGCHANGE; in set_user_ckpt_msr()
63 if (!MSR_TM_ACTIVE(target->thread.regs->msr)) in tm_cgpr_active()
97 if (!MSR_TM_ACTIVE(target->thread.regs->msr)) in tm_cgpr_get()
144 if (!MSR_TM_ACTIVE(target->thread.regs->msr)) in tm_cgpr_set()
205 if (!MSR_TM_ACTIVE(target->thread.regs->msr)) in tm_cfpr_active()
238 if (!MSR_TM_ACTIVE(target->thread.regs->msr)) in tm_cfpr_get()
283 if (!MSR_TM_ACTIVE(target->thread.regs->msr)) in tm_cfpr_set()
317 if (!MSR_TM_ACTIVE(target->thread.regs->msr)) in tm_cvmx_active()
354 if (!MSR_TM_ACTIVE(target->thread.regs->msr)) in tm_cvmx_get()
[all …]
/arch/arm64/mm/
proc.S
125 msr tpidr_el0, x2
128 msr cpacr_el1, x6
134 msr tcr_el1, x8
135 msr vbar_el1, x9
136 msr mdscr_el1, x10
138 msr sctlr_el1, x12
140 msr sp_el0, x14
144 msr osdlr_el1, x5
146 msr oslar_el1, x11
183 msr ttbr1_el1, x0
[all …]
/arch/x86/power/
cpu.c
43 struct saved_msr *msr = ctxt->saved_msrs.array; in msr_save_context() local
44 struct saved_msr *end = msr + ctxt->saved_msrs.num; in msr_save_context()
46 while (msr < end) { in msr_save_context()
47 if (msr->valid) in msr_save_context()
48 rdmsrq(msr->info.msr_no, msr->info.reg.q); in msr_save_context()
49 msr++; in msr_save_context()
55 struct saved_msr *msr = ctxt->saved_msrs.array; in msr_restore_context() local
58 while (msr < end) { in msr_restore_context()
59 if (msr->valid) in msr_restore_context()
60 wrmsrq(msr->info.msr_no, msr->info.reg.q); in msr_restore_context()
[all …]
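The pair above is the suspend/resume bookkeeping: walk an array of struct saved_msr, read each slot's MSR on suspend and write it back on resume, skipping entries whose earlier probe marked them !valid. Restore-side sketch:

static void restore_saved_msrs(struct saved_msr *msr, unsigned int num)
{
        struct saved_msr *end = msr + num;

        for (; msr < end; msr++)
                if (msr->valid)
                        wrmsrq(msr->info.msr_no, msr->info.reg.q);
}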

Completed in 48 milliseconds
