
Searched refs:msr (Results 1 – 25 of 358) sorted by relevance


/linux-6.3-rc2/tools/testing/selftests/kvm/x86_64/
hyperv_features.c
47 GUEST_ASSERT(msr->idx); in guest_msr()
49 if (msr->write) in guest_msr()
50 vector = wrmsr_safe(msr->idx, msr->write_val); in guest_msr()
52 if (!vector && (!msr->write || !is_write_only_msr(msr->idx))) in guest_msr()
63 if (msr->write) in guest_msr()
64 GUEST_ASSERT_3(msr_val == msr->write_val, msr->idx, in guest_msr()
130 struct msr_data *msr; in guest_test_msrs_access() local
181 msr->write = true; in guest_test_msrs_access()
210 msr->write = true; in guest_test_msrs_access()
229 msr->write = true; in guest_test_msrs_access()
[all …]
userspace_msr_exit_test.c
419 run->msr.index, msr_index); in process_rdmsr()
421 switch (run->msr.index) { in process_rdmsr()
423 run->msr.data = 0; in process_rdmsr()
426 run->msr.error = 1; in process_rdmsr()
432 run->msr.data = MSR_FS_BASE; in process_rdmsr()
456 switch (run->msr.index) { in process_wrmsr()
458 if (run->msr.data != 0) in process_wrmsr()
459 run->msr.error = 1; in process_wrmsr()
462 if (run->msr.data != 1) in process_wrmsr()
463 run->msr.error = 1; in process_wrmsr()
[all …]
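The process_rdmsr()/process_wrmsr() hits above are the userspace side of KVM's MSR-exit interface: when a filtered MSR access traps out of the guest, the VMM receives a KVM_EXIT_X86_RDMSR or KVM_EXIT_X86_WRMSR exit and answers through the run->msr fields shown. A minimal sketch of such a handler follows; it assumes a VM already configured for user-space MSR exits (KVM_CAP_X86_USER_SPACE_MSR), and the function name handle_msr_exit() and the DEMO_MSR_INDEX constant are illustrative, not taken from the test.

/*
 * Sketch only: answer MSR exits forwarded to userspace by KVM.
 * Everything here except the kvm_run fields and exit reasons is
 * illustrative.
 */
#include <linux/kvm.h>          /* struct kvm_run, KVM_EXIT_X86_* */

#define DEMO_MSR_INDEX 0x123    /* illustrative index, not a real MSR */

static void handle_msr_exit(struct kvm_run *run)
{
	switch (run->exit_reason) {
	case KVM_EXIT_X86_RDMSR:
		if (run->msr.index == DEMO_MSR_INDEX)
			run->msr.data = 0;      /* value returned to the guest */
		else
			run->msr.error = 1;     /* guest takes a #GP instead */
		break;
	case KVM_EXIT_X86_WRMSR:
		if (run->msr.index != DEMO_MSR_INDEX || run->msr.data != 0)
			run->msr.error = 1;     /* reject the guest's write */
		break;
	}
}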
kvm_pv_test.c
20 #define TEST_MSR(msr) { .idx = msr, .name = #msr } argument
22 #define PR_MSR(msr) ucall(UCALL_PR_MSR, 1, msr) argument
41 static void test_msr(struct msr_data *msr) in test_msr() argument
46 PR_MSR(msr); in test_msr()
48 vector = rdmsr_safe(msr->idx, &ignored); in test_msr()
51 vector = wrmsr_safe(msr->idx, 0); in test_msr()
100 struct msr_data *msr = (struct msr_data *)uc->args[0]; in pr_msr() local
102 pr_info("testing msr: %s (%#x)\n", msr->name, msr->idx); in pr_msr()
/linux-6.3-rc2/arch/x86/kernel/cpu/
perfctr-watchdog.c
51 if (msr >= MSR_F15H_PERF_CTR) in nmi_perfctr_msr_to_bit()
53 return msr - MSR_K7_PERFCTR0; in nmi_perfctr_msr_to_bit()
60 return msr - MSR_P6_PERFCTR0; in nmi_perfctr_msr_to_bit()
62 return msr - MSR_KNC_PERFCTR0; in nmi_perfctr_msr_to_bit()
64 return msr - MSR_P4_BPU_PERFCTR0; in nmi_perfctr_msr_to_bit()
84 if (msr >= MSR_F15H_PERF_CTL) in nmi_evntsel_msr_to_bit()
86 return msr - MSR_K7_EVNTSEL0; in nmi_evntsel_msr_to_bit()
93 return msr - MSR_P6_EVNTSEL0; in nmi_evntsel_msr_to_bit()
95 return msr - MSR_KNC_EVNTSEL0; in nmi_evntsel_msr_to_bit()
97 return msr - MSR_P4_BSU_ESCR0; in nmi_evntsel_msr_to_bit()
[all …]
feat_ctl.c
117 u64 msr; in init_ia32_feat_ctl() local
119 if (rdmsrl_safe(MSR_IA32_FEAT_CTL, &msr)) { in init_ia32_feat_ctl()
139 if (msr & FEAT_CTL_LOCKED) in init_ia32_feat_ctl()
146 msr = FEAT_CTL_LOCKED; in init_ia32_feat_ctl()
154 msr |= FEAT_CTL_VMX_ENABLED_OUTSIDE_SMX; in init_ia32_feat_ctl()
157 msr |= FEAT_CTL_VMX_ENABLED_INSIDE_SMX; in init_ia32_feat_ctl()
161 msr |= FEAT_CTL_SGX_ENABLED; in init_ia32_feat_ctl()
163 msr |= FEAT_CTL_SGX_LC_ENABLED; in init_ia32_feat_ctl()
166 wrmsrl(MSR_IA32_FEAT_CTL, msr); in init_ia32_feat_ctl()
174 if ( (tboot && !(msr & FEAT_CTL_VMX_ENABLED_INSIDE_SMX)) || in init_ia32_feat_ctl()
[all …]
/linux-6.3-rc2/arch/x86/include/asm/
msr.h
17 struct msr reg;
18 struct msr *msrs;
102 u64 __val = __rdmsr((msr)); \
108 __wrmsr(msr, low, high)
118 val = __rdmsr(msr); in native_read_msr()
135 : "c" (msr)); in native_read_msr_safe()
145 __wrmsr(msr, low, high); in native_write_msr()
257 #define rdmsrl(msr, val) \ argument
308 struct msr *msrs_alloc(void);
348 struct msr *msrs) in rdmsr_on_cpus()
[all …]
msr-trace.h
3 #define TRACE_SYSTEM msr
6 #define TRACE_INCLUDE_FILE msr-trace
22 TP_PROTO(unsigned msr, u64 val, int failed),
23 TP_ARGS(msr, val, failed),
25 __field( unsigned, msr )
30 __entry->msr = msr;
35 __entry->msr,
41 TP_PROTO(unsigned msr, u64 val, int failed),
42 TP_ARGS(msr, val, failed)
47 TP_ARGS(msr, val, failed)
[all …]
/linux-6.3-rc2/arch/x86/lib/
msr.c
9 struct msr *msrs_alloc(void) in msrs_alloc()
11 struct msr *msrs = NULL; in msrs_alloc()
13 msrs = alloc_percpu(struct msr); in msrs_alloc()
23 void msrs_free(struct msr *msrs) in msrs_free()
39 static int msr_read(u32 msr, struct msr *m) in msr_read() argument
44 err = rdmsrl_safe(msr, &val); in msr_read()
57 static int msr_write(u32 msr, struct msr *m) in msr_write() argument
59 return wrmsrl_safe(msr, m->q); in msr_write()
64 struct msr m, m1; in __flip_bit()
70 err = msr_read(msr, &m); in __flip_bit()
[all …]
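The msr.h and msr.c excerpts above are the core x86 accessors; rdmsrl_safe()/wrmsrl_safe() are the fault-tolerant variants that return non-zero instead of faulting when an MSR is not implemented. A minimal kernel-module sketch of that read pattern follows; the module wrapper and the choice of MSR_IA32_FEAT_CTL (also probed in feat_ctl.c above) are illustrative only.

/*
 * Sketch only: read one MSR on the current CPU with the safe accessor.
 * The module packaging and the specific MSR are illustrative.
 */
#include <linux/module.h>
#include <asm/msr.h>
#include <asm/msr-index.h>

static int __init msr_demo_init(void)
{
	u64 val;

	/* rdmsrl_safe() returns non-zero if the RDMSR faults. */
	if (rdmsrl_safe(MSR_IA32_FEAT_CTL, &val))
		return -ENODEV;

	pr_info("IA32_FEAT_CTL = %#llx, locked=%d\n",
		val, !!(val & FEAT_CTL_LOCKED));
	return 0;
}

static void __exit msr_demo_exit(void) { }

module_init(msr_demo_init);
module_exit(msr_demo_exit);
MODULE_LICENSE("GPL");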
/linux-6.3-rc2/arch/powerpc/kvm/
book3s_hv_tm.c
19 u64 msr = vcpu->arch.shregs.msr; in emulate_tx_failure() local
25 if (msr & MSR_PR) { in emulate_tx_failure()
45 u64 msr = vcpu->arch.shregs.msr; in kvmhv_p9_tm_emulation() local
111 msr = (msr & ~MSR_TS_MASK) | MSR_TS_T; in kvmhv_p9_tm_emulation()
112 vcpu->arch.shregs.msr = msr; in kvmhv_p9_tm_emulation()
147 if (!(msr & MSR_TM)) { in kvmhv_p9_tm_emulation()
161 msr = (msr & ~MSR_TS_MASK) | MSR_TS_T; in kvmhv_p9_tm_emulation()
164 msr = (msr & ~MSR_TS_MASK) | MSR_TS_S; in kvmhv_p9_tm_emulation()
166 vcpu->arch.shregs.msr = msr; in kvmhv_p9_tm_emulation()
179 if (!(msr & MSR_TM)) { in kvmhv_p9_tm_emulation()
[all …]
book3s_hv_tm_builtin.c
23 u64 newmsr, msr, bescr; in kvmhv_p9_tm_emulation_early() local
45 vcpu->arch.shregs.msr = newmsr; in kvmhv_p9_tm_emulation_early()
52 msr = vcpu->arch.shregs.msr; in kvmhv_p9_tm_emulation_early()
57 ((msr & MSR_PR) && !(mfspr(SPRN_FSCR) & FSCR_EBB))) in kvmhv_p9_tm_emulation_early()
67 msr = (msr & ~MSR_TS_MASK) | MSR_TS_T; in kvmhv_p9_tm_emulation_early()
68 vcpu->arch.shregs.msr = msr; in kvmhv_p9_tm_emulation_early()
77 msr = vcpu->arch.shregs.msr; in kvmhv_p9_tm_emulation_early()
82 newmsr = (newmsr & ~MSR_LE) | (msr & MSR_LE); in kvmhv_p9_tm_emulation_early()
84 vcpu->arch.shregs.msr = newmsr; in kvmhv_p9_tm_emulation_early()
90 msr = vcpu->arch.shregs.msr; in kvmhv_p9_tm_emulation_early()
[all …]
/linux-6.3-rc2/arch/m68k/bvme6000/
config.c
169 unsigned char msr; in bvme6000_timer_int() local
172 msr = rtc->msr & 0xc0; in bvme6000_timer_int()
173 rtc->msr = msr | 0x20; /* Ack the interrupt */ in bvme6000_timer_int()
194 unsigned char msr = rtc->msr & 0xc0; in bvme6000_sched_init() local
214 rtc->msr = msr; in bvme6000_sched_init()
236 unsigned char msr, msb; in bvme6000_read_clk() local
242 msr = rtc->msr & 0xc0; in bvme6000_read_clk()
247 t1int = rtc->msr & 0x20; in bvme6000_read_clk()
262 rtc->msr = msr; in bvme6000_read_clk()
289 unsigned char msr = rtc->msr & 0xc0; in bvme6000_hwclk() local
[all …]
rtc.c
42 unsigned char msr; in rtc_ioctl() local
52 msr = rtc->msr & 0xc0; in rtc_ioctl()
53 rtc->msr = 0x40; in rtc_ioctl()
66 rtc->msr = msr; in rtc_ioctl()
108 msr = rtc->msr & 0xc0; in rtc_ioctl()
109 rtc->msr = 0x40; in rtc_ioctl()
123 rtc->msr = msr; in rtc_ioctl()
/linux-6.3-rc2/tools/power/x86/turbostat/
turbostat.c
2441 base_cpu, msr, msr & 0x2 ? "EN" : "DIS"); in dump_nhm_platform_info()
4061 msr = (msr >> 30) & 1; in check_tcc_offset()
4422 cpu, msr, ((msr) & 0x1) ? "EN" : "Dis", ((msr) & 0x2) ? "EN" : "Dis"); in print_hwp()
4429 cpu, msr, ((msr) & 0x1) ? "" : "No-", ((msr) & 0x2) ? "" : "No-"); in print_hwp()
4969 cpu, msr, (msr >> 63) & 1 ? "" : "UN"); in print_rapl()
5002 cpu, msr, (msr >> 31) & 1 ? "" : "UN"); in print_rapl()
5016 cpu, msr, (msr >> 31) & 1 ? "" : "UN"); in print_rapl()
5028 cpu, msr, (msr >> 31) & 1 ? "" : "UN"); in print_rapl()
5336 base_cpu, msr, msr & FEAT_CTL_LOCKED ? "" : "UN-", msr & (1 << 18) ? "SGX" : ""); in decode_feature_control_msr()
5366 base_cpu, msr, msr & (0 << 0) ? "No-" : "", msr & (1 << 0) ? "No-" : "", in decode_misc_feature_control()
[all …]
/linux-6.3-rc2/arch/x86/kvm/
mtrr.c
30 switch (msr) { in msr_mtrr_valid()
60 if (!msr_mtrr_valid(msr)) in kvm_mtrr_valid()
63 if (msr == MSR_IA32_CR_PAT) { in kvm_mtrr_valid()
69 } else if (msr >= MSR_MTRRfix64K_00000 && msr <= MSR_MTRRfix4K_F8000) { in kvm_mtrr_valid()
77 WARN_ON(!(msr >= 0x200 && msr < 0x200 + 2 * KVM_NR_VAR_MTRR)); in kvm_mtrr_valid()
80 if ((msr & 1) == 0) { in kvm_mtrr_valid()
185 switch (msr) { in fixed_msr_to_seg_unit()
347 index = (msr - 0x200) / 2; in set_var_mtrr_msr()
390 update_mtrr(vcpu, msr); in kvm_mtrr_set_msr()
399 if (msr == MSR_MTRRcap) { in kvm_mtrr_get_msr()
[all …]
/linux-6.3-rc2/arch/x86/xen/
pmu.c
138 if ((msr >= MSR_F15H_PERF_CTL && in is_amd_pmu_msr()
140 (msr >= MSR_K7_EVNTSEL0 && in is_amd_pmu_msr()
214 switch (msr) { in xen_intel_pmu_emulate()
274 ((msr >= MSR_K7_EVNTSEL0) && (msr <= MSR_K7_PERFCTR3))) in xen_amd_pmu_emulate()
275 msr = get_fam15h_addr(msr); in xen_amd_pmu_emulate()
279 if (msr == amd_ctrls_base + off) { in xen_amd_pmu_emulate()
307 if (is_amd_pmu_msr(msr)) in pmu_msr_chk_emulated()
326 : native_read_msr(msr); in pmu_msr_read()
344 native_write_msr(msr, low, high); in pmu_msr_write()
358 uint32_t msr; in xen_amd_read_pmc() local
[all …]
/linux-6.3-rc2/arch/microblaze/kernel/
process.c
72 local_save_flags(childregs->msr); in copy_thread()
73 ti->cpu_context.msr = childregs->msr & ~MSR_IE; in copy_thread()
83 childregs->msr |= MSR_UMS; in copy_thread()
95 childregs->msr &= ~MSR_EIP; in copy_thread()
96 childregs->msr |= MSR_IE; in copy_thread()
97 childregs->msr &= ~MSR_VM; in copy_thread()
98 childregs->msr |= MSR_VMS; in copy_thread()
101 ti->cpu_context.msr = (childregs->msr|MSR_VM); in copy_thread()
103 ti->cpu_context.msr &= ~MSR_IE; in copy_thread()
128 regs->msr |= MSR_UMS; in start_thread()
[all …]
/linux-6.3-rc2/arch/x86/events/
probe.c
19 perf_msr_probe(struct perf_msr *msr, int cnt, bool zero, void *data) in perf_msr_probe() argument
29 if (!msr[bit].no_check) { in perf_msr_probe()
30 struct attribute_group *grp = msr[bit].grp; in perf_msr_probe()
40 if (!msr[bit].msr) in perf_msr_probe()
43 if (msr[bit].test && !msr[bit].test(bit, data)) in perf_msr_probe()
46 if (rdmsrl_safe(msr[bit].msr, &val)) in perf_msr_probe()
49 mask = msr[bit].mask; in perf_msr_probe()
/linux-6.3-rc2/arch/powerpc/kernel/
signal_64.c
130 unsigned long msr = regs->msr; in __unsafe_setup_sigcontext() local
147 msr |= MSR_VEC; in __unsafe_setup_sigcontext()
163 msr &= ~MSR_VSX; in __unsafe_setup_sigcontext()
176 msr |= MSR_VSX; in __unsafe_setup_sigcontext()
237 msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX); in setup_tm_sigcontexts()
263 msr |= MSR_VEC; in setup_tm_sigcontexts()
285 if (msr & MSR_FP) in setup_tm_sigcontexts()
310 msr |= MSR_VSX; in setup_tm_sigcontexts()
576 regs_set_return_msr(regs, regs->msr | (msr & MSR_TS_MASK)); in restore_tm_sigcontexts()
865 unsigned long msr = regs->msr; in handle_rt_signal64() local
[all …]
signal_32.c
270 unsigned long msr = regs->msr; in __unsafe_save_user_regs() local
282 msr |= MSR_VEC; in __unsafe_save_user_regs()
301 msr &= ~MSR_VSX; in __unsafe_save_user_regs()
311 msr |= MSR_VSX; in __unsafe_save_user_regs()
321 msr |= MSR_SPE; in __unsafe_save_user_regs()
398 msr |= MSR_VEC; in save_tm_user_regs_unsafe()
408 if (msr & MSR_VEC) in save_tm_user_regs_unsafe()
416 if (msr & MSR_FP) in save_tm_user_regs_unsafe()
434 msr |= MSR_VSX; in save_tm_user_regs_unsafe()
743 unsigned long msr = regs->msr; in handle_rt_signal32() local
[all …]
cpu_setup_power.c
19 u64 msr; in init_hvmode_206() local
21 msr = mfmsr(); in init_hvmode_206()
22 if (msr & MSR_HV) in init_hvmode_206()
147 u64 msr; in __restore_cpu_power7() local
149 msr = mfmsr(); in __restore_cpu_power7()
179 u64 msr; in __restore_cpu_power8() local
185 msr = mfmsr(); in __restore_cpu_power8()
219 u64 msr; in __restore_cpu_power9() local
224 msr = mfmsr(); in __restore_cpu_power9()
261 u64 msr; in __restore_cpu_power10() local
[all …]
process.c
155 unsigned long msr; in __giveup_fpu() local
158 msr = tsk->thread.regs->msr; in __giveup_fpu()
161 msr &= ~MSR_VSX; in __giveup_fpu()
241 msr = tsk->thread.regs->msr; in __giveup_altivec()
242 msr &= ~MSR_VEC; in __giveup_altivec()
244 msr &= ~MSR_VSX; in __giveup_altivec()
304 unsigned long msr = tsk->thread.regs->msr; in __giveup_vsx() local
310 WARN_ON((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC))); in __giveup_vsx()
313 if (msr & MSR_FP) in __giveup_vsx()
522 msr = regs->msr; in restore_math()
[all …]
/linux-6.3-rc2/arch/arm64/kvm/hyp/nvhe/
hyp-init.S
87 msr tpidr_el2, x1
93 msr mair_el2, x1
96 msr hcr_el2, x1
99 msr vttbr_el2, x1
102 msr vtcr_el2, x1
109 msr ttbr0_el2, x2
116 msr tcr_el2, x0
136 msr vbar_el2, x0
203 msr elr_el2, x1
205 msr spsr_el2, x0
[all …]
/linux-6.3-rc2/arch/x86/kvm/svm/
pmu.c
47 switch (msr) { in get_gp_pmc_amd()
56 if (!(msr & 0x1) != (type == PMU_TYPE_EVNTSEL)) in get_gp_pmc_amd()
62 idx = msr - MSR_K7_EVNTSEL0; in get_gp_pmc_amd()
67 idx = msr - MSR_K7_PERFCTR0; in get_gp_pmc_amd()
116 pmc = get_gp_pmc_amd(pmu, msr, PMU_TYPE_COUNTER); in amd_msr_idx_to_pmc()
126 u32 msr = msr_info->index; in amd_pmu_get_msr() local
129 pmc = get_gp_pmc_amd(pmu, msr, PMU_TYPE_COUNTER); in amd_pmu_get_msr()
135 pmc = get_gp_pmc_amd(pmu, msr, PMU_TYPE_EVNTSEL); in amd_pmu_get_msr()
148 u32 msr = msr_info->index; in amd_pmu_set_msr() local
152 pmc = get_gp_pmc_amd(pmu, msr, PMU_TYPE_COUNTER); in amd_pmu_set_msr()
[all …]
/linux-6.3-rc2/tools/power/x86/x86_energy_perf_policy/
x86_energy_perf_policy.c
691 retval = pread(fd, msr, sizeof(*msr), offset); in get_msr()
779 unsigned long long msr; in read_hwp_cap() local
814 unsigned long long msr; in read_hwp_request() local
917 unsigned long long msr; in print_pkg_msrs() local
929 pkg, msr, in print_pkg_msrs()
936 pkg, msr, in print_pkg_msrs()
1169 unsigned long long msr; in enable_hwp_on_cpu() local
1182 unsigned long long msr; in update_cpu_msrs() local
1332 unsigned long long msr; in verify_hwp_is_enabled() local
1339 if ((msr & 1) == 0) { in verify_hwp_is_enabled()
[all …]
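The get_msr() hit above reads MSRs from user space through the msr driver's /dev/cpu/N/msr character device: the MSR index is passed as the file offset and each read returns the 64-bit register. A standalone sketch of the same pread() pattern follows; it assumes the msr module is loaded and sufficient privileges, and the choice of MSR 0x1a0 (IA32_MISC_ENABLE) on CPU 0 is purely illustrative.

/*
 * Sketch only: read one MSR from userspace the way turbostat and
 * x86_energy_perf_policy do.  Requires the "msr" kernel module and
 * appropriate privileges; the MSR index and CPU are illustrative.
 */
#include <fcntl.h>
#include <stdint.h>
#include <stdio.h>
#include <unistd.h>

int main(void)
{
	uint64_t val;
	int fd = open("/dev/cpu/0/msr", O_RDONLY);

	if (fd < 0) {
		perror("open /dev/cpu/0/msr");
		return 1;
	}
	/* The file offset is the MSR index; each MSR is 8 bytes wide. */
	if (pread(fd, &val, sizeof(val), 0x1a0) != sizeof(val)) {
		perror("pread");
		close(fd);
		return 1;
	}
	printf("MSR 0x1a0 on cpu0 = %#llx\n", (unsigned long long)val);
	close(fd);
	return 0;
}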
/linux-6.3-rc2/arch/powerpc/kernel/ptrace/
ptrace-tm.c
39 task->thread.ckpt_regs.msr &= ~MSR_DEBUGCHANGE; in set_user_ckpt_msr()
40 task->thread.ckpt_regs.msr |= msr & MSR_DEBUGCHANGE; in set_user_ckpt_msr()
63 if (!MSR_TM_ACTIVE(target->thread.regs->msr)) in tm_cgpr_active()
97 if (!MSR_TM_ACTIVE(target->thread.regs->msr)) in tm_cgpr_get()
144 if (!MSR_TM_ACTIVE(target->thread.regs->msr)) in tm_cgpr_set()
205 if (!MSR_TM_ACTIVE(target->thread.regs->msr)) in tm_cfpr_active()
238 if (!MSR_TM_ACTIVE(target->thread.regs->msr)) in tm_cfpr_get()
283 if (!MSR_TM_ACTIVE(target->thread.regs->msr)) in tm_cfpr_set()
317 if (!MSR_TM_ACTIVE(target->thread.regs->msr)) in tm_cvmx_active()
354 if (!MSR_TM_ACTIVE(target->thread.regs->msr)) in tm_cvmx_get()
[all …]
