
Searched refs:rdmsr (Results 1 – 25 of 26) sorted by relevance

/tools/testing/selftests/kvm/lib/x86/
apic.c
11 rdmsr(MSR_IA32_APICBASE) & in apic_disable()
17 uint64_t val = rdmsr(MSR_IA32_APICBASE); in xapic_enable()
23 rdmsr(MSR_IA32_APICBASE) | MSR_IA32_APICBASE_ENABLE); in xapic_enable()
39 wrmsr(MSR_IA32_APICBASE, rdmsr(MSR_IA32_APICBASE) | in x2apic_enable()
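The apic.c hits above are all the same read-modify-write dance on IA32_APICBASE, using the rdmsr()/wrmsr() helpers from processor.h. A rough sketch of what xapic_enable() is doing (the name below is a stand-in, not the selftest function; MSR_IA32_APICBASE_ENABLE is the global-enable bit, bit 11):

    static inline void xapic_enable_sketch(void)
    {
            uint64_t val = rdmsr(MSR_IA32_APICBASE);

            /* Set the global enable bit and write the value back. */
            wrmsr(MSR_IA32_APICBASE, val | MSR_IA32_APICBASE_ENABLE);
    }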
vmx.c
127 cr0 &= rdmsr(MSR_IA32_VMX_CR0_FIXED1); in prepare_for_vmx_operation()
128 cr0 |= rdmsr(MSR_IA32_VMX_CR0_FIXED0); in prepare_for_vmx_operation()
132 cr4 &= rdmsr(MSR_IA32_VMX_CR4_FIXED1); in prepare_for_vmx_operation()
133 cr4 |= rdmsr(MSR_IA32_VMX_CR4_FIXED0); in prepare_for_vmx_operation()
146 feature_control = rdmsr(MSR_IA32_FEAT_CTL); in prepare_for_vmx_operation()
178 return rdmsr(MSR_IA32_VMX_EPT_VPID_CAP) & mask; in ept_vpid_cap_supported()
262 vmwrite(HOST_IA32_PAT, rdmsr(MSR_IA32_CR_PAT)); in init_vmcs_host_state()
264 vmwrite(HOST_IA32_EFER, rdmsr(MSR_EFER)); in init_vmcs_host_state()
267 rdmsr(MSR_CORE_PERF_GLOBAL_CTRL)); in init_vmcs_host_state()
274 vmwrite(HOST_FS_BASE, rdmsr(MSR_FS_BASE)); in init_vmcs_host_state()
[all …]
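For the prepare_for_vmx_operation() hits: the VMX fixed-bit MSRs constrain CR0/CR4 while VMX is enabled. Bits set in *_FIXED0 must be 1, and bits clear in *_FIXED1 must be 0, which is what the &=/|= pair at lines 127–133 implements. A hypothetical helper spelling that out (not from the selftest sources):

    static inline uint64_t apply_vmx_fixed_bits(uint64_t cr,
                                                uint32_t fixed0_msr,
                                                uint32_t fixed1_msr)
    {
            cr &= rdmsr(fixed1_msr);        /* bits clear in FIXED1 must be 0 */
            cr |= rdmsr(fixed0_msr);        /* bits set in FIXED0 must be 1   */
            return cr;
    }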
svm.c
74 efer = rdmsr(MSR_EFER); in generic_svm_setup()
89 save->efer = rdmsr(MSR_EFER); in generic_svm_setup()
96 save->g_pat = rdmsr(MSR_IA32_CR_PAT); in generic_svm_setup()
97 save->dbgctl = rdmsr(MSR_IA32_DEBUGCTLMSR); in generic_svm_setup()
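The MSR_EFER read at svm.c line 74 is the usual prologue for turning SVM on: the guest must set EFER.SVME before it may execute VMRUN. A sketch of that step, not the selftest's exact code (EFER_SVME is bit 12, defined in the msr-index headers):

    uint64_t efer = rdmsr(MSR_EFER);

    wrmsr(MSR_EFER, efer | EFER_SVME);      /* enable SVM before touching the VMCB */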
/tools/testing/selftests/kvm/x86/
ucna_injection_test.c
71 uint64_t msr = rdmsr(MSR_IA32_APICBASE); in verify_apic_base_addr()
85 ctl2 = rdmsr(MSR_IA32_MCx_CTL2(UCNA_BANK)); in ucna_injection_guest_code()
94 ucna_addr = rdmsr(MSR_IA32_MCx_ADDR(UCNA_BANK)); in ucna_injection_guest_code()
97 ctl2 = rdmsr(MSR_IA32_MCx_CTL2(UCNA_BANK)); in ucna_injection_guest_code()
103 ucna_addr2 = rdmsr(MSR_IA32_MCx_ADDR(UCNA_BANK)); in ucna_injection_guest_code()
109 uint64_t ctl2 = rdmsr(MSR_IA32_MCx_CTL2(UCNA_BANK)); in cmci_disabled_guest_code()
117 uint64_t ctl2 = rdmsr(MSR_IA32_MCx_CTL2(UCNA_BANK)); in cmci_enabled_guest_code()
126 i_ucna_addr = rdmsr(MSR_IA32_MCx_ADDR(UCNA_BANK)); in guest_cmci_handler()
amx_test.c
139 GUEST_ASSERT(rdmsr(MSR_IA32_XFD) == 0); in guest_code()
170 GUEST_ASSERT(rdmsr(MSR_IA32_XFD) == XFEATURE_MASK_XTILE_DATA); in guest_code()
185 GUEST_ASSERT(rdmsr(MSR_IA32_XFD_ERR) == XFEATURE_MASK_XTILE_DATA); in guest_nm_handler()
186 GUEST_ASSERT(rdmsr(MSR_IA32_XFD) == XFEATURE_MASK_XTILE_DATA); in guest_nm_handler()
188 GUEST_ASSERT(rdmsr(MSR_IA32_XFD_ERR) == XFEATURE_MASK_XTILE_DATA); in guest_nm_handler()
189 GUEST_ASSERT(rdmsr(MSR_IA32_XFD) == XFEATURE_MASK_XTILE_DATA); in guest_nm_handler()
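Context for the XFD asserts: setting a bit in IA32_XFD makes the first instruction that touches that state component raise #NM, and the CPU latches the offending bit in IA32_XFD_ERR, which software must clear itself. A generic sketch of the bookkeeping a #NM handler does, not the test's exact code:

    uint64_t xfd_err = rdmsr(MSR_IA32_XFD_ERR);     /* which component faulted */

    wrmsr(MSR_IA32_XFD_ERR, 0);                     /* XFD_ERR is sticky; clear it by hand */
    wrmsr(MSR_IA32_XFD, rdmsr(MSR_IA32_XFD) & ~xfd_err);    /* re-enable that state */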
pmu_event_filter_test.c
80 uint64_t v = rdmsr(msr) ^ bits_to_flip; in check_msr()
83 if (rdmsr(msr) != v) in check_msr()
88 if (rdmsr(msr) != v) in check_msr()
94 const uint64_t branches_retired = rdmsr(msr_base + 0); in run_and_measure_loop()
95 const uint64_t insn_retired = rdmsr(msr_base + 1); in run_and_measure_loop()
421 const uint64_t loads = rdmsr(msr_base + 0); in masked_events_guest_test()
422 const uint64_t stores = rdmsr(msr_base + 1); in masked_events_guest_test()
423 const uint64_t loads_stores = rdmsr(msr_base + 2); in masked_events_guest_test()
432 pmc_results.loads = rdmsr(msr_base + 0) - loads; in masked_events_guest_test()
433 pmc_results.stores = rdmsr(msr_base + 1) - stores; in masked_events_guest_test()
[all …]
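The check_msr() hits at lines 80–88 are a simple writability probe: flip some bits, write the result back, and read it again to see whether the change stuck. Condensed into a hypothetical helper (name and bool return are mine, assuming <stdbool.h>):

    static bool msr_write_sticks(uint32_t msr, uint64_t bits_to_flip)
    {
            uint64_t v = rdmsr(msr) ^ bits_to_flip;

            wrmsr(msr, v);
            return rdmsr(msr) == v;         /* true if the flipped bits survived */
    }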
vmx_preemption_timer_test.c
86 basic.val = rdmsr(MSR_IA32_VMX_BASIC); in l1_guest_code()
87 ctrl_pin_rev.val = rdmsr(basic.ctrl ? MSR_IA32_VMX_TRUE_PINBASED_CTLS in l1_guest_code()
89 ctrl_exit_rev.val = rdmsr(basic.ctrl ? MSR_IA32_VMX_TRUE_EXIT_CTLS in l1_guest_code()
110 vmx_pt_rate = rdmsr(MSR_IA32_VMX_MISC) & 0x1F; in l1_guest_code()
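About the `& 0x1F` at line 110: bits 4:0 of IA32_VMX_MISC give the VMX preemption-timer rate, i.e. the timer decrements once each time that TSC bit toggles, so a TSC delta converts to timer ticks by a right shift. A hypothetical helper:

    static inline uint64_t tsc_delta_to_pt_ticks(uint64_t tsc_delta)
    {
            uint64_t rate = rdmsr(MSR_IA32_VMX_MISC) & 0x1F;        /* bits 4:0 */

            return tsc_delta >> rate;
    }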
hyperv_clock.c
56 tsc_freq = rdmsr(HV_X64_MSR_TSC_FREQUENCY); in check_tsc_msr_rdtsc()
61 t1 = rdmsr(HV_X64_MSR_TIME_REF_COUNT); in check_tsc_msr_rdtsc()
65 t2 = rdmsr(HV_X64_MSR_TIME_REF_COUNT); in check_tsc_msr_rdtsc()
90 r1 = rdmsr(HV_X64_MSR_TIME_REF_COUNT); in check_tsc_msr_tsc_page()
97 r2 = rdmsr(HV_X64_MSR_TIME_REF_COUNT); in check_tsc_msr_tsc_page()
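For the hyperv_clock.c hits: HV_X64_MSR_TIME_REF_COUNT advances in 100 ns units and HV_X64_MSR_TSC_FREQUENCY reports the TSC rate in Hz, so a TSC delta can be cross-checked against the reference counter roughly as below (hypothetical helper, overflow ignored):

    static inline uint64_t tsc_delta_to_100ns(uint64_t tsc_delta)
    {
            uint64_t tsc_freq = rdmsr(HV_X64_MSR_TSC_FREQUENCY);    /* Hz */

            return tsc_delta * 10 * 1000 * 1000 / tsc_freq;
    }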
sev_smoke_test.c
21 uint64_t sev_msr = rdmsr(MSR_AMD64_SEV); in guest_snp_code()
34 GUEST_ASSERT(rdmsr(MSR_AMD64_SEV) & MSR_AMD64_SEV_ENABLED); in guest_sev_es_code()
35 GUEST_ASSERT(rdmsr(MSR_AMD64_SEV) & MSR_AMD64_SEV_ES_ENABLED); in guest_sev_es_code()
48 GUEST_ASSERT(rdmsr(MSR_AMD64_SEV) & MSR_AMD64_SEV_ENABLED); in guest_sev_code()
vmx_nested_tsc_scaling_test.c
65 tsc_start = rdmsr(MSR_IA32_TSC); in check_tsc_freq()
67 tsc_end = rdmsr(MSR_IA32_TSC); in check_tsc_freq()
platform_info_test.c
30 msr_platform_info = rdmsr(MSR_PLATFORM_INFO); in guest_code()
userspace_msr_exit_test.c
296 GUEST_ASSERT(rdmsr(MSR_SYSCALL_MASK) == MSR_SYSCALL_MASK); in guest_msr_calls()
297 GUEST_ASSERT(rdmsr(MSR_GS_BASE) == MSR_GS_BASE); in guest_msr_calls()
299 GUEST_ASSERT(rdmsr(MSR_SYSCALL_MASK) != MSR_SYSCALL_MASK); in guest_msr_calls()
300 GUEST_ASSERT(rdmsr(MSR_GS_BASE) != MSR_GS_BASE); in guest_msr_calls()
307 rdmsr(MSR_IA32_POWER_CTL); in guest_msr_calls()
310 GUEST_ASSERT(rdmsr(0xdeadbeef) == 0xdeadbeef); in guest_msr_calls()
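The `rdmsr(0xdeadbeef) == 0xdeadbeef` assert at line 310 only makes sense because the access is deflected to userspace (KVM_CAP_X86_USER_SPACE_MSR) and the VMM supplies the value. A sketch of the userspace half, assuming the kvm_run msr fields from <linux/kvm.h>, where `run` is the vCPU's mapped struct kvm_run:

    if (run->exit_reason == KVM_EXIT_X86_RDMSR) {
            run->msr.data  = run->msr.index;    /* echo the MSR index back as its value */
            run->msr.error = 0;                 /* report the read as successful        */
    }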
aperfmperf_test.c
54 GUEST_SYNC2(rdmsr(MSR_IA32_APERF), rdmsr(MSR_IA32_MPERF)); in guest_read_aperf_mperf()
pmu_counters_test.c
263 rdmsr(MSR_IA32_PERF_CAPABILITIES) & PMU_CAP_FW_WRITES) in guest_test_arch_event()
459 uint64_t global_ctrl = rdmsr(MSR_CORE_PERF_GLOBAL_CTRL); in guest_test_gp_counters()
468 rdmsr(MSR_IA32_PERF_CAPABILITIES) & PMU_CAP_FW_WRITES) in guest_test_gp_counters()
536 val = rdmsr(MSR_CORE_PERF_FIXED_CTR0 + i); in guest_test_fixed_counters()
monitor_mwait_test.c
46 u64 val = rdmsr(MSR_IA32_MISC_ENABLE) & ~MSR_IA32_MISC_ENABLE_MWAIT; in guest_monitor_wait()
vmx_tsc_adjust_test.c
59 adjust = rdmsr(MSR_IA32_TSC_ADJUST); in check_ia32_tsc_adjust()
hyperv_ipi.c
61 vcpu_id = rdmsr(HV_X64_MSR_VP_INDEX); in receiver_code()
74 u32 vcpu_id = rdmsr(HV_X64_MSR_VP_INDEX); in guest_ipi_handler()
smm_test.c
70 uint64_t apicbase = rdmsr(MSR_IA32_APICBASE); in guest_code()
tsc_msrs_test.c
16 #define rounded_rdmsr(x) ROUND(rdmsr(x))
xapic_ipi_test.c
83 uint64_t msr = rdmsr(MSR_IA32_APICBASE); in verify_apic_base_addr()
/tools/power/cpupower/debug/i386/
centrino-decode.c
28 static int rdmsr(unsigned int cpu, unsigned int msr, in rdmsr() function
80 err = rdmsr(cpu, MSR_IA32_PERF_STATUS, &lo, &hi); in decode_live()
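Unlike the KVM selftests above, this is host userspace, where the RDMSR instruction is privileged; the local rdmsr() defined at line 28 almost certainly goes through the msr driver instead: open /dev/cpu/<cpu>/msr and pread() 8 bytes at the MSR number. A minimal sketch with a low/high split matching the signature shown (needs <fcntl.h>, <stdint.h>, <stdio.h>, <unistd.h>; error handling trimmed):

    static int rdmsr_sketch(unsigned int cpu, unsigned int msr,
                            unsigned int *lo, unsigned int *hi)
    {
            char path[64];
            uint64_t val;
            int fd, ok;

            snprintf(path, sizeof(path), "/dev/cpu/%u/msr", cpu);
            fd = open(path, O_RDONLY);
            if (fd < 0)
                    return -1;
            ok = pread(fd, &val, sizeof(val), msr) == sizeof(val);
            close(fd);
            if (!ok)
                    return -1;
            *lo = val & 0xffffffff;
            *hi = val >> 32;
            return 0;
    }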
/tools/testing/selftests/kvm/include/x86/
apic.h
77 return rdmsr(MSR_IA32_APICBASE) & MSR_IA32_APICBASE_BSP; in get_bsp_flag()
92 return rdmsr(APIC_BASE_MSR + (reg >> 4)); in x2apic_read_reg()
vmx.h
495 return rdmsr(MSR_IA32_VMX_BASIC); in vmcs_revision()
processor.h
459 static inline uint64_t rdmsr(uint32_t msr) in rdmsr() function
1298 BUILD_READ_U64_SAFE_HELPERS(rdmsr)
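processor.h line 459 is where the guest-side rdmsr() used by all of the tests above is defined, and the BUILD_READ_U64_SAFE_HELPERS(rdmsr) hit at line 1298 appears to generate a fault-tolerant variant on top of it. The usual shape of such a wrapper is inline asm around the RDMSR instruction, roughly (the name below is a stand-in):

    static inline uint64_t rdmsr_sketch(uint32_t msr)
    {
            uint32_t lo, hi;

            /* RDMSR: ECX selects the MSR, the value comes back in EDX:EAX. */
            __asm__ __volatile__("rdmsr" : "=a"(lo), "=d"(hi) : "c"(msr));

            return ((uint64_t)hi << 32) | lo;
    }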
/tools/testing/selftests/kvm/
steal_time.c
47 GUEST_ASSERT_EQ(rdmsr(MSR_KVM_STEAL_TIME), ((uint64_t)st_gva[cpu] | KVM_MSR_ENABLED)); in guest_code()
