Searched refs:vcpu_get_guest_msr (Results 1 – 11 of 11) sorted by relevance
/hypervisor/arch/x86/guest/
lock_instr_emul.c
     21  if ((vcpu_get_guest_msr(vcpu, MSR_TEST_CTL) & MSR_TEST_CTL_AC_SPLITLOCK) != 0UL) {    in is_guest_ac_enabled()
     32  if ((vcpu_get_guest_msr(vcpu, MSR_TEST_CTL) & MSR_TEST_CTL_GP_UCLOCK) != 0UL) {    in is_guest_gp_enabled()
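The two matches above test the guest's shadowed MSR_TEST_CTL to decide whether split-lock #AC and UC-lock #GP detection are armed for the guest. A minimal sketch of how such predicates fit around vcpu_get_guest_msr(): the conditions are taken verbatim from the snippets, while the function bodies, return style, and the assumption that ACRN's internal headers provide struct acrn_vcpu and the MSR_TEST_CTL macros are illustrative, not the verbatim source.

/* Illustrative reconstruction based on the lock_instr_emul.c snippets above;
 * struct acrn_vcpu, MSR_TEST_CTL and the bit macros come from ACRN headers
 * assumed to be in scope. */
static bool is_guest_ac_enabled(struct acrn_vcpu *vcpu)
{
	bool ret = false;

	/* Split-lock #AC is treated as enabled for the guest when this bit
	 * is set in its shadowed MSR_TEST_CTL (match at line 21). */
	if ((vcpu_get_guest_msr(vcpu, MSR_TEST_CTL) & MSR_TEST_CTL_AC_SPLITLOCK) != 0UL) {
		ret = true;
	}

	return ret;
}

static bool is_guest_gp_enabled(struct acrn_vcpu *vcpu)
{
	bool ret = false;

	/* UC-lock #GP is treated as enabled when its bit is set (match at line 32). */
	if ((vcpu_get_guest_msr(vcpu, MSR_TEST_CTL) & MSR_TEST_CTL_GP_UCLOCK) != 0UL) {
		ret = true;
	}

	return ret;
}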
vmsr.c
    674  v = vcpu_get_guest_msr(vcpu, MSR_IA32_TSC_ADJUST);    in rdmsr_vmexit_handler()
    723  v = vcpu_get_guest_msr(vcpu, MSR_IA32_PERF_CTL);    in rdmsr_vmexit_handler()
    747  v = vcpu_get_guest_msr(vcpu, MSR_IA32_PAT);    in rdmsr_vmexit_handler()
    763  v = vcpu_get_guest_msr(vcpu, MSR_IA32_FEATURE_CONTROL);    in rdmsr_vmexit_handler()
    774  v = vcpu_get_guest_msr(vcpu, MSR_IA32_MISC_ENABLE);    in rdmsr_vmexit_handler()
    814  v = vcpu_get_guest_msr(vcpu, msr);    in rdmsr_vmexit_handler()
    826  v = vcpu_get_guest_msr(vcpu, MSR_TEST_CTL);    in rdmsr_vmexit_handler()
    910  msr_write(MSR_IA32_TSC_DEADLINE, vcpu_get_guest_msr(vcpu, MSR_IA32_TSC_DEADLINE));    in set_tsc_msr_interception()
    948  tsc_adjust = vcpu_get_guest_msr(vcpu, MSR_IA32_TSC_ADJUST);    in set_guest_tsc()
    994  tsc_adjust_delta = tsc_adjust - vcpu_get_guest_msr(vcpu, MSR_IA32_TSC_ADJUST);    in set_guest_tsc_adjust()
vcat.c
    211  *rval = vcpu_get_guest_msr(vcpu, vmsr);    in read_vcbm()
    382  *rval = vcpu_get_guest_msr(vcpu, MSR_IA32_PQR_ASSOC);    in read_vclosid()
nested.c
    225  v = vcpu_get_guest_msr(vcpu, msr);    in read_vmx_msr()
    749  || ((vcpu_get_guest_msr(vcpu, MSR_IA32_FEATURE_CONTROL) & features) != features)) {    in vmxon_vmexit_handler()
    939  ((vcpu_get_guest_msr(vcpu, MSR_IA32_VMX_MISC) & (1UL << 29U)) == 0UL)) {    in vmwrite_vmexit_handler()
   1587  uint32_t supported_types = (vcpu_get_guest_msr(vcpu, MSR_IA32_VMX_EPT_VPID_CAP) >> 40U) & 0xfU;    in invvpid_vmexit_handler()
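The nested.c match at line 749 gates VMXON on the guest's view of IA32_FEATURE_CONTROL: every bit in a required feature mask must already be set in the shadowed MSR before VMXON is allowed. A hedged sketch of that gating pattern; the mask constant names below are assumptions for illustration, not necessarily ACRN's macro names.

	/* Assumed bit names: the lock bit and the "VMX outside SMX" enable bit. */
	uint64_t features = MSR_IA32_FEATURE_CONTROL_LOCK | MSR_IA32_FEATURE_CONTROL_VMX_NO_SMX;

	if ((vcpu_get_guest_msr(vcpu, MSR_IA32_FEATURE_CONTROL) & features) != features) {
		/* Guest has not locked in VMX enabling: refuse VMXON (e.g. inject #GP(0)). */
	}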
vcpu.c
    135  uint64_t vcpu_get_guest_msr(const struct acrn_vcpu *vcpu, uint32_t msr)    in vcpu_get_guest_msr() function
    913  msr_write(MSR_IA32_XSS, vcpu_get_guest_msr(vcpu, MSR_IA32_XSS));    in rstore_xsave_area()
    956  vmsr_val = vcpu_get_guest_msr(vcpu, MSR_IA32_UMWAIT_CONTROL);    in context_switch_in()
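vcpu.c line 135 is where vcpu_get_guest_msr() itself is defined. For orientation only, here is a plausible shape of such a getter, assuming a per-vCPU shadow array and an internal index-mapping helper; the helper name, array field, and bounds constant are assumptions for illustration, not the actual ACRN definition.

uint64_t vcpu_get_guest_msr(const struct acrn_vcpu *vcpu, uint32_t msr)
{
	/* Hypothetical mapping from the architectural MSR number to a slot in a
	 * per-vCPU shadow area; MSRs without a slot read back as 0 here. */
	uint32_t index = guest_msr_to_index(msr);       /* assumed helper */
	uint64_t val = 0UL;

	if (index < NUM_SHADOWED_MSRS) {                /* assumed bound */
		val = vcpu->arch.guest_msrs[index];     /* assumed field */
	}

	return val;
}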
vcpuid.c
    888  uint64_t guest_ia32_misc_enable = vcpu_get_guest_msr(vcpu, MSR_IA32_MISC_ENABLE);    in guest_cpuid_80000001h()
    911  uint64_t guest_ia32_misc_enable = vcpu_get_guest_msr(vcpu, MSR_IA32_MISC_ENABLE);    in guest_limit_cpuid()
vept.c
    499  ept_cap_vmsr = vcpu_get_guest_msr(vcpu, MSR_IA32_VMX_EPT_VPID_CAP);    in invept_vmexit_handler()
virtual_cr.c
    305  vcpu_get_guest_msr(vcpu, MSR_IA32_PAT));    in vmx_write_cr0()
vmcs.c
    462  value64 = vcpu_get_guest_msr(vcpu, MSR_IA32_TSC_ADJUST) - cpu_msr_read(MSR_IA32_TSC_ADJUST);    in init_exec_ctrl()
vlapic.c
    380  ret = vcpu_get_guest_msr(vcpu, MSR_IA32_TSC_DEADLINE);    in vlapic_get_tsc_deadline_msr()
    386  vcpu_get_guest_msr(vcpu, MSR_IA32_TSC_DEADLINE);    in vlapic_get_tsc_deadline_msr()
/hypervisor/include/arch/x86/asm/guest/
vcpu.h
    500  uint64_t vcpu_get_guest_msr(const struct acrn_vcpu *vcpu, uint32_t msr);    declaration
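vcpu.h line 500 carries the public declaration; callers treat the return value as the guest-visible MSR contents rather than reading the physical MSR. A short usage sketch modeled on the rdmsr_vmexit_handler() matches in vmsr.c above; the handler name, error value, and include path are illustrative assumptions based on the directory shown in this listing.

#include <asm/guest/vcpu.h>   /* assumed include path for the declaration above */

/* Illustrative read path: return the guest's shadowed MSR value on RDMSR. */
static int32_t example_rdmsr(struct acrn_vcpu *vcpu, uint32_t msr, uint64_t *rval)
{
	int32_t err = 0;

	switch (msr) {
	case MSR_IA32_PAT:
		/* Guest-visible PAT, which may differ from the physical MSR. */
		*rval = vcpu_get_guest_msr(vcpu, MSR_IA32_PAT);
		break;
	default:
		err = -1;   /* illustrative: MSR not emulated in this sketch */
		break;
	}

	return err;
}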
Completed in 26 milliseconds