
Searched refs:MSR_EFER (Results 1 – 25 of 25) sorted by relevance

/arch/x86/kernel/acpi/
  sleep.c:85     if (!rdmsr_safe(MSR_EFER,                 in x86_acpi_suspend_lowlevel()
  sleep.c:88     !wrmsr_safe(MSR_EFER,                     in x86_acpi_suspend_lowlevel()
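Note: the _safe accessors matter here because a plain rdmsr of EFER would raise #GP on old 32-bit CPUs that lack the register. A minimal sketch of the pattern, with hypothetical struct and field names (the real code stashes EFER in the wakeup header):

    /* Sketch only: illustrative names; assumes <asm/msr.h> for rdmsr_safe(). */
    struct sketch_wakeup_header {
            u32 efer_low, efer_high;
            u32 efer_valid;
    };

    static void sketch_stash_efer(struct sketch_wakeup_header *hdr)
    {
            /* rdmsr_safe() returns 0 on success and traps the #GP a
             * missing MSR would raise, so pre-EFER CPUs survive here. */
            if (!rdmsr_safe(MSR_EFER, &hdr->efer_low, &hdr->efer_high))
                    hdr->efer_valid = 1;    /* replay with wrmsr_safe() on wake */
    }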
/arch/x86/boot/startup/
  efi-mixed.S:74      movl $MSR_EFER, %ecx
  efi-mixed.S:114     movl $MSR_EFER, %ecx
  la57toggle.S:74     movl $MSR_EFER, %ecx
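Note: every assembly hit in this group, and in the realmode, pvh, head_*.S, and boot/compressed groups below, is the same canonical sequence: load the MSR index into %ecx, then rdmsr/wrmsr to flip EFER bits (typically LME before enabling paging). A sketch of the equivalent as C with inline asm; the constants match msr-index.h, but the function itself is illustrative:

    #define SKETCH_MSR_EFER     0xc0000080      /* MSR_EFER in msr-index.h */
    #define SKETCH_EFER_LME     (1U << 8)       /* _EFER_LME: long mode enable */

    static void sketch_enable_long_mode(void)
    {
            unsigned int lo, hi;

            /* movl $MSR_EFER, %ecx; rdmsr -> EDX:EAX */
            asm volatile("rdmsr" : "=a" (lo), "=d" (hi) : "c" (SKETCH_MSR_EFER));
            lo |= SKETCH_EFER_LME;
            /* wrmsr writes EDX:EAX back to the MSR selected by %ecx */
            asm volatile("wrmsr" : : "a" (lo), "d" (hi), "c" (SKETCH_MSR_EFER));
    }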
/arch/x86/realmode/rm/
  reboot.S:40           movl $MSR_EFER, %ecx
  wakeup_asm.S:123      movl $MSR_EFER, %ecx
  trampoline_64.S:164   movl $MSR_EFER, %ecx
/arch/x86/power/
  cpu.c:119      rdmsrq(MSR_EFER, ctxt->efer);            in __save_processor_state()
  cpu.c:212      wrmsrq(MSR_EFER, ctxt->efer);            in __restore_processor_state()
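Note: these two hits are a matched pair: EFER is one of the MSRs captured into the saved context on suspend and replayed on resume. A minimal sketch with an illustrative context struct (the real one is struct saved_context):

    /* Sketch only; assumes <asm/msr.h> for rdmsrq()/wrmsrq(). */
    struct sketch_saved_context {
            u64 efer;
    };

    static void sketch_save_efer(struct sketch_saved_context *ctxt)
    {
            rdmsrq(MSR_EFER, ctxt->efer);   /* as in __save_processor_state() */
    }

    static void sketch_restore_efer(struct sketch_saved_context *ctxt)
    {
            wrmsrq(MSR_EFER, ctxt->efer);   /* as in __restore_processor_state() */
    }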
/arch/x86/realmode/
  init.c:151     rdmsrq(MSR_EFER, efer);                  in setup_real_mode()
/arch/x86/hyperv/
  hv_vtl.c:158   input->vp_context.efer = native_rdmsrq(MSR_EFER);   in hv_vtl_bringup_vcpu()
  ivm.c:330      vmsa->efer = native_read_msr(MSR_EFER);             in hv_snp_boot_ap()
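Note: both Hyper-V bring-up paths seed a new vCPU's initial register state from the boot CPU's live EFER, using the non-paravirtualized accessor so the value comes straight from hardware. A sketch with an illustrative context struct:

    struct sketch_vp_context {
            u64 efer;
    };

    static void sketch_seed_vp_context(struct sketch_vp_context *ctx)
    {
            /* native_rdmsrq() bypasses paravirt patching, as in hv_vtl_bringup_vcpu() */
            ctx->efer = native_rdmsrq(MSR_EFER);
    }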
/arch/x86/platform/pvh/
  head.S:101     mov $MSR_EFER, %ecx
/arch/x86/kernel/
  head_32.S:213   movl $MSR_EFER, %ecx
  head_64.S:384   movl $MSR_EFER, %ecx
/arch/x86/kernel/cpu/
  amd.c:1143     WARN_ON_ONCE(msr_set_bit(MSR_EFER, _EFER_AUTOIBRS) < 0);   in init_amd()
  amd.c:1150     msr_set_bit(MSR_EFER, _EFER_TCE);                          in init_amd()
  bugs.c:2411    msr_set_bit(MSR_EFER, _EFER_AUTOIBRS);                     in spectre_v2_apply_mitigation()
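Note: msr_set_bit() is a read-modify-write helper keyed by bit position (the _EFER_* constants; the EFER_* names are the corresponding masks). It returns a negative value on a faulting access, 0 if the bit was already set, and a positive value if it flipped the bit, which is why init_amd() only warns on a negative return. Sketch:

    static void sketch_enable_automatic_ibrs(void)
    {
            int err = msr_set_bit(MSR_EFER, _EFER_AUTOIBRS);

            WARN_ON_ONCE(err < 0);  /* only a faulting MSR access is unexpected */
    }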
/arch/x86/boot/compressed/
  head_64.S:238   movl $MSR_EFER, %ecx
/arch/x86/kvm/
  smm.c:532       if (kvm_set_msr(vcpu, MSR_EFER, smstate->efer & ~EFER_LMA))   in rsm_load_state_64()
  smm.c:623       kvm_set_msr(vcpu, MSR_EFER, efer);                            in emulator_leave_smm()
  emulate.c:792   ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);    in emulator_recalc_and_set_mode()
  emulate.c:1518  ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);    in get_descriptor_ptr()
  emulate.c:1692  ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);    in __load_segment_descriptor()
  emulate.c:2386  ops->get_msr(ctxt, MSR_EFER, &efer);          in em_syscall()
  emulate.c:2437  ops->get_msr(ctxt, MSR_EFER, &efer);          in em_sysenter()
  emulate.c:3888  ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);    in check_svme()
  x86.c:3868      case MSR_EFER:                               in kvm_set_msr_common()
  x86.c:4349      case MSR_EFER:                               in kvm_get_msr_common()
  x86.c:9684      rdmsrq_safe(MSR_EFER, &kvm_host.efer);       in kvm_x86_vendor_init()
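Note: in KVM, guest reads and writes of MSR_EFER never touch the hardware register; the case MSR_EFER arms above dispatch them to a per-vCPU shadow, while kvm_x86_vendor_init() snapshots the host value once at init. A minimal sketch of the read side (vcpu->arch.efer is the real field; everything else is simplified):

    static u64 sketch_guest_efer_read(struct kvm_vcpu *vcpu)
    {
            /* virtualized copy, as returned by kvm_get_msr_common() */
            return vcpu->arch.efer;
    }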
/arch/x86/kvm/vmx/
  vmx.c:938      case MSR_EFER:                                       in clear_atomic_switch_msr()
  vmx.c:991      case MSR_EFER:                                       in add_atomic_switch_msr()
  vmx.c:1081     add_atomic_switch_msr(vmx, MSR_EFER,                 in update_transition_efer()
  vmx.c:1084     clear_atomic_switch_msr(vmx, MSR_EFER);              in update_transition_efer()
  vmx.c:1088     i = kvm_find_user_return_msr(MSR_EFER);              in update_transition_efer()
  vmx.c:1092     clear_atomic_switch_msr(vmx, MSR_EFER);              in update_transition_efer()
  vmx.c:1854     vmx_setup_uret_msr(vmx, MSR_EFER, update_transition_efer(vmx));   in vmx_setup_uret_msrs()
  vmx.c:1979     case MSR_EFER:                                       in vmx_get_msr()
  vmx.c:2168     case MSR_EFER:                                       in vmx_set_msr()
  vmx.c:3117     if (!vmx_find_uret_msr(vmx, MSR_EFER))               in vmx_set_efer()
  [all …]
  tdx.c:2182     case MSR_EFER:                                             in tdx_has_emulated_msr()
  tdx.c:2216     return index == MSR_IA32_APICBASE || index == MSR_EFER ||  in tdx_is_read_only_msr()
  nested.c:4836  if (vmx->msr_autoload.guest.val[i].index == MSR_EFER)   in nested_vmx_get_vmcs01_guest_efer()
  nested.c:4840  efer_msr = vmx_find_uret_msr(vmx, MSR_EFER);            in nested_vmx_get_vmcs01_guest_efer()
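Note: the vmx.c hits above reflect a two-way choice in update_transition_efer(): if guest and host EFER must differ exactly across every entry/exit, MSR_EFER goes into the VMCS atomic MSR load/store area; otherwise it is handled as a lazily restored user-return MSR. A sketch of that decision; the two helpers are real, but the argument lists and the predicate are simplified for illustration:

    static bool sketch_setup_efer_transition(struct vcpu_vmx *vmx,
                                             u64 guest_efer, u64 host_efer)
    {
            if ((guest_efer ^ host_efer) & EFER_NX) {
                    /* must be exact at all times: switch atomically in the VMCS area */
                    add_atomic_switch_msr(vmx, MSR_EFER, guest_efer, host_efer, false);
                    return false;
            }
            /* cheap path: restore lazily before returning to userspace */
            clear_atomic_switch_msr(vmx, MSR_EFER);
            return kvm_find_user_return_msr(MSR_EFER) >= 0;
    }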
/arch/x86/include/asm/
  msr-index.h:10   #define MSR_EFER 0xc0000080 /* extended feature register */   (macro)
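Note: MSR_EFER is the architectural MSR number (0xc0000080, in the AMD extended range). The same header defines the individual EFER bits, derived from _EFER_* bit positions; a few well-known ones, shown here as expanded masks with the architectural bit positions:

    #define EFER_SCE    (1ULL <<  0)    /* SYSCALL/SYSRET enable */
    #define EFER_LME    (1ULL <<  8)    /* long mode enable */
    #define EFER_LMA    (1ULL << 10)    /* long mode active */
    #define EFER_NX     (1ULL << 11)    /* no-execute enable */
    #define EFER_SVME   (1ULL << 12)    /* AMD SVM enable */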
/arch/x86/kvm/svm/
  svm.c:489      rdmsrq(MSR_EFER, efer);                  in kvm_cpu_svm_disable()
  svm.c:496      wrmsrq(MSR_EFER, efer & ~EFER_SVME);     in kvm_cpu_svm_disable()
  svm.c:525      rdmsrq(MSR_EFER, efer);                  in svm_enable_virtualization_cpu()
  svm.c:535      wrmsrq(MSR_EFER, efer | EFER_SVME);      in svm_enable_virtualization_cpu()
  svm.c:2704     msr_info.index = MSR_EFER;               in efer_trap()
  sev.c:4447     svm_disable_intercept_for_msr(vcpu, MSR_EFER, MSR_TYPE_RW);   in sev_es_recalc_msr_intercepts()
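Note: EFER.SVME is the global gate for AMD SVM; svm.c toggles it per CPU when virtualization is enabled or torn down. A sketch of the pairing, mirroring the hits above:

    static void sketch_svm_cpu_enable(void)
    {
            u64 efer;

            rdmsrq(MSR_EFER, efer);
            wrmsrq(MSR_EFER, efer | EFER_SVME);     /* as in svm_enable_virtualization_cpu() */
    }

    static void sketch_svm_cpu_disable(void)
    {
            u64 efer;

            rdmsrq(MSR_EFER, efer);
            wrmsrq(MSR_EFER, efer & ~EFER_SVME);    /* as in kvm_cpu_svm_disable() */
    }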
