Searched for refs:MSR_EFER (results 1 – 25 of 25), sorted by relevance
/arch/x86/kernel/acpi/
  sleep.c
      85  if (!rdmsr_safe(MSR_EFER,    in x86_acpi_suspend_lowlevel()
      88  !wrmsr_safe(MSR_EFER,    in x86_acpi_suspend_lowlevel()

/arch/x86/boot/startup/
  efi-mixed.S
      74  movl $MSR_EFER, %ecx
     114  movl $MSR_EFER, %ecx
  la57toggle.S
      74  movl $MSR_EFER, %ecx

/arch/x86/realmode/rm/
  reboot.S
      40  movl $MSR_EFER, %ecx
  wakeup_asm.S
     123  movl $MSR_EFER, %ecx
  trampoline_64.S
     164  movl $MSR_EFER, %ecx

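The assembly hits above (and the similar ones in the boot, PVH and compressed-kernel paths below) all follow the same MSR access convention: the EFER MSR number is loaded into %ecx, and rdmsr/wrmsr move the 64-bit value through %edx:%eax. A minimal freestanding C sketch of that protocol, not taken from any of the files listed here:

    #define MSR_EFER 0xc0000080    /* same value as the kernel macro below */

    /* Read EFER: rdmsr returns the MSR selected by %ecx in %edx:%eax. */
    static inline unsigned long long read_efer(void)
    {
            unsigned int lo, hi;
            asm volatile("rdmsr" : "=a" (lo), "=d" (hi) : "c" (MSR_EFER));
            return ((unsigned long long)hi << 32) | lo;
    }

    /* Write EFER: wrmsr takes the new value in %edx:%eax, MSR number in %ecx. */
    static inline void write_efer(unsigned long long val)
    {
            asm volatile("wrmsr" : : "c" (MSR_EFER),
                         "a" ((unsigned int)val),
                         "d" ((unsigned int)(val >> 32)));
    }
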
/arch/x86/power/
  cpu.c
     119  rdmsrq(MSR_EFER, ctxt->efer);    in __save_processor_state()
     212  wrmsrq(MSR_EFER, ctxt->efer);    in __restore_processor_state()

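The cpu.c hits capture EFER before suspend and write it back on resume. A hedged sketch of that save/restore pattern, using the rdmsrq()/wrmsrq() accessors visible above; 'saved_efer' and the two helper names are illustrative, not the kernel's:

    #include <linux/types.h>
    #include <asm/msr.h>            /* rdmsrq()/wrmsrq() */
    #include <asm/msr-index.h>      /* MSR_EFER */

    static u64 saved_efer;

    static void efer_save(void)
    {
            rdmsrq(MSR_EFER, saved_efer);    /* capture EFER before sleep */
    }

    static void efer_restore(void)
    {
            wrmsrq(MSR_EFER, saved_efer);    /* write it back on resume */
    }
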
/arch/x86/realmode/
  init.c
     151  rdmsrq(MSR_EFER, efer);    in setup_real_mode()

/arch/x86/hyperv/
  hv_vtl.c
     158  input->vp_context.efer = native_rdmsrq(MSR_EFER);    in hv_vtl_bringup_vcpu()
  ivm.c
     330  vmsa->efer = native_read_msr(MSR_EFER);    in hv_snp_boot_ap()

/arch/x86/platform/pvh/
  head.S
     101  mov $MSR_EFER, %ecx

/arch/x86/kernel/
  head_32.S
     213  movl $MSR_EFER, %ecx
  head_64.S
     384  movl $MSR_EFER, %ecx

/arch/x86/kernel/cpu/
  amd.c
    1143  WARN_ON_ONCE(msr_set_bit(MSR_EFER, _EFER_AUTOIBRS) < 0);    in init_amd()
    1150  msr_set_bit(MSR_EFER, _EFER_TCE);    in init_amd()
  bugs.c
    2411  msr_set_bit(MSR_EFER, _EFER_AUTOIBRS);    in spectre_v2_apply_mitigation()

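The amd.c and bugs.c hits use msr_set_bit() to flip a single EFER bit (AUTOIBRS, TCE) read-modify-write style. A hedged sketch of that usage; judging from the WARN_ON_ONCE(... < 0) hit above, a negative return appears to indicate failure, which this sketch assumes:

    #include <linux/printk.h>
    #include <asm/msr.h>
    #include <asm/msr-index.h>

    static void enable_autoibrs(void)
    {
            /* Set EFER.AUTOIBRS; assume a negative return means the write failed. */
            if (msr_set_bit(MSR_EFER, _EFER_AUTOIBRS) < 0)
                    pr_warn("failed to set EFER.AUTOIBRS\n");
    }
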
/arch/x86/boot/compressed/
  head_64.S
     238  movl $MSR_EFER, %ecx

/arch/x86/kvm/
  smm.c
     532  if (kvm_set_msr(vcpu, MSR_EFER, smstate->efer & ~EFER_LMA))    in rsm_load_state_64()
     623  kvm_set_msr(vcpu, MSR_EFER, efer);    in emulator_leave_smm()
  emulate.c
     792  ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);    in emulator_recalc_and_set_mode()
    1518  ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);    in get_descriptor_ptr()
    1692  ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);    in __load_segment_descriptor()
    2386  ops->get_msr(ctxt, MSR_EFER, &efer);    in em_syscall()
    2437  ops->get_msr(ctxt, MSR_EFER, &efer);    in em_sysenter()
    3888  ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);    in check_svme()
  x86.c
    3868  case MSR_EFER:    in kvm_set_msr_common()
    4349  case MSR_EFER:    in kvm_get_msr_common()
    9684  rdmsrq_safe(MSR_EFER, &kvm_host.efer);    in kvm_x86_vendor_init()

/arch/x86/kvm/vmx/
  vmx.c
     938  case MSR_EFER:    in clear_atomic_switch_msr()
     991  case MSR_EFER:    in add_atomic_switch_msr()
    1081  add_atomic_switch_msr(vmx, MSR_EFER,    in update_transition_efer()
    1084  clear_atomic_switch_msr(vmx, MSR_EFER);    in update_transition_efer()
    1088  i = kvm_find_user_return_msr(MSR_EFER);    in update_transition_efer()
    1092  clear_atomic_switch_msr(vmx, MSR_EFER);    in update_transition_efer()
    1854  vmx_setup_uret_msr(vmx, MSR_EFER, update_transition_efer(vmx));    in vmx_setup_uret_msrs()
    1979  case MSR_EFER:    in vmx_get_msr()
    2168  case MSR_EFER:    in vmx_set_msr()
    3117  if (!vmx_find_uret_msr(vmx, MSR_EFER))    in vmx_set_efer()
    [all …]
  tdx.c
    2182  case MSR_EFER:    in tdx_has_emulated_msr()
    2216  return index == MSR_IA32_APICBASE || index == MSR_EFER ||    in tdx_is_read_only_msr()
  nested.c
    4836  if (vmx->msr_autoload.guest.val[i].index == MSR_EFER)    in nested_vmx_get_vmcs01_guest_efer()
    4840  efer_msr = vmx_find_uret_msr(vmx, MSR_EFER);    in nested_vmx_get_vmcs01_guest_efer()

/arch/x86/include/asm/
  msr-index.h
      10  #define MSR_EFER 0xc0000080 /* extended feature register */    (macro)

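For orientation, the EFER bit names that appear in the hits above (_EFER_AUTOIBRS, _EFER_TCE, EFER_LMA, EFER_SVME) sit next to this definition in msr-index.h. The architectural bit positions, written out as a sketch rather than copied verbatim from the header:

    #define _EFER_SCE        0    /* SYSCALL/SYSRET enable */
    #define _EFER_LME        8    /* long mode enable */
    #define _EFER_LMA       10    /* long mode active (status bit) */
    #define _EFER_NXE       11    /* no-execute enable */
    #define _EFER_SVME      12    /* SVM (AMD-V) enable */
    #define _EFER_TCE       15    /* translation cache extension */
    #define _EFER_AUTOIBRS  21    /* automatic IBRS */
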
/arch/x86/kvm/svm/
  svm.c
     489  rdmsrq(MSR_EFER, efer);    in kvm_cpu_svm_disable()
     496  wrmsrq(MSR_EFER, efer & ~EFER_SVME);    in kvm_cpu_svm_disable()
     525  rdmsrq(MSR_EFER, efer);    in svm_enable_virtualization_cpu()
     535  wrmsrq(MSR_EFER, efer | EFER_SVME);    in svm_enable_virtualization_cpu()
    2704  msr_info.index = MSR_EFER;    in efer_trap()
  sev.c
    4447  svm_disable_intercept_for_msr(vcpu, MSR_EFER, MSR_TYPE_RW);    in sev_es_recalc_msr_intercepts()

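The svm.c hits read EFER and then either set or clear EFER.SVME to turn AMD-V support on or off for the CPU. A hedged sketch of that read-modify-write pattern; the function name is illustrative and not the kernel's:

    #include <linux/types.h>
    #include <asm/msr.h>
    #include <asm/msr-index.h>

    static void set_svme(bool enable)
    {
            u64 efer;

            rdmsrq(MSR_EFER, efer);
            if (enable)
                    wrmsrq(MSR_EFER, efer | EFER_SVME);     /* enable SVM */
            else if (efer & EFER_SVME)
                    wrmsrq(MSR_EFER, efer & ~EFER_SVME);    /* disable SVM */
    }
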
Completed in 123 milliseconds