
Searched refs:efer (Results 1 – 25 of 26) sorted by relevance


/arch/x86/realmode/
init.c  103  u64 efer; in setup_real_mode() local
151 rdmsrq(MSR_EFER, efer); in setup_real_mode()
152 trampoline_header->efer = efer & ~EFER_LMA; in setup_real_mode()
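What these hits show is setup_real_mode() snapshotting the boot CPU's EFER and storing it with the LMA bit cleared, since the trampoline starts in real mode and long-mode-active is set by hardware rather than by software. A minimal userspace sketch of that masking, with the EFER bit positions written out locally rather than pulled from kernel headers (the trampoline_efer() helper is illustrative, not kernel code):

#include <stdint.h>
#include <stdio.h>

#define EFER_SCE (1ULL << 0)    /* SYSCALL enable                    */
#define EFER_LME (1ULL << 8)    /* long mode enable                  */
#define EFER_LMA (1ULL << 10)   /* long mode active (hardware-owned) */
#define EFER_NX  (1ULL << 11)   /* no-execute enable                 */

/* Value handed to the real-mode trampoline: everything the boot CPU had,
 * minus LMA, which the AP must not see set before it re-enables paging. */
static uint64_t trampoline_efer(uint64_t boot_efer)
{
	return boot_efer & ~EFER_LMA;
}

int main(void)
{
	uint64_t boot_efer = EFER_SCE | EFER_LME | EFER_LMA | EFER_NX;

	printf("boot EFER %#llx -> trampoline EFER %#llx\n",
	       (unsigned long long)boot_efer,
	       (unsigned long long)trampoline_efer(boot_efer));
	return 0;
}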
/arch/x86/include/asm/
suspend_64.h  45  unsigned long efer; member
realmode.h  52  u64 efer;
svm.h  329  u64 efer; member
385 u64 efer; member
kvm_host.h  799  u64 efer; member
1743 int (*set_efer)(struct kvm_vcpu *vcpu, u64 efer);
2151 bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer);
/arch/x86/kvm/
smm.c  82  CHECK_SMRAM64_OFFSET(efer, 0xFED0); in check_smram_offsets()
250 smram->efer = vcpu->arch.efer; in enter_smm_save_state_64()
532 if (kvm_set_msr(vcpu, MSR_EFER, smstate->efer & ~EFER_LMA)) in rsm_load_state_64()
614 unsigned long cr4, efer; in emulator_leave_smm() local
622 efer = 0; in emulator_leave_smm()
623 kvm_set_msr(vcpu, MSR_EFER, efer); in emulator_leave_smm()
smm.h  104  u64 efer; member
emulate.c  787  u64 efer; in emulator_recalc_and_set_mode() local
796 if (efer & EFER_LMA) in emulator_recalc_and_set_mode()
804 if (efer & EFER_LMA) in emulator_recalc_and_set_mode()
813 if (efer & EFER_LMA) { in emulator_recalc_and_set_mode()
1516 u64 efer = 0; in get_descriptor_ptr() local
1690 u64 efer = 0; in __load_segment_descriptor() local
1693 if (efer & EFER_LMA) in __load_segment_descriptor()
2368 u64 efer = 0; in em_syscall() local
2435 u64 efer = 0; in em_sysenter() local
2476 if (efer & EFER_LMA) in em_sysenter()
[all …]
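All of the emulate.c branches above key off EFER_LMA when recomputing the emulator's execution mode. A simplified stand-alone model of that decision (the mode names and the cs_l flag are illustrative; KVM's emulator tracks more state than this):

#include <stdbool.h>
#include <stdint.h>

#define X86_CR0_PE (1ULL << 0)
#define EFER_LMA   (1ULL << 10)

enum x86_mode { MODE_REAL, MODE_PROT32, MODE_COMPAT, MODE_LONG64 };

/* Roughly what emulator_recalc_and_set_mode() has to decide: long mode is in
 * force only when EFER.LMA is set, and CS.L then picks 64-bit versus
 * compatibility mode.  (VM86 and other corner cases are ignored here.)      */
static enum x86_mode guess_mode(uint64_t cr0, uint64_t efer, bool cs_l)
{
	if (!(cr0 & X86_CR0_PE))
		return MODE_REAL;
	if (efer & EFER_LMA)
		return cs_l ? MODE_LONG64 : MODE_COMPAT;
	return MODE_PROT32;
}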
x86.h  50  u64 efer; member
222 return !!(vcpu->arch.efer & EFER_LMA); in is_long_mode()
mmu.h  88  unsigned long cr4, u64 efer, gpa_t nested_cr3);
x86.c  1721  if (efer & efer_reserved_bits) in kvm_valid_efer()
1730 u64 old_efer = vcpu->arch.efer; in set_efer()
1731 u64 efer = msr_info->data; in set_efer() local
1734 if (efer & efer_reserved_bits) in set_efer()
1742 (vcpu->arch.efer & EFER_LME) != (efer & EFER_LME)) in set_efer()
1746 efer &= ~EFER_LMA; in set_efer()
1747 efer |= vcpu->arch.efer & EFER_LMA; in set_efer()
1759 (efer & EFER_SVME)) in set_efer()
11702 sregs->efer = vcpu->arch.efer; in __get_sregs_common()
11906 *mmu_reset_needed |= vcpu->arch.efer != sregs->efer; in __set_sregs_common()
[all …]
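Taken together, the kvm_valid_efer()/set_efer() lines show the write-side rules: reserved bits are rejected, EFER.LME may not be toggled while paging is enabled, and EFER.LMA always tracks the vCPU's current value rather than what the guest wrote. A compressed sketch of that flow (the "known bits" mask covers only what this sketch models, and the boolean return stands in for injecting #GP):

#include <stdbool.h>
#include <stdint.h>

#define EFER_SCE  (1ULL << 0)
#define EFER_LME  (1ULL << 8)
#define EFER_LMA  (1ULL << 10)
#define EFER_NX   (1ULL << 11)
#define EFER_SVME (1ULL << 12)

/* Illustrative mask: anything outside these bits is treated as reserved. */
#define EFER_KNOWN (EFER_SCE | EFER_LME | EFER_LMA | EFER_NX | EFER_SVME)

static bool apply_efer_write(uint64_t *cur_efer, uint64_t new_efer, bool paging_on)
{
	if (new_efer & ~EFER_KNOWN)
		return false;                      /* reserved bit set -> reject     */
	if (paging_on && ((*cur_efer ^ new_efer) & EFER_LME))
		return false;                      /* LME can't change with CR0.PG=1 */

	new_efer &= ~EFER_LMA;                     /* LMA is read-only to the guest  */
	new_efer |= *cur_efer & EFER_LMA;
	*cur_efer = new_efer;
	return true;
}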
/arch/x86/kvm/svm/
nested.c  98  svm->vmcb01.ptr->save.efer, in nested_svm_init_mmu_context()
358 if (CC(!(save->efer & EFER_SVME))) in __nested_vmcb_check_save()
373 if ((save->efer & EFER_LME) && (save->cr0 & X86_CR0_PG)) { in __nested_vmcb_check_save()
384 if (CC(!kvm_valid_efer(vcpu, save->efer))) in __nested_vmcb_check_save()
466 to->efer = from->efer; in __nested_copy_vmcb_save_to_cache()
641 svm_set_efer(vcpu, svm->nested.save.efer); in nested_vmcb02_prepare_save()
993 vmcb01->save.efer = vcpu->arch.efer; in nested_svm_vmrun()
1039 to_save->efer = from_save->efer; in svm_copy_vmrun_state()
1101 vmcb12->save.efer = svm->vcpu.arch.efer; in nested_svm_vmexit()
1219 svm_set_efer(vcpu, vmcb01->save.efer); in nested_svm_vmexit()
[all …]
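The nested-SVM checks gate VMRUN on a self-consistent save area: EFER.SVME must be set, and a save area that claims long mode (EFER.LME together with CR0.PG) has to carry the usual paging prerequisites. A stripped-down version of that test, modelling only the bits visible in the hits above:

#include <stdbool.h>
#include <stdint.h>

#define X86_CR0_PE  (1ULL << 0)
#define X86_CR0_PG  (1ULL << 31)
#define X86_CR4_PAE (1ULL << 5)
#define EFER_LME    (1ULL << 8)
#define EFER_SVME   (1ULL << 12)

static bool vmcb_save_area_plausible(uint64_t efer, uint64_t cr0, uint64_t cr4)
{
	if (!(efer & EFER_SVME))
		return false;                   /* VMRUN requires SVME in the save area */
	if ((efer & EFER_LME) && (cr0 & X86_CR0_PG))
		return (cr4 & X86_CR4_PAE) &&   /* long mode needs PAE paging...        */
		       (cr0 & X86_CR0_PE);      /* ...and protected mode                */
	return true;
}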
svm.c  213  vcpu->arch.efer = efer; in svm_set_efer()
217 efer |= EFER_NX; in svm_set_efer()
219 if (!(efer & EFER_LMA)) in svm_set_efer()
220 efer &= ~EFER_LME; in svm_set_efer()
256 svm->vmcb->save.efer = efer | EFER_SVME; in svm_set_efer()
486 uint64_t efer; in kvm_cpu_svm_disable() local
489 rdmsrq(MSR_EFER, efer); in kvm_cpu_svm_disable()
490 if (efer & EFER_SVME) { in kvm_cpu_svm_disable()
522 uint64_t efer; in svm_enable_virtualization_cpu() local
525 rdmsrq(MSR_EFER, efer); in svm_enable_virtualization_cpu()
[all …]
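svm_set_efer() massages the guest value before it reaches the VMCB: EFER.LME is dropped when LMA is clear, and the value actually written always carries EFER_SVME because the CPU refuses to run guests without it. A minimal model of that adjustment (constants defined locally; the EFER_NX forcing at line 217 is conditional in the kernel and left out here):

#include <stdint.h>

#define EFER_LME  (1ULL << 8)
#define EFER_LMA  (1ULL << 10)
#define EFER_SVME (1ULL << 12)

static uint64_t vmcb_efer(uint64_t guest_efer)
{
	if (!(guest_efer & EFER_LMA))
		guest_efer &= ~EFER_LME;   /* stale LME without LMA is dropped    */
	return guest_efer | EFER_SVME;     /* hardware needs SVME while in guest  */
}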
svm.h  141  u64 efer; member
691 int svm_set_efer(struct kvm_vcpu *vcpu, u64 efer);
/arch/x86/power/
cpu.c  119  rdmsrq(MSR_EFER, ctxt->efer); in __save_processor_state()
212 wrmsrq(MSR_EFER, ctxt->efer); in __restore_processor_state()
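Suspend simply stashes MSR_EFER in the saved context and writes it back on resume. For inspecting the same register from userspace, the msr module exposes it through /dev/cpu/N/msr, where a pread() at the MSR number returns the 64-bit value; a small reader (needs root and the msr module loaded):

#include <fcntl.h>
#include <stdint.h>
#include <stdio.h>
#include <unistd.h>

#define MSR_EFER  0xc0000080u
#define EFER_LMA  (1ULL << 10)
#define EFER_NX   (1ULL << 11)
#define EFER_SVME (1ULL << 12)

int main(void)
{
	uint64_t efer;
	int fd = open("/dev/cpu/0/msr", O_RDONLY);   /* modprobe msr; run as root */

	if (fd < 0 || pread(fd, &efer, sizeof(efer), MSR_EFER) != sizeof(efer)) {
		perror("read MSR_EFER");
		return 1;
	}
	printf("EFER=%#llx LMA=%d NX=%d SVME=%d\n", (unsigned long long)efer,
	       !!(efer & EFER_LMA), !!(efer & EFER_NX), !!(efer & EFER_SVME));
	close(fd);
	return 0;
}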
/arch/x86/hyperv/
hv_vtl.c  158  input->vp_context.efer = native_rdmsrq(MSR_EFER); in hv_vtl_bringup_vcpu()
ivm.c  330  vmsa->efer = native_read_msr(MSR_EFER); in hv_snp_boot_ap()
/arch/x86/include/uapi/asm/
kvm.h  151  __u64 efer; member
162 __u64 efer; member
/arch/x86/kvm/vmx/
x86_ops.h  71  int vmx_set_efer(struct kvm_vcpu *vcpu, u64 efer);
vmx.c  1052  u64 guest_efer = vmx->vcpu.arch.efer; in update_transition_efer()
1077 (enable_ept && ((vmx->vcpu.arch.efer ^ kvm_host.efer) & EFER_NX))) { in update_transition_efer()
1080 if (guest_efer != kvm_host.efer) in update_transition_efer()
1082 guest_efer, kvm_host.efer, false); in update_transition_efer()
1095 guest_efer |= kvm_host.efer & ignore_bits; in update_transition_efer()
1848 (vmx->vcpu.arch.efer & EFER_SCE); in vmx_setup_uret_msrs()
3120 vcpu->arch.efer = efer; in vmx_set_efer()
3122 if (efer & EFER_LMA) in vmx_set_efer()
3127 if (KVM_BUG_ON(efer & EFER_LMA, vcpu->kvm)) in vmx_set_efer()
3301 if (vcpu->arch.efer & EFER_LME) { in vmx_set_cr0()
[all …]
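update_transition_efer() decides whether EFER really has to be switched atomically at VM entry/exit: bits the guest's behaviour doesn't depend on are copied from the host first, and the switch only happens if the values still differ. A rough model of that comparison (the ignore_bits choice here is illustrative; the kernel's set depends on configuration):

#include <stdbool.h>
#include <stdint.h>

#define EFER_NX (1ULL << 11)

static bool need_efer_switch(uint64_t guest_efer, uint64_t host_efer)
{
	uint64_t ignore_bits = EFER_NX;          /* example don't-care bit           */

	guest_efer &= ~ignore_bits;
	guest_efer |= host_efer & ignore_bits;   /* adopt host's don't-care bits     */
	return guest_efer != host_efer;          /* switch only if they still differ */
}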
nested.c  2261  return vmx->vcpu.arch.efer | (EFER_LMA | EFER_LME); in nested_vmx_calc_efer()
2485 if (guest_efer != kvm_host.efer) in prepare_vmcs02_early()
2730 vmx_set_efer(vcpu, vcpu->arch.efer); in prepare_vmcs02()
3024 !!(vcpu->arch.efer & EFER_LMA))) in nested_vmx_check_address_space_size()
4629 vmcs12->guest_ia32_efer = vcpu->arch.efer; in sync_vmcs02_to_vmcs12()
4706 vcpu->arch.efer = vmcs12->host_ia32_efer; in load_vmcs12_host_state()
4708 vcpu->arch.efer |= (EFER_LMA | EFER_LME); in load_vmcs12_host_state()
4710 vcpu->arch.efer &= ~(EFER_LMA | EFER_LME); in load_vmcs12_host_state()
4711 vmx_set_efer(vcpu, vcpu->arch.efer); in load_vmcs12_host_state()
4833 return kvm_host.efer; in nested_vmx_get_vmcs01_guest_efer()
[all …]
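On a nested VM-exit, load_vmcs12_host_state() rebuilds EFER either from vmcs12's host_ia32_efer (when the VM_EXIT_LOAD_IA32_EFER control is in use) or by forcing LMA|LME on or off to match the host's address-space size, as lines 4706–4710 above show. A condensed model of that choice (parameter names are illustrative):

#include <stdbool.h>
#include <stdint.h>

#define EFER_LME (1ULL << 8)
#define EFER_LMA (1ULL << 10)

static uint64_t efer_after_vmexit(bool load_host_efer, uint64_t host_ia32_efer,
				  bool host_is_64bit, uint64_t cur_efer)
{
	if (load_host_efer)
		return host_ia32_efer;                   /* VM_EXIT_LOAD_IA32_EFER in use */
	if (host_is_64bit)
		return cur_efer | (EFER_LMA | EFER_LME); /* 64-bit host address space     */
	return cur_efer & ~(EFER_LMA | EFER_LME);
}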
main.c  416  static int vt_set_efer(struct kvm_vcpu *vcpu, u64 efer) in vt_set_efer() argument
421 return vmx_set_efer(vcpu, efer); in vt_set_efer()
vmx.h  362  int vmx_set_efer(struct kvm_vcpu *vcpu, u64 efer);
/arch/x86/kvm/mmu/
mmu.c  189  const u64 efer; member
214 BUILD_MMU_ROLE_REGS_ACCESSOR(efer, nx, EFER_NX);
215 BUILD_MMU_ROLE_REGS_ACCESSOR(efer, lma, EFER_LMA);
234 BUILD_MMU_ROLE_ACCESSOR(base, efer, nx);
235 BUILD_MMU_ROLE_ACCESSOR(ext, efer, lma);
252 .efer = vcpu->arch.efer, in vcpu_to_role_regs()
5764 unsigned long cr4, u64 efer, gpa_t nested_cr3) in kvm_init_shadow_npt_mmu() argument
5770 .efer = efer, in kvm_init_shadow_npt_mmu()
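For the MMU, EFER collapses to two role inputs, NX and LMA, which the BUILD_MMU_ROLE_REGS_ACCESSOR lines extract from the captured register set. A toy version of that extraction (the struct and helper names are invented for illustration):

#include <stdint.h>

#define EFER_LMA (1ULL << 10)
#define EFER_NX  (1ULL << 11)

struct mmu_role_bits {
	unsigned int nx  : 1;   /* no-execute enforced in the page tables */
	unsigned int lma : 1;   /* long mode active at capture time       */
};

static struct mmu_role_bits efer_role_bits(uint64_t efer)
{
	return (struct mmu_role_bits){
		.nx  = !!(efer & EFER_NX),
		.lma = !!(efer & EFER_LMA),
	};
}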
/arch/x86/coco/sev/
core.c  977  vmsa->efer = EFER_SVME; in wakeup_cpu_via_vmgexit()
