Searched refs:EFER_LME (Results 1 – 10 of 10) sorted by relevance
134 if ( efer & ~(EFER_SCE | EFER_LME | EFER_LMA | EFER_NX | EFER_SVME | in svm_vmcb_isvalid()
141 if ( (efer & EFER_LME) && (cr0 & X86_CR0_PG) ) in svm_vmcb_isvalid()
149 if ( (efer & EFER_LME) && (cr0 & X86_CR0_PG) && (cr4 & X86_CR4_PAE) && in svm_vmcb_isvalid()
610 new_efer = (v->arch.hvm_vcpu.guest_efer | EFER_SVME) & ~EFER_LME; in svm_update_guest_efer()
612 new_efer |= EFER_LME; in svm_update_guest_efer()
188 if ( (regs->efer & EFER_LMA) && !(regs->efer & EFER_LME) ) in arch_set_info_hvm_guest()
239 if ( !(regs->efer & EFER_LME) ) in arch_set_info_hvm_guest()
272 if ( v->arch.hvm_vcpu.guest_efer & EFER_LME ) in arch_set_info_hvm_guest()
901 if ( (value & (EFER_LME | EFER_LMA)) && !p->extd.lm ) in hvm_efer_valid()
904 if ( (value & EFER_LMA) && (!(value & EFER_LME) || !cr0_pg) ) in hvm_efer_valid()
1985 if ( ((value ^ v->arch.hvm_vcpu.guest_efer) & EFER_LME) && in hvm_set_efer()
1993 if ( (value & EFER_LME) && !(v->arch.hvm_vcpu.guest_efer & EFER_LME) ) in hvm_set_efer()
2229 if ( v->arch.hvm_vcpu.guest_efer & EFER_LME ) in hvm_set_cr0()
4847 v->arch.hvm_vcpu.guest_efer &= ~(EFER_LMA | EFER_LME); in hvm_set_mode()
4850 v->arch.hvm_vcpu.guest_efer |= (EFER_LMA | EFER_LME); in hvm_set_mode()
162 andl $~EFER_LME, %eax
138 or $EFER_LME|EFER_SCE,%eax /* Long Mode + SYSCALL/SYSRET */
27 #define EFER_LME (1<<_EFER_LME) macro
1182 v->arch.hvm_vcpu.guest_efer |= EFER_LMA | EFER_LME; in virtual_vmentry()
1184 v->arch.hvm_vcpu.guest_efer &= ~(EFER_LMA | EFER_LME); in virtual_vmentry()
1385 v->arch.hvm_vcpu.guest_efer |= EFER_LMA | EFER_LME; in virtual_vmexit()
1387 v->arch.hvm_vcpu.guest_efer &= ~(EFER_LMA | EFER_LME); in virtual_vmexit()
507 if ( (*val & EFER_LME) && (c->cr[4] & X86_CR4_PAE) && in fuzz_read_msr()
896 *val &= ~(EFER_LME | EFER_LMA | EFER_LMSLE); in read_msr()
Completed in 26 milliseconds