Searched refs: X86_CR0_PG (Results 1 – 14 of 14) sorted by relevance
115 if ( (cr0 & X86_CR0_PG) && in svm_vmcb_isvalid()
141 if ( (efer & EFER_LME) && (cr0 & X86_CR0_PG) ) in svm_vmcb_isvalid()
149 if ( (efer & EFER_LME) && (cr0 & X86_CR0_PG) && (cr4 & X86_CR4_PAE) && in svm_vmcb_isvalid()
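The svm_vmcb_isvalid() hits above all enforce architectural consistency between CR0.PG, EFER.LME and CR4.PAE: paging may only be enabled together with protected mode, and long mode additionally requires PAE. Below is a minimal stand-alone sketch of that kind of check; the bit constants match the x86 architecture, but the function name and structure are hypothetical, not Xen's.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define X86_CR0_PE  0x00000001UL  /* Protected mode enable */
    #define X86_CR0_PG  0x80000000UL  /* Paging enable */
    #define X86_CR4_PAE 0x00000020UL  /* Physical address extension */
    #define EFER_LME    (1UL << 8)    /* Long mode enable */

    /* Hypothetical helper: returns true if the register combination is
     * architecturally consistent, mirroring the kind of checks the
     * svm_vmcb_isvalid() hits perform. */
    static bool cr_state_valid(uint64_t cr0, uint64_t cr4, uint64_t efer)
    {
        /* CR0.PG requires CR0.PE: paging without protected mode is illegal. */
        if ( (cr0 & X86_CR0_PG) && !(cr0 & X86_CR0_PE) )
            return false;

        /* Long mode active (EFER.LME && CR0.PG) is only valid with CR4.PAE. */
        if ( (efer & EFER_LME) && (cr0 & X86_CR0_PG) && !(cr4 & X86_CR4_PAE) )
            return false;

        return true;
    }

    int main(void)
    {
        /* Legal: 64-bit long mode. */
        printf("%d\n", cr_state_valid(X86_CR0_PE | X86_CR0_PG, X86_CR4_PAE, EFER_LME));
        /* Illegal: PG set without PE. */
        printf("%d\n", cr_state_valid(X86_CR0_PG, X86_CR4_PAE, 0));
        return 0;
    }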
295 if ( c->cr0 & X86_CR0_PG ) in svm_vmcb_restore()
307 if ( v->arch.hvm_vcpu.guest_cr[0] & X86_CR0_PG ) in svm_vmcb_restore()
556 value |= X86_CR0_PG | X86_CR0_WP; in svm_update_guest_cr()
58 orl $(X86_CR0_PG | X86_CR0_PE), %eax
156 andl $~X86_CR0_PG, %eax
43 #define X86_CR0_PG 0x80000000 /* Paging (RW) */ macro
144 mov $(X86_CR0_PG | X86_CR0_AM | X86_CR0_WP | X86_CR0_NE |\
372 and $(~X86_CR0_PG),%edx
508 (c->cr[0] & X86_CR0_PG) ) in fuzz_read_msr()
783 if ( c->cr[0] & X86_CR0_PG ) in sanitize_input()
928 X86_CR0_CD | X86_CR0_PG)))
993 ((ctxt.cr0 & (X86_CR0_PE|X86_CR0_PG)) == X86_CR0_PG) ) in hvm_load_cpu_ctxt()
1007 errstr = hvm_efer_valid(v, ctxt.msr_efer, MASK_EXTR(ctxt.cr0, X86_CR0_PG)); in hvm_load_cpu_ctxt()
2209 (value & (X86_CR0_PE | X86_CR0_PG)) == X86_CR0_PG ) in hvm_set_cr0()
2227 if ( (value & X86_CR0_PG) && !(old_value & X86_CR0_PG) ) in hvm_set_cr0()
2262 else if ( !(value & X86_CR0_PG) && (old_value & X86_CR0_PG) ) in hvm_set_cr0()
2294 if ( (value ^ old_value) & X86_CR0_PG ) { in hvm_set_cr0()
3879 if ( v->arch.hvm_vcpu.guest_cr[0] & X86_CR0_PG ) in hvm_vcpu_reset_state()
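The hvm_set_cr0() hits show the two recurring patterns for handling CR0 writes: rejecting the PG-without-PE combination up front, then comparing the old and new values to catch paging being switched on or off. A condensed, stand-alone sketch of that control flow follows; the names and the puts() placeholders are hypothetical, and Xen's real function additionally handles TLB flushes, nested virtualization, and more.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define X86_CR0_PE 0x00000001UL
    #define X86_CR0_PG 0x80000000UL

    /* Hypothetical sketch of a CR0 write handler's paging logic. */
    static bool set_cr0(uint64_t *guest_cr0, uint64_t value)
    {
        uint64_t old_value = *guest_cr0;

        /* PG=1 with PE=0 is an invalid combination; fault the write. */
        if ( (value & (X86_CR0_PE | X86_CR0_PG)) == X86_CR0_PG )
            return false;

        if ( (value & X86_CR0_PG) && !(old_value & X86_CR0_PG) )
            puts("paging enabled: would validate and load guest CR3 here");
        else if ( !(value & X86_CR0_PG) && (old_value & X86_CR0_PG) )
            puts("paging disabled: would drop cached translations here");

        *guest_cr0 = value;

        /* Any PG flip changes the paging mode, so refresh derived state. */
        if ( (value ^ old_value) & X86_CR0_PG )
            puts("paging mode changed: would update paging modes here");

        return true;
    }

    int main(void)
    {
        uint64_t cr0 = X86_CR0_PE;

        set_cr0(&cr0, X86_CR0_PE | X86_CR0_PG);   /* enable paging */
        set_cr0(&cr0, X86_CR0_PE);                /* disable paging */
        printf("reject PG without PE: %d\n", set_cr0(&cr0, X86_CR0_PG));
        return 0;
    }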
225 if ( !(regs->cr0 & X86_CR0_PG) ) in arch_set_info_hvm_guest()
690 if ( !(curr->arch.hvm_vcpu.guest_cr[0] & X86_CR0_PG) ) in hvmemul_linear_to_phys()
286 (!!((v)->arch.hvm_vcpu.guest_cr[0] & X86_CR0_PG))
245 X86_CR0_AM | X86_CR0_PG); in efi_arch_post_exit_boot()
772 if ( cr0 & X86_CR0_PG ) in vmx_restore_cr0_cr3()
1551 hw_cr0_mask |= X86_CR0_PG | X86_CR0_PE; in vmx_update_guest_cr()
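The vmx_update_guest_cr() hit illustrates CR0 shadowing: when the CPU cannot run a guest with paging off, the hypervisor keeps PG and PE forced on in the hardware CR0 while reporting the guest's own value back on reads. A minimal sketch of that split is below; the names are hypothetical, and real VMX expresses this through the CR0 read-shadow and guest/host mask VMCS fields.

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    #define X86_CR0_PE 0x00000001UL
    #define X86_CR0_PG 0x80000000UL

    /* Hypothetical sketch: bits in hw_cr0_mask are owned by the host, so
     * the hardware value is forced on while the guest still reads back
     * its own value via the shadow. */
    static void update_guest_cr0(uint64_t guest_cr0, uint64_t *hw_cr0,
                                 uint64_t *cr0_read_shadow)
    {
        /* Without unrestricted-guest support, PG and PE stay set in hardware. */
        uint64_t hw_cr0_mask = X86_CR0_PG | X86_CR0_PE;

        *hw_cr0 = guest_cr0 | hw_cr0_mask;   /* what the CPU actually runs with */
        *cr0_read_shadow = guest_cr0;        /* what the guest sees on reads */
    }

    int main(void)
    {
        uint64_t hw_cr0, shadow;

        update_guest_cr0(0 /* guest believes paging is off */, &hw_cr0, &shadow);
        printf("hw_cr0=%#" PRIx64 " shadow=%#" PRIx64 "\n", hw_cr0, shadow);
        return 0;
    }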
2129 data = X86_CR0_PE | X86_CR0_PG; in nvmx_msr_read_intercept()
Completed in 38 milliseconds