/xen-4.10.0-shim-comet/xen/arch/x86/hvm/

domain.c
    206  v->arch.hvm_vcpu.guest_cr[0] = regs->cr0;  in arch_set_info_hvm_guest()
    207  v->arch.hvm_vcpu.guest_cr[3] = regs->cr3;  in arch_set_info_hvm_guest()
    208  v->arch.hvm_vcpu.guest_cr[4] = regs->cr4;  in arch_set_info_hvm_guest()
    257  v->arch.hvm_vcpu.guest_cr[0] = regs->cr0;  in arch_set_info_hvm_guest()
    258  v->arch.hvm_vcpu.guest_cr[3] = regs->cr3;  in arch_set_info_hvm_guest()
    259  v->arch.hvm_vcpu.guest_cr[4] = regs->cr4;  in arch_set_info_hvm_guest()
    275  if ( v->arch.hvm_vcpu.guest_cr[4] & ~hvm_cr4_guest_valid_bits(v, 0) )  in arch_set_info_hvm_guest()
    278  v->arch.hvm_vcpu.guest_cr[4]);  in arch_set_info_hvm_guest()
    299  v->arch.hvm_vcpu.guest_cr[3] >> PAGE_SHIFT,  in arch_set_info_hvm_guest()
    304  v->arch.hvm_vcpu.guest_cr[3]);  in arch_set_info_hvm_guest()

hvm.c
     501  info->cr2 = v->arch.hvm_vcpu.guest_cr[2];  in hvm_get_pending_event()
    2124  val = curr->arch.hvm_vcpu.guest_cr[cr];  in hvm_mov_from_cr()
    2181  v->arch.hvm_vcpu.guest_cr[cr] = value;  in hvm_update_cr()
    2326  (value != v->arch.hvm_vcpu.guest_cr[3]) )  in hvm_set_cr3()
    2341  v->arch.hvm_vcpu.guest_cr[3] = value;  in hvm_set_cr3()
    2374  old_cr = v->arch.hvm_vcpu.guest_cr[4];  in hvm_set_cr4()
    3070  v->arch.hvm_vcpu.guest_cr[0] |= X86_CR0_TS;  in hvm_task_switch()
    3900  v->arch.hvm_vcpu.guest_cr[0] = X86_CR0_ET;  in hvm_vcpu_reset_state()
    3903  v->arch.hvm_vcpu.guest_cr[2] = 0;  in hvm_vcpu_reset_state()
    3906  v->arch.hvm_vcpu.guest_cr[3] = 0;  in hvm_vcpu_reset_state()
    [all …]

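The hvm.c hits above are the common write path: hvm_update_cr() stores the value a guest writes into the guest_cr[] cache, hvm_set_cr3() compares the new value against the cached copy before doing any paging work (line 2326), and hvm_vcpu_reset_state() puts the registers back to their architectural reset values. A minimal sketch of that cache-then-update pattern follows; revalidate_cr3() is a hypothetical stand-in for Xen's real, mode-dependent paging plumbing, not an actual Xen function.

    /* Illustrative model only: compare against the cached guest view
     * first, do expensive work only on a real change, then store the
     * guest's view of CR3. */
    extern void revalidate_cr3(unsigned long value);   /* hypothetical */

    static void model_set_cr3(unsigned long guest_cr[5], unsigned long value)
    {
        if ( value != guest_cr[3] )      /* cf. hvm.c:2326 */
            revalidate_cr3(value);       /* hypothetical paging update */
        guest_cr[3] = value;             /* cf. hvm.c:2341 */
    }
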
emulate.c
     690  if ( !(curr->arch.hvm_vcpu.guest_cr[0] & X86_CR0_PG) )  in hvmemul_linear_to_phys()
    1768  *val = current->arch.hvm_vcpu.guest_cr[reg];  in hvmemul_read_cr()
    1793  current->arch.hvm_vcpu.guest_cr[2] = val;  in hvmemul_write_cr()

/xen-4.10.0-shim-comet/xen/include/asm-x86/hvm/

vcpu.h
    134  unsigned long guest_cr[5];  member
    148  unsigned long guest_cr[5];  member

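Both guest_cr[5] members above (the second belongs to the nested-virtualization vCPU state used by nestedhvm.h below) cache the guest's view of its control registers, indexed by register number. Only indices 0, 2, 3 and 4 appear anywhere in this listing, consistent with x86 defining no usable CR1. A labelled sketch of the layout, together with the hw_cr[] companion array that shows up in the hap.c hits further down (illustrative only, not the Xen declaration verbatim):

    struct hvm_vcpu_model {
        unsigned long guest_cr[5];  /* [0]=CR0 [1]=unused [2]=CR2
                                     * [3]=CR3 [4]=CR4: the guest's
                                     * view of its registers */
        unsigned long hw_cr[5];     /* what is actually loaded into
                                     * hardware (cf. hap.c:674 below) */
    };
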
hvm.h
    286  (!!((v)->arch.hvm_vcpu.guest_cr[0] & X86_CR0_PG))
    288  (!!((v)->arch.hvm_vcpu.guest_cr[0] & X86_CR0_WP))
    290  (!!((v)->arch.hvm_vcpu.guest_cr[4] & X86_CR4_PCIDE))
    292  (hvm_paging_enabled(v) && ((v)->arch.hvm_vcpu.guest_cr[4] & X86_CR4_PAE))
    294  (hvm_paging_enabled(v) && ((v)->arch.hvm_vcpu.guest_cr[4] & X86_CR4_SMEP))
    296  (hvm_paging_enabled(v) && ((v)->arch.hvm_vcpu.guest_cr[4] & X86_CR4_SMAP))
    302  (hvm_paging_enabled(v) && ((v)->arch.hvm_vcpu.guest_cr[4] & X86_CR4_PKE))

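The hvm.h lines are the bodies of single-bit predicate macros built on the cached registers; the listing drops the #define lines themselves, so the names below are inferred from the bits each body tests. Note the deliberate asymmetry: the CR0 and PCIDE checks stand alone, while the PAE/SMEP/SMAP/PKE checks only count once paging is enabled.

    /* Plausible reconstruction (names inferred, bodies as listed): */
    #define hvm_paging_enabled(v) \
        (!!((v)->arch.hvm_vcpu.guest_cr[0] & X86_CR0_PG))     /* hvm.h:286 */
    #define hvm_wp_enabled(v) \
        (!!((v)->arch.hvm_vcpu.guest_cr[0] & X86_CR0_WP))     /* hvm.h:288 */
    #define hvm_pcid_enabled(v) \
        (!!((v)->arch.hvm_vcpu.guest_cr[4] & X86_CR4_PCIDE))  /* hvm.h:290 */
    #define hvm_pae_enabled(v) \
        (hvm_paging_enabled(v) && \
         ((v)->arch.hvm_vcpu.guest_cr[4] & X86_CR4_PAE))      /* hvm.h:292 */
    /* ... and likewise for the SMEP, SMAP and PKE checks. */
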
nestedhvm.h
     80  v->arch.hvm_vcpu.nvcpu.guest_cr[cr] = value;  in nestedhvm_set_cr()

/xen-4.10.0-shim-comet/xen/arch/x86/hvm/vmx/

realmode.c
    118  if ( curr->arch.hvm_vcpu.guest_cr[0] & X86_CR0_PE )  in vmx_realmode_emulate_one()
    132  else if ( curr->arch.hvm_vcpu.guest_cr[0] & X86_CR0_PE )  in vmx_realmode_emulate_one()
    171  if ( !(curr->arch.hvm_vcpu.guest_cr[0] & X86_CR0_PE) &&  in vmx_realmode()

vmx.c
     739  c->cr0 = v->arch.hvm_vcpu.guest_cr[0];  in vmx_vmcs_save()
     740  c->cr2 = v->arch.hvm_vcpu.guest_cr[2];  in vmx_vmcs_save()
     741  c->cr3 = v->arch.hvm_vcpu.guest_cr[3];  in vmx_vmcs_save()
     742  c->cr4 = v->arch.hvm_vcpu.guest_cr[4];  in vmx_vmcs_save()
     790  v->arch.hvm_vcpu.guest_cr[0] = cr0 | X86_CR0_ET;  in vmx_restore_cr0_cr3()
     791  v->arch.hvm_vcpu.guest_cr[3] = cr3;  in vmx_restore_cr0_cr3()
     825  v->arch.hvm_vcpu.guest_cr[2] = c->cr2;  in vmx_vmcs_restore()
     826  v->arch.hvm_vcpu.guest_cr[4] = c->cr4;  in vmx_vmcs_restore()
    1468  unsigned long cr3 = v->arch.hvm_vcpu.guest_cr[3];  in vmx_load_pdptrs()
    1864  curr->arch.hvm_vcpu.guest_cr[2] = _event.cr2;  in vmx_inject_event()
    [all …]

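vmx_vmcs_save()/vmx_vmcs_restore() above, and svm_vmcb_save()/svm_vmcb_restore() in the SVM section below, copy the cached registers to and from a vendor-neutral context record used for save/restore and migration; both restore paths force CR0.ET on. A sketch of the round-trip under assumed names (the struct and function names here are illustrative; X86_CR0_ET is defined locally for self-containedness):

    #define X86_CR0_ET 0x00000010UL   /* CR0 bit 4, forced on restore */

    struct hvm_hw_cpu_model { unsigned long cr0, cr2, cr3, cr4; };

    static void model_save(const unsigned long guest_cr[5],
                           struct hvm_hw_cpu_model *c)
    {
        c->cr0 = guest_cr[0];              /* cf. vmx.c:739, svm.c:246 */
        c->cr2 = guest_cr[2];
        c->cr3 = guest_cr[3];
        c->cr4 = guest_cr[4];
    }

    static void model_restore(unsigned long guest_cr[5],
                              const struct hvm_hw_cpu_model *c)
    {
        guest_cr[0] = c->cr0 | X86_CR0_ET; /* cf. vmx.c:790, svm.c:314 */
        guest_cr[2] = c->cr2;
        guest_cr[3] = c->cr3;
        guest_cr[4] = c->cr4;
    }
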
vvmx.c
     390  if ( !(v->arch.hvm_vcpu.guest_cr[0] & X86_CR0_PE) ||  in vmx_inst_check_privilege()
     391  !(v->arch.hvm_vcpu.guest_cr[4] & X86_CR4_VMXE) )  in vmx_inst_check_privilege()
    1063  nvcpu->guest_cr[0] = get_vvmcs(v, CR0_READ_SHADOW);  in load_shadow_guest_state()
    1064  nvcpu->guest_cr[4] = get_vvmcs(v, CR4_READ_SHADOW);  in load_shadow_guest_state()
    2621  v->arch.hvm_vcpu.guest_cr[cr] &= ~virtual_cr_mask;  in nvmx_set_cr_read_shadow()
    2622  v->arch.hvm_vcpu.guest_cr[cr] |= virtual_cr_mask &  in nvmx_set_cr_read_shadow()
    2627  __vmwrite(read_shadow_field, v->arch.hvm_vcpu.nvcpu.guest_cr[cr]);  in nvmx_set_cr_read_shadow()

vmcs.c
    1232  v->arch.hvm_vcpu.guest_cr[0] = X86_CR0_PE | X86_CR0_ET;  in construct_vmcs()
    1235  v->arch.hvm_vcpu.guest_cr[4] = 0;  in construct_vmcs()

/xen-4.10.0-shim-comet/xen/arch/x86/x86_64/

traps.c
    112  fault_crs[0] = v->arch.hvm_vcpu.guest_cr[0];  in show_registers()
    113  fault_crs[2] = v->arch.hvm_vcpu.guest_cr[2];  in show_registers()
    114  fault_crs[3] = v->arch.hvm_vcpu.guest_cr[3];  in show_registers()
    115  fault_crs[4] = v->arch.hvm_vcpu.guest_cr[4];  in show_registers()

asm-offsets.c
    111  OFFSET(VCPU_hvm_guest_cr2, struct vcpu, arch.hvm_vcpu.guest_cr[2]);  in __dummy__()

/xen-4.10.0-shim-comet/xen/arch/x86/hvm/svm/

nestedsvm.c
    247  n1vmcb->_cr0 = v->arch.hvm_vcpu.guest_cr[0];  in nsvm_vcpu_hostsave()
    248  n1vmcb->_cr2 = v->arch.hvm_vcpu.guest_cr[2];  in nsvm_vcpu_hostsave()
    249  n1vmcb->_cr4 = v->arch.hvm_vcpu.guest_cr[4];  in nsvm_vcpu_hostsave()
    287  v->arch.hvm_vcpu.guest_cr[4] = n1vmcb->_cr4;  in nsvm_vcpu_hostrestore()
    296  svm->ns_cr0, v->arch.hvm_vcpu.guest_cr[0]);  in nsvm_vcpu_hostrestore()
    304  svm->ns_cr0 = v->arch.hvm_vcpu.guest_cr[0];  in nsvm_vcpu_hostrestore()
    307  v->arch.hvm_vcpu.guest_cr[2] = n1vmcb->_cr2;  in nsvm_vcpu_hostrestore()
    560  v->arch.hvm_vcpu.guest_cr[4] = ns_vmcb->_cr4;  in nsvm_vmcb_prepare4vmrun()
    568  svm->ns_cr0 = v->arch.hvm_vcpu.guest_cr[0];  in nsvm_vmcb_prepare4vmrun()
    570  v->arch.hvm_vcpu.guest_cr[0] = ns_vmcb->_cr0;  in nsvm_vmcb_prepare4vmrun()
    [all …]

svm.c
    246  c->cr0 = v->arch.hvm_vcpu.guest_cr[0];  in svm_vmcb_save()
    247  c->cr2 = v->arch.hvm_vcpu.guest_cr[2];  in svm_vmcb_save()
    248  c->cr3 = v->arch.hvm_vcpu.guest_cr[3];  in svm_vmcb_save()
    249  c->cr4 = v->arch.hvm_vcpu.guest_cr[4];  in svm_vmcb_save()
    314  v->arch.hvm_vcpu.guest_cr[0] = c->cr0 | X86_CR0_ET;  in svm_vmcb_restore()
    315  v->arch.hvm_vcpu.guest_cr[2] = c->cr2;  in svm_vmcb_restore()
    316  v->arch.hvm_vcpu.guest_cr[3] = c->cr3;  in svm_vmcb_restore()
    317  v->arch.hvm_vcpu.guest_cr[4] = c->cr4;  in svm_vmcb_restore()
    486  if ( !(v->arch.hvm_vcpu.guest_cr[0] & X86_CR0_TS) )  in svm_fpu_leave()
    578  value |= v->arch.hvm_vcpu.guest_cr[4];  in svm_update_guest_cr()
    [all …]

vmcb.c
    172  v->arch.hvm_vcpu.guest_cr[0] = X86_CR0_PE | X86_CR0_ET;  in construct_vmcb()
    175  v->arch.hvm_vcpu.guest_cr[4] = 0;  in construct_vmcb()

/xen-4.10.0-shim-comet/xen/arch/x86/mm/hap/

guest_walk.c
    45  unsigned long cr3 = v->arch.hvm_vcpu.guest_cr[3];  in hap_gva_to_gfn()

hap.c
    674  v->arch.hvm_vcpu.hw_cr[3] = v->arch.hvm_vcpu.guest_cr[3];  in hap_update_cr3()
    690  unsigned long cr3_gfn = v->arch.hvm_vcpu.guest_cr[3] >> PAGE_SHIFT;  in hap_update_paging_modes()

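hap.c:674 is the key line for hardware-assisted paging: the guest's own CR3 value is loaded into hardware unmodified, because EPT/NPT translates the addresses it contains. Contrast the shadow code in multi.c at the end of this listing, which reads guest_cr[3] only to locate the guest's tables while hardware runs on shadow tables. Schematically (shadow_top_maddr is an illustrative parameter, not a Xen name):

    static void model_update_hw_cr3(const unsigned long guest_cr[5],
                                    unsigned long hw_cr[5],
                                    int hap_enabled,
                                    unsigned long shadow_top_maddr)
    {
        if ( hap_enabled )
            hw_cr[3] = guest_cr[3];       /* cf. hap.c:674 */
        else
            hw_cr[3] = shadow_top_maddr;  /* shadow path: hardware walks
                                           * the shadow tables, not the
                                           * guest's (cf. multi.c) */
    }
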
/xen-4.10.0-shim-comet/xen/arch/x86/

domctl.c
    1609  c.nat->ctrlreg[0] = v->arch.hvm_vcpu.guest_cr[0];  in arch_get_info_guest()
    1610  c.nat->ctrlreg[2] = v->arch.hvm_vcpu.guest_cr[2];  in arch_get_info_guest()
    1611  c.nat->ctrlreg[3] = v->arch.hvm_vcpu.guest_cr[3];  in arch_get_info_guest()
    1612  c.nat->ctrlreg[4] = v->arch.hvm_vcpu.guest_cr[4];  in arch_get_info_guest()

cpuid.c
    715  if ( v->arch.hvm_vcpu.guest_cr[4] & X86_CR4_OSXSAVE )  in guest_cpuid()
    846  : v->arch.hvm_vcpu.guest_cr[4]) & X86_CR4_PKE )  in guest_cpuid()

/xen-4.10.0-shim-comet/xen/include/asm-x86/

guest_pt.h
    218  (v->arch.hvm_vcpu.guest_cr[4] & X86_CR4_PSE));  in guest_can_use_l2_superpages()

/xen-4.10.0-shim-comet/xen/arch/x86/cpu/

vpmu.c
    305  if ( !(sampled->arch.hvm_vcpu.guest_cr[0] & X86_CR0_PE) )  in vpmu_do_interrupt()

/xen-4.10.0-shim-comet/xen/arch/x86/mm/shadow/

multi.c
    4092  guest_idx = guest_index((void *)v->arch.hvm_vcpu.guest_cr[3]);  in sh_update_cr3()
    4578  gcr3 = (v->arch.hvm_vcpu.guest_cr[3]);  in sh_pagetable_dying()