Lines Matching refs:hvm_svm

92 if ( vmcb_get_cpl(v->arch.hvm_svm.vmcb) ) in svm_crash_or_fault()
117 curr->arch.hvm_svm.vmcb->interrupt_shadow = 0; in __update_guest_eip()
150 msr_bit = svm_msrbit(v->arch.hvm_svm.msrpm, msr); in svm_intercept_msr()
167 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_save_dr()
184 rdmsrl(MSR_AMD64_DR0_ADDRESS_MASK, v->arch.hvm_svm.dr_mask[0]); in svm_save_dr()
185 rdmsrl(MSR_AMD64_DR1_ADDRESS_MASK, v->arch.hvm_svm.dr_mask[1]); in svm_save_dr()
186 rdmsrl(MSR_AMD64_DR2_ADDRESS_MASK, v->arch.hvm_svm.dr_mask[2]); in svm_save_dr()
187 rdmsrl(MSR_AMD64_DR3_ADDRESS_MASK, v->arch.hvm_svm.dr_mask[3]); in svm_save_dr()
215 wrmsrl(MSR_AMD64_DR0_ADDRESS_MASK, v->arch.hvm_svm.dr_mask[0]); in __restore_debug_registers()
216 wrmsrl(MSR_AMD64_DR1_ADDRESS_MASK, v->arch.hvm_svm.dr_mask[1]); in __restore_debug_registers()
217 wrmsrl(MSR_AMD64_DR2_ADDRESS_MASK, v->arch.hvm_svm.dr_mask[2]); in __restore_debug_registers()
218 wrmsrl(MSR_AMD64_DR3_ADDRESS_MASK, v->arch.hvm_svm.dr_mask[3]); in __restore_debug_registers()
237 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_restore_dr()
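
The svm_save_dr()/__restore_debug_registers() hits above show the four AMD64 debug-register address-mask MSRs being shadowed in arch.hvm_svm.dr_mask[]. A minimal, self-contained sketch of that save/restore pattern follows; the rdmsrl()/wrmsrl() stand-ins, MSR indices and struct are hypothetical here, only the shape of the code is taken from the listing.

#include <stdint.h>
#include <inttypes.h>
#include <stdio.h>

/* Hypothetical stand-ins: the real code reads/writes hardware MSRs with
 * rdmsrl()/wrmsrl(); here a small array plays the part of the MSRs. */
static uint64_t fake_msrs[4];
#define rdmsrl(idx, var)  ((var) = fake_msrs[(idx)])
#define wrmsrl(idx, val)  (fake_msrs[(idx)] = (val))

/* Indices standing in for MSR_AMD64_DR0..DR3_ADDRESS_MASK. */
enum { DR0_MASK, DR1_MASK, DR2_MASK, DR3_MASK };

struct arch_svm_sketch {
    uint64_t dr_mask[4];                 /* per-vCPU shadow of the mask MSRs */
};

/* Save path: pull the current MSR values into the shadow (cf. svm_save_dr). */
static void save_dr_masks(struct arch_svm_sketch *s)
{
    rdmsrl(DR0_MASK, s->dr_mask[0]);
    rdmsrl(DR1_MASK, s->dr_mask[1]);
    rdmsrl(DR2_MASK, s->dr_mask[2]);
    rdmsrl(DR3_MASK, s->dr_mask[3]);
}

/* Restore path: push the shadow back (cf. __restore_debug_registers). */
static void restore_dr_masks(const struct arch_svm_sketch *s)
{
    wrmsrl(DR0_MASK, s->dr_mask[0]);
    wrmsrl(DR1_MASK, s->dr_mask[1]);
    wrmsrl(DR2_MASK, s->dr_mask[2]);
    wrmsrl(DR3_MASK, s->dr_mask[3]);
}

int main(void)
{
    struct arch_svm_sketch s = { .dr_mask = { 1, 2, 3, 4 } };

    restore_dr_masks(&s);
    fake_msrs[DR2_MASK] = 0xff;          /* "guest" changes one mask */
    save_dr_masks(&s);
    printf("dr_mask[2] = %#" PRIx64 "\n", s.dr_mask[2]);
    return 0;
}
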
244 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_vmcb_save()
251 c->sysenter_cs = v->arch.hvm_svm.guest_sysenter_cs; in svm_vmcb_save()
252 c->sysenter_esp = v->arch.hvm_svm.guest_sysenter_esp; in svm_vmcb_save()
253 c->sysenter_eip = v->arch.hvm_svm.guest_sysenter_eip; in svm_vmcb_save()
271 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_vmcb_restore()
323 vmcb->sysenter_cs = v->arch.hvm_svm.guest_sysenter_cs = c->sysenter_cs; in svm_vmcb_restore()
324 vmcb->sysenter_esp = v->arch.hvm_svm.guest_sysenter_esp = c->sysenter_esp; in svm_vmcb_restore()
325 vmcb->sysenter_eip = v->arch.hvm_svm.guest_sysenter_eip = c->sysenter_eip; in svm_vmcb_restore()
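
The sysenter MSRs follow a shadow-copy pattern: svm_vmcb_save()/svm_vmcb_restore() above, and the MSR intercepts further down (lines 1837-1843 and 1993-1999), keep guest_sysenter_* in arch.hvm_svm and update it together with the VMCB field in a single chained assignment, while reads are served from the shadow. A stripped-down sketch of that pattern; struct and helper names here are mine, not Xen's.

#include <stdint.h>
#include <inttypes.h>
#include <stdio.h>

/* Cut-down stand-ins (hypothetical); the real fields live in the VMCB layout
 * and in struct arch_svm_struct. */
struct vmcb_sketch     { uint64_t sysenter_cs, sysenter_esp, sysenter_eip; };
struct arch_svm_sketch { uint64_t guest_sysenter_cs, guest_sysenter_esp,
                                  guest_sysenter_eip; };

/* Write path: update shadow and VMCB together in one chained assignment,
 * as the restore and MSR-write hits in the listing do. */
static void write_sysenter_cs(struct vmcb_sketch *vmcb,
                              struct arch_svm_sketch *svm, uint64_t val)
{
    vmcb->sysenter_cs = svm->guest_sysenter_cs = val;
}

/* Read path: serve the value from the shadow (cf. svm_msr_read_intercept). */
static uint64_t read_sysenter_cs(const struct arch_svm_sketch *svm)
{
    return svm->guest_sysenter_cs;
}

int main(void)
{
    struct vmcb_sketch vmcb = { 0 };
    struct arch_svm_sketch svm = { 0 };

    write_sysenter_cs(&vmcb, &svm, 0x10);
    printf("shadow %#" PRIx64 ", vmcb %#" PRIx64 "\n",
           read_sysenter_cs(&svm), vmcb.sysenter_cs);
    return 0;
}
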
354 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_save_cpu_state()
368 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_load_cpu_state()
406 ctxt->msr[ctxt->count].val = v->arch.hvm_svm.dr_mask[0]; in svm_save_msr()
410 ctxt->msr[ctxt->count].val = v->arch.hvm_svm.dr_mask[1]; in svm_save_msr()
414 ctxt->msr[ctxt->count].val = v->arch.hvm_svm.dr_mask[2]; in svm_save_msr()
418 ctxt->msr[ctxt->count].val = v->arch.hvm_svm.dr_mask[3]; in svm_save_msr()
439 v->arch.hvm_svm.dr_mask[0] = ctxt->msr[i].val; in svm_load_msr()
448 v->arch.hvm_svm.dr_mask[idx - MSR_AMD64_DR1_ADDRESS_MASK + 1] = in svm_load_msr()
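
svm_save_msr()/svm_load_msr() treat dr_mask[0] separately from dr_mask[1..3] because the DR0 address-mask MSR is not numerically adjacent to the DR1..DR3 mask MSRs, hence the "idx - MSR_AMD64_DR1_ADDRESS_MASK + 1" arithmetic. A standalone illustration of that index mapping; the helper is hypothetical, and the MSR numbers are the usual AMD ones, assumed rather than taken from the listing.

#include <stdint.h>
#include <stdio.h>

/* Usual AMD MSR numbers: the DR0 mask MSR is not adjacent to DR1..DR3,
 * which is why slot 0 is special-cased in the listing. */
#define MSR_AMD64_DR0_ADDRESS_MASK 0xc0011027u
#define MSR_AMD64_DR1_ADDRESS_MASK 0xc0011019u
#define MSR_AMD64_DR3_ADDRESS_MASK 0xc001101bu

/* Hypothetical helper: map an address-mask MSR to its dr_mask[] slot,
 * reproducing the "idx - MSR_AMD64_DR1_ADDRESS_MASK + 1" arithmetic. */
static int dr_mask_slot(uint32_t msr)
{
    if ( msr == MSR_AMD64_DR0_ADDRESS_MASK )
        return 0;
    if ( msr >= MSR_AMD64_DR1_ADDRESS_MASK &&
         msr <= MSR_AMD64_DR3_ADDRESS_MASK )
        return msr - MSR_AMD64_DR1_ADDRESS_MASK + 1;   /* slots 1..3 */
    return -1;                                         /* not a mask MSR */
}

int main(void)
{
    printf("DR0 -> slot %d, DR2 -> slot %d\n",
           dr_mask_slot(MSR_AMD64_DR0_ADDRESS_MASK),
           dr_mask_slot(MSR_AMD64_DR1_ADDRESS_MASK + 1));
    return 0;
}
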
497 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_get_interrupt_shadow()
511 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_set_interrupt_shadow()
525 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_guest_x86_mode()
538 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_update_guest_cr()
606 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_update_guest_efer()
618 struct arch_svm_struct *arch_svm = &v->arch.hvm_svm; in svm_update_guest_vendor()
633 struct arch_svm_struct *arch_svm = &v->arch.hvm_svm; in svm_sync_vmcb()
645 return vmcb_get_cpl(v->arch.hvm_svm.vmcb); in svm_get_cpl()
651 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_get_segment_register()
696 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_set_segment_register()
771 return v->arch.hvm_svm.vmcb->kerngsbase; in svm_get_shadow_gs_base()
776 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_set_guest_pat()
787 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_get_guest_pat()
835 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_set_tsc_offset()
868 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_set_rdtsc_exiting()
887 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_set_descriptor_access_exiting()
904 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_get_insn_bytes()
905 unsigned int len = v->arch.hvm_svm.cached_insn_len; in svm_get_insn_bytes()
911 v->arch.hvm_svm.cached_insn_len = 0; in svm_get_insn_bytes()
947 (curr->arch.hvm_svm.guest_lwp_cfg >> 40) & 0xff, in svm_lwp_interrupt()
954 if ( v->arch.hvm_svm.guest_lwp_cfg ) in svm_lwp_save()
964 if ( v->arch.hvm_svm.guest_lwp_cfg ) in svm_lwp_load()
965 wrmsrl(MSR_AMD64_LWP_CFG, v->arch.hvm_svm.cpu_lwp_cfg); in svm_lwp_load()
982 v->arch.hvm_svm.guest_lwp_cfg = msr_content; in svm_update_lwp_cfg()
988 v->arch.hvm_svm.cpu_lwp_cfg = (msr_content & 0xffff00ffffffffffULL) in svm_update_lwp_cfg()
994 v->arch.hvm_svm.cpu_lwp_cfg = msr_content & 0xffff00ff7fffffffULL; in svm_update_lwp_cfg()
997 wrmsrl(MSR_AMD64_LWP_CFG, v->arch.hvm_svm.cpu_lwp_cfg); in svm_update_lwp_cfg()
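
The masks in svm_update_lwp_cfg() (0xffff00ffffffffffULL and 0xffff00ff7fffffffULL) both clear bits 47:40 of LWP_CFG, the interrupt-vector byte that svm_lwp_interrupt() extracts with ">> 40", apparently so a host-controlled vector can be substituted before the value reaches the hardware MSR. A small standalone sketch of just that vector-byte manipulation; helper names and example values are mine.

#include <stdint.h>
#include <stdio.h>

/* Bits 47:40 of LWP_CFG hold the interrupt vector; ~LWP_CFG_VECTOR_MASK is
 * the 0xffff00ffffffffffULL constant from the listing. */
#define LWP_CFG_VECTOR_MASK 0x0000ff0000000000ULL

/* Extract the vector byte, as svm_lwp_interrupt() does with ">> 40". */
static uint8_t lwp_cfg_vector(uint64_t cfg)
{
    return (cfg >> 40) & 0xff;
}

/* Clear the vector byte and substitute another one (hypothetical helper). */
static uint64_t lwp_cfg_set_vector(uint64_t cfg, uint8_t vector)
{
    return (cfg & ~LWP_CFG_VECTOR_MASK) | ((uint64_t)vector << 40);
}

int main(void)
{
    uint64_t guest_cfg = 0x0000a5000000000fULL;   /* example: vector 0xa5 */
    uint64_t cpu_cfg   = lwp_cfg_set_vector(guest_cfg, 0x2f);

    printf("guest vector %#x, cpu vector %#x\n",
           (unsigned)lwp_cfg_vector(guest_cfg),
           (unsigned)lwp_cfg_vector(cpu_cfg));
    return 0;
}
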
1048 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_ctxt_switch_to()
1080 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_do_resume()
1099 if ( v->arch.hvm_svm.launch_core != smp_processor_id() ) in svm_do_resume()
1101 v->arch.hvm_svm.launch_core = smp_processor_id(); in svm_do_resume()
1133 vcpu->arch.hvm_svm.osvw.length = (osvw_length >= 3) ? osvw_length : 3; in svm_guest_osvw_init()
1134 vcpu->arch.hvm_svm.osvw.status = osvw_status & ~(6ULL); in svm_guest_osvw_init()
1145 vcpu->arch.hvm_svm.osvw.status |= 1; in svm_guest_osvw_init()
1208 v->arch.hvm_svm.launch_core = -1; in svm_vcpu_initialise()
1240 const struct vmcb_struct *vmcb = curr->arch.hvm_svm.vmcb; in svm_emul_swint_injection()
1347 struct vmcb_struct *vmcb = curr->arch.hvm_svm.vmcb; in svm_inject_event()
1485 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_event_pending()
1571 *val = v->arch.hvm_svm.osvw.length; in svm_handle_osvw()
1573 *val = v->arch.hvm_svm.osvw.status; in svm_handle_osvw()
1759 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_fpu_dirty_intercept()
1832 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_msr_read_intercept()
1837 *msr_content = v->arch.hvm_svm.guest_sysenter_cs; in svm_msr_read_intercept()
1840 *msr_content = v->arch.hvm_svm.guest_sysenter_esp; in svm_msr_read_intercept()
1843 *msr_content = v->arch.hvm_svm.guest_sysenter_eip; in svm_msr_read_intercept()
1885 *msr_content = v->arch.hvm_svm.guest_lwp_cfg; in svm_msr_read_intercept()
1915 *msr_content = v->arch.hvm_svm.dr_mask[0]; in svm_msr_read_intercept()
1922 v->arch.hvm_svm.dr_mask[msr - MSR_AMD64_DR1_ADDRESS_MASK + 1]; in svm_msr_read_intercept()
1973 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_msr_write_intercept()
1993 vmcb->sysenter_cs = v->arch.hvm_svm.guest_sysenter_cs = msr_content; in svm_msr_write_intercept()
1996 vmcb->sysenter_esp = v->arch.hvm_svm.guest_sysenter_esp = msr_content; in svm_msr_write_intercept()
1999 vmcb->sysenter_eip = v->arch.hvm_svm.guest_sysenter_eip = msr_content; in svm_msr_write_intercept()
2071 v->arch.hvm_svm.dr_mask[0] = msr_content; in svm_msr_write_intercept()
2077 v->arch.hvm_svm.dr_mask[msr - MSR_AMD64_DR1_ADDRESS_MASK + 1] = in svm_msr_write_intercept()
2124 bool rdmsr = curr->arch.hvm_svm.vmcb->exitinfo1 == 0; in svm_do_msr_access()
2266 v->arch.hvm_svm.vmcb_in_sync = 0; in svm_vmexit_do_vmload()
2395 const struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_get_pending_event()
2468 struct vmcb_struct *vmcb = v->arch.hvm_svm.vmcb; in svm_vmexit_handler()
2643 v->arch.hvm_svm.cached_insn_len = vmcb->guest_ins_len & 0xf; in svm_vmexit_handler()
2645 v->arch.hvm_svm.cached_insn_len = 0; in svm_vmexit_handler()
2829 v->arch.hvm_svm.cached_insn_len = vmcb->guest_ins_len & 0xf; in svm_vmexit_handler()
2841 v->arch.hvm_svm.cached_insn_len = 0; in svm_vmexit_handler()
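
The final hits show svm_vmexit_handler() caching the VMCB's decode-assist instruction length (guest_ins_len & 0xf) in cached_insn_len, which svm_get_insn_bytes() consumes once and then clears. A self-contained sketch of that one-shot cache; the struct layout and helper names are simplified stand-ins, not the real VMCB definition.

#include <stdint.h>
#include <string.h>
#include <stdio.h>

/* Hypothetical stand-ins for the VMCB's decode-assist fields and the
 * per-vCPU cache the listing updates. */
struct vmcb_sketch {
    uint8_t guest_ins_len;      /* valid byte count, low 4 bits */
    uint8_t guest_ins[15];      /* instruction bytes captured by hardware */
};

struct vcpu_sketch {
    unsigned int cached_insn_len;   /* 0 means "nothing cached" */
    struct vmcb_sketch vmcb;
};

/* At VM exit: remember how many fetched bytes are valid (len & 0xf, as in
 * the listing), so the emulator can avoid re-fetching from guest memory. */
static void cache_insn_len(struct vcpu_sketch *v)
{
    v->cached_insn_len = v->vmcb.guest_ins_len & 0xf;
}

/* Consumer: copy out the cached bytes and invalidate the cache. */
static unsigned int get_insn_bytes(struct vcpu_sketch *v, uint8_t *buf)
{
    unsigned int len = v->cached_insn_len;

    if ( len != 0 )
    {
        memcpy(buf, v->vmcb.guest_ins, len);
        v->cached_insn_len = 0;     /* one-shot, as in the listing */
    }
    return len;
}

int main(void)
{
    struct vcpu_sketch v = { .vmcb = { .guest_ins_len = 3,
                                       .guest_ins = { 0x0f, 0x01, 0xd9 } } };
    uint8_t buf[15];

    cache_insn_len(&v);
    printf("copied %u bytes\n", get_insn_bytes(&v, buf));
    return 0;
}
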