Lines matching refs: vmcb

345 svm->vmcb->save.efer = efer | EFER_SVME;  in svm_set_efer()
346 vmcb_mark_dirty(svm->vmcb, VMCB_CR); in svm_set_efer()
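Nearly every VMCB store in this listing is paired with a vmcb_mark_dirty() call: SVM lets software tell the CPU which VMCB areas are unchanged since the last VMRUN via a clean-bits field, and KVM must clear the relevant bit whenever it writes the corresponding area. A minimal user-space sketch of that bookkeeping, with struct vmcb trimmed to the one field involved (the real helpers live in arch/x86/kvm/svm/svm.h and also keep a few areas permanently dirty via VMCB_ALWAYS_DIRTY_MASK):

    #include <stdint.h>
    #include <stdio.h>

    /* Trimmed stand-in for the kernel's vmcb_control_area: clean bits only. */
    struct vmcb_control_area { uint32_t clean; };
    struct vmcb { struct vmcb_control_area control; };

    /* Clean-bit indices; the kernel's enum in svm.h has more entries. */
    enum { VMCB_INTERCEPTS, VMCB_ASID, VMCB_INTR, VMCB_NPT, VMCB_CR,
           VMCB_DR, VMCB_DT, VMCB_SEG, VMCB_DIRTY_MAX };

    /* A cleared bit forces hardware to re-read that VMCB area on VMRUN. */
    static void vmcb_mark_dirty(struct vmcb *vmcb, int bit)
    {
        vmcb->control.clean &= ~(1U << bit);
    }

    static void vmcb_mark_all_dirty(struct vmcb *vmcb)
    {
        vmcb->control.clean = 0;
    }

    static void vmcb_mark_all_clean(struct vmcb *vmcb)
    {
        vmcb->control.clean = (1U << VMCB_DIRTY_MAX) - 1;
    }

    int main(void)
    {
        struct vmcb vmcb;

        vmcb_mark_all_dirty(&vmcb);      /* fresh VMCB: everything dirty */
        vmcb_mark_all_clean(&vmcb);      /* after a successful VMRUN     */
        vmcb_mark_dirty(&vmcb, VMCB_CR); /* e.g. the EFER store above    */
        printf("clean = %#x\n", (unsigned)vmcb.control.clean);
        return 0;
    }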
355 if (svm->vmcb->control.int_state & SVM_INTERRUPT_SHADOW_MASK) in svm_get_interrupt_shadow()
365 svm->vmcb->control.int_state &= ~SVM_INTERRUPT_SHADOW_MASK; in svm_set_interrupt_shadow()
367 svm->vmcb->control.int_state |= SVM_INTERRUPT_SHADOW_MASK; in svm_set_interrupt_shadow()
384 if (nrips && svm->vmcb->control.next_rip != 0) { in __svm_skip_emulated_instruction()
386 svm->next_rip = svm->vmcb->control.next_rip; in __svm_skip_emulated_instruction()
391 old_rflags = svm->vmcb->save.rflags; in __svm_skip_emulated_instruction()
397 svm->vmcb->save.rflags = old_rflags; in __svm_skip_emulated_instruction()
446 svm->soft_int_csbase = svm->vmcb->save.cs.base; in svm_update_soft_interrupt_rip()
454 svm->vmcb->control.next_rip = rip; in svm_update_soft_interrupt_rip()
470 svm->vmcb->control.event_inj = ex->vector in svm_inject_exception()
474 svm->vmcb->control.event_inj_err = ex->error_code; in svm_inject_exception()
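The two stores in svm_inject_exception() above assemble the VMCB's EVTINJ field: vector in bits 7:0, event type in bits 10:8, an error-code-valid bit, and a global valid bit, with the error code itself carried separately in event_inj_err. A self-contained sketch of that encoding (the SVM_EVTINJ_* constants mirror arch/x86/include/asm/svm.h):

    #include <stdint.h>
    #include <stdio.h>

    /* EVTINJ bit layout, as defined in arch/x86/include/asm/svm.h. */
    #define SVM_EVTINJ_TYPE_SHIFT 8
    #define SVM_EVTINJ_TYPE_EXEPT (3 << SVM_EVTINJ_TYPE_SHIFT)
    #define SVM_EVTINJ_VALID      (1U << 31)
    #define SVM_EVTINJ_VALID_ERR  (1U << 11)

    /* Build the event_inj/event_inj_err pair for a hardware exception. */
    static void inject_exception(uint8_t vector, int has_error_code,
                                 uint32_t error_code,
                                 uint32_t *event_inj, uint32_t *event_inj_err)
    {
        *event_inj = vector | SVM_EVTINJ_TYPE_EXEPT | SVM_EVTINJ_VALID;
        if (has_error_code) {
            *event_inj |= SVM_EVTINJ_VALID_ERR;
            *event_inj_err = error_code;
        }
    }

    int main(void)
    {
        uint32_t inj, err = 0;

        inject_exception(14 /* #PF */, 1, 0x2 /* write fault */, &inj, &err);
        printf("event_inj = %#x, event_inj_err = %#x\n",
               (unsigned)inj, (unsigned)err);
        return 0;
    }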
912 void svm_copy_lbrs(struct vmcb *to_vmcb, struct vmcb *from_vmcb) in svm_copy_lbrs()
927 svm->vmcb->control.virt_ext |= LBR_CTL_ENABLE_MASK; in svm_enable_lbrv()
935 svm_copy_lbrs(svm->vmcb, svm->vmcb01.ptr); in svm_enable_lbrv()
942 svm->vmcb->control.virt_ext &= ~LBR_CTL_ENABLE_MASK; in svm_disable_lbrv()
953 svm_copy_lbrs(svm->vmcb01.ptr, svm->vmcb); in svm_disable_lbrv()
965 struct vmcb *vmcb = in svm_get_lbr_msr() local
966 (svm->vmcb->control.virt_ext & LBR_CTL_ENABLE_MASK) ? in svm_get_lbr_msr()
967 svm->vmcb : svm->vmcb01.ptr; in svm_get_lbr_msr()
971 return vmcb->save.dbgctl; in svm_get_lbr_msr()
973 return vmcb->save.br_from; in svm_get_lbr_msr()
975 return vmcb->save.br_to; in svm_get_lbr_msr()
977 return vmcb->save.last_excp_from; in svm_get_lbr_msr()
979 return vmcb->save.last_excp_to; in svm_get_lbr_msr()
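svm_get_lbr_msr() above illustrates the vmcb01/vmcb02 split for LBR state: when LBR virtualization is enabled in virt_ext, the currently-run VMCB holds the live LBR MSR values; otherwise the copies in vmcb01 are authoritative. A sketch of the selection with a hypothetical helper, both structs trimmed to the fields involved (in the kernel, vmcb01 is a struct kvm_vmcb_info and the pointer is vmcb01.ptr):

    #include <stdint.h>

    #define LBR_CTL_ENABLE_MASK 1ULL  /* bit 0 of virt_ext, as in asm/svm.h */

    /* Trimmed stand-ins for the fields this selection logic touches. */
    struct vmcb {
        struct { uint64_t virt_ext; } control;
        struct { uint64_t dbgctl, br_from, br_to; } save;
    };

    struct vcpu_svm {
        struct vmcb *vmcb;    /* currently active VMCB (may be vmcb02) */
        struct vmcb *vmcb01;  /* host-level VMCB */
    };

    /*
     * With LBR virtualization on, hardware saves/restores the LBR MSRs
     * into whichever VMCB is actually run, so read them from there;
     * otherwise the copies kept in vmcb01 are the live ones.
     */
    static struct vmcb *lbr_vmcb(struct vcpu_svm *svm)
    {
        return (svm->vmcb->control.virt_ext & LBR_CTL_ENABLE_MASK) ?
                svm->vmcb : svm->vmcb01;
    }

    int main(void)
    {
        struct vmcb v01 = {0}, v02 = {0};
        struct vcpu_svm svm = { .vmcb = &v02, .vmcb01 = &v01 };

        /* LBR virtualization off in v02, so vmcb01 must be chosen. */
        return lbr_vmcb(&svm) == &v01 ? 0 : 1;
    }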
994 bool current_enable_lbrv = !!(svm->vmcb->control.virt_ext & in svm_update_lbrv()
1017 svm->vmcb->save.rflags &= ~X86_EFLAGS_TF; in disable_nmi_singlestep()
1019 svm->vmcb->save.rflags &= ~X86_EFLAGS_RF; in disable_nmi_singlestep()
1026 struct vmcb_control_area *control = &svm->vmcb->control; in grow_ple_window()
1038 vmcb_mark_dirty(svm->vmcb, VMCB_INTERCEPTS); in grow_ple_window()
1047 struct vmcb_control_area *control = &svm->vmcb->control; in shrink_ple_window()
1059 vmcb_mark_dirty(svm->vmcb, VMCB_INTERCEPTS); in shrink_ple_window()
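grow_ple_window()/shrink_ple_window() retune control.pause_filter_count, and each change is followed by vmcb_mark_dirty(..., VMCB_INTERCEPTS) because the filter count lives in that clean-bit area. A sketch of the tuning arithmetic; the kernel's shared helpers in arch/x86/kvm/x86.h also support additive modifiers, and the default values below are illustrative assumptions:

    #include <stdio.h>

    /* Grow aggressively on PAUSE exits; shrink when the window proved
     * too large. Models only the default multiplicative mode. */
    static unsigned int grow_ple_window(unsigned int val,
                                        unsigned int grow, unsigned int max)
    {
        unsigned long long ret = (unsigned long long)val * grow;
        return ret > max ? max : (unsigned int)ret;
    }

    static unsigned int shrink_ple_window(unsigned int val,
                                          unsigned int shrink, unsigned int min)
    {
        val /= shrink;
        return val < min ? min : val;
    }

    int main(void)
    {
        unsigned int window = 3000;  /* pause_filter_count (assumed default) */

        window = grow_ple_window(window, 2, 65535);
        printf("after grow:   %u\n", window);
        window = shrink_ple_window(window, 2, 3000);
        printf("after shrink: %u\n", window);
        return 0;
    }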
1115 svm->vmcb->control.tsc_offset = offset; in svm_write_tsc_offset()
1116 vmcb_mark_dirty(svm->vmcb, VMCB_INTERCEPTS); in svm_write_tsc_offset()
1161 svm->vmcb->control.virt_ext &= ~VIRTUAL_VMLOAD_VMSAVE_ENABLE_MASK; in init_vmcb_after_set_cpuid()
1175 svm->vmcb->control.virt_ext |= VIRTUAL_VMLOAD_VMSAVE_ENABLE_MASK; in init_vmcb_after_set_cpuid()
1186 struct vmcb *vmcb = svm->vmcb01.ptr; in init_vmcb() local
1187 struct vmcb_control_area *control = &vmcb->control; in init_vmcb()
1188 struct vmcb_save_area *save = &vmcb->save; in init_vmcb()
1312 avic_init_vmcb(svm, vmcb); in init_vmcb()
1317 svm->vmcb->control.int_ctl |= V_GIF_ENABLE_MASK; in init_vmcb()
1323 svm_hv_init_vmcb(vmcb); in init_vmcb()
1326 vmcb_mark_all_dirty(vmcb); in init_vmcb()
1364 svm->vmcb = target_vmcb->ptr; in svm_switch_vmcb()
1432 static void svm_clear_current_vmcb(struct vmcb *vmcb) in svm_clear_current_vmcb() argument
1437 cmpxchg(per_cpu_ptr(&svm_data.current_vmcb, i), vmcb, NULL); in svm_clear_current_vmcb()
1449 svm_clear_current_vmcb(svm->vmcb); in svm_vcpu_free()
1502 if (sd->current_vmcb != svm->vmcb) { in svm_vcpu_load()
1503 sd->current_vmcb = svm->vmcb; in svm_vcpu_load()
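svm_vcpu_load() (lines 1502-1503) caches the VMCB most recently run on each physical CPU, so an unchanged VMCB can keep its clean bits across a reschedule; svm_clear_current_vmcb() (lines 1432-1437) must therefore scrub that per-CPU cache when a VMCB is freed, using cmpxchg() so it only clears entries still pointing at the dying VMCB and a recycled allocation can't be mistaken for an unchanged one. A user-space sketch of the clearing side, with C11 atomics standing in for the kernel's per-CPU cmpxchg():

    #include <stdatomic.h>
    #include <stddef.h>

    #define NR_CPUS 8

    struct vmcb { char buf[16]; };

    /* Per-CPU cache of the VMCB most recently run on that CPU. */
    static _Atomic(struct vmcb *) current_vmcb[NR_CPUS];

    static void svm_clear_current_vmcb(struct vmcb *vmcb)
    {
        for (int i = 0; i < NR_CPUS; i++) {
            struct vmcb *expected = vmcb;

            /*
             * Clear only entries that still point at the VMCB being
             * freed; caches of other, still-live VMCBs must survive.
             */
            atomic_compare_exchange_strong(&current_vmcb[i],
                                           &expected, NULL);
        }
    }

    int main(void)
    {
        struct vmcb dying;

        atomic_store(&current_vmcb[3], &dying);
        svm_clear_current_vmcb(&dying);
        return atomic_load(&current_vmcb[3]) == NULL ? 0 : 1;
    }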
1523 unsigned long rflags = svm->vmcb->save.rflags; in svm_get_rflags()
1545 to_svm(vcpu)->vmcb->save.rflags = rflags; in svm_set_rflags()
1550 struct vmcb *vmcb = to_svm(vcpu)->vmcb; in svm_get_if_flag() local
1553 ? vmcb->control.int_state & SVM_GUEST_INTERRUPT_MASK in svm_get_if_flag()
1590 control = &svm->vmcb->control; in svm_set_vintr()
1595 vmcb_mark_dirty(svm->vmcb, VMCB_INTR); in svm_set_vintr()
1603 svm->vmcb->control.int_ctl &= ~V_IRQ_INJECTION_BITS_MASK; in svm_clear_vintr()
1607 WARN_ON((svm->vmcb->control.int_ctl & V_TPR_MASK) != in svm_clear_vintr()
1610 svm->vmcb->control.int_ctl |= svm->nested.ctl.int_ctl & in svm_clear_vintr()
1613 svm->vmcb->control.int_vector = svm->nested.ctl.int_vector; in svm_clear_vintr()
1616 vmcb_mark_dirty(svm->vmcb, VMCB_INTR); in svm_clear_vintr()
1621 struct vmcb_save_area *save = &to_svm(vcpu)->vmcb->save; in svm_seg()
1709 var->dpl = to_svm(vcpu)->vmcb->save.cpl; in svm_get_segment()
1716 struct vmcb_save_area *save = &to_svm(vcpu)->vmcb->save; in svm_get_cpl()
1734 dt->size = svm->vmcb->save.idtr.limit; in svm_get_idt()
1735 dt->address = svm->vmcb->save.idtr.base; in svm_get_idt()
1742 svm->vmcb->save.idtr.limit = dt->size; in svm_set_idt()
1743 svm->vmcb->save.idtr.base = dt->address; in svm_set_idt()
1744 vmcb_mark_dirty(svm->vmcb, VMCB_DT); in svm_set_idt()
1751 dt->size = svm->vmcb->save.gdtr.limit; in svm_get_gdt()
1752 dt->address = svm->vmcb->save.gdtr.base; in svm_get_gdt()
1759 svm->vmcb->save.gdtr.limit = dt->size; in svm_set_gdt()
1760 svm->vmcb->save.gdtr.base = dt->address; in svm_set_gdt()
1761 vmcb_mark_dirty(svm->vmcb, VMCB_DT); in svm_set_gdt()
1777 svm->vmcb->save.cr3 = cr3; in sev_post_set_cr3()
1778 vmcb_mark_dirty(svm->vmcb, VMCB_CR); in sev_post_set_cr3()
1792 svm->vmcb->save.efer |= EFER_LMA | EFER_LME; in svm_set_cr0()
1797 svm->vmcb->save.efer &= ~(EFER_LMA | EFER_LME); in svm_set_cr0()
1817 svm->vmcb->save.cr0 = hcr0; in svm_set_cr0()
1818 vmcb_mark_dirty(svm->vmcb, VMCB_CR); in svm_set_cr0()
1858 to_svm(vcpu)->vmcb->save.cr4 = cr4; in svm_set_cr4()
1859 vmcb_mark_dirty(to_svm(vcpu)->vmcb, VMCB_CR); in svm_set_cr4()
1891 svm->vmcb->save.cpl = (var->dpl & 3); in svm_set_segment()
1893 vmcb_mark_dirty(svm->vmcb, VMCB_SEG); in svm_set_segment()
1913 svm->vmcb->control.tlb_ctl = TLB_CONTROL_FLUSH_ALL_ASID; in new_asid()
1914 vmcb_mark_dirty(svm->vmcb, VMCB_ASID); in new_asid()
1923 struct vmcb *vmcb = svm->vmcb; in svm_set_dr6() local
1928 if (unlikely(value != vmcb->save.dr6)) { in svm_set_dr6()
1929 vmcb->save.dr6 = value; in svm_set_dr6()
1930 vmcb_mark_dirty(vmcb, VMCB_DR); in svm_set_dr6()
1949 vcpu->arch.dr6 = svm->vmcb->save.dr6; in svm_sync_dirty_debug_regs()
1950 vcpu->arch.dr7 = svm->vmcb->save.dr7; in svm_sync_dirty_debug_regs()
1962 svm->vmcb->save.dr7 = value; in svm_set_dr7()
1963 vmcb_mark_dirty(svm->vmcb, VMCB_DR); in svm_set_dr7()
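svm_set_dr6() (lines 1923-1930) shows the other half of the clean-bit discipline: skip the store, and the dirty marking, when the value did not change, so the common no-change path leaves the VMCB clean and VMRUN fast. A sketch of that change-detection idiom (VMCB_DR's numeric value here is illustrative):

    #include <stdint.h>

    struct vmcb {
        struct { uint64_t dr6, dr7; } save;
        struct { uint32_t clean; } control;
    };

    #define VMCB_DR 6  /* clean-bit index; value is illustrative */

    static void vmcb_mark_dirty(struct vmcb *vmcb, int bit)
    {
        vmcb->control.clean &= ~(1U << bit);
    }

    /* Skip the write (and the dirty bit) when nothing changed. */
    static void svm_set_dr6(struct vmcb *vmcb, uint64_t value)
    {
        if (value != vmcb->save.dr6) {
            vmcb->save.dr6 = value;
            vmcb_mark_dirty(vmcb, VMCB_DR);
        }
    }

    int main(void)
    {
        struct vmcb vmcb = { .save = { .dr6 = 0 },
                             .control = { .clean = ~0u } };

        svm_set_dr6(&vmcb, 0);           /* no change: stays clean  */
        svm_set_dr6(&vmcb, 0xffff0ff0);  /* change: VMCB_DR dirtied */
        return 0;
    }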
1970 u64 fault_address = svm->vmcb->control.exit_info_2; in pf_interception()
1971 u64 error_code = svm->vmcb->control.exit_info_1; in pf_interception()
1975 svm->vmcb->control.insn_bytes : NULL, in pf_interception()
1976 svm->vmcb->control.insn_len); in pf_interception()
1983 u64 fault_address = svm->vmcb->control.exit_info_2; in npf_interception()
1984 u64 error_code = svm->vmcb->control.exit_info_1; in npf_interception()
1989 svm->vmcb->control.insn_bytes : NULL, in npf_interception()
1990 svm->vmcb->control.insn_len); in npf_interception()
2001 u32 payload = svm->vmcb->save.dr6 ^ DR6_ACTIVE_LOW; in db_interception()
2015 kvm_run->debug.arch.dr6 = svm->vmcb->save.dr6; in db_interception()
2016 kvm_run->debug.arch.dr7 = svm->vmcb->save.dr7; in db_interception()
2018 svm->vmcb->save.cs.base + svm->vmcb->save.rip; in db_interception()
2032 kvm_run->debug.arch.pc = svm->vmcb->save.cs.base + svm->vmcb->save.rip; in bp_interception()
2133 clear_page(svm->vmcb); in shutdown_interception()
2143 u32 io_info = svm->vmcb->control.exit_info_1; /* address size bug? */ in io_interception()
2160 svm->next_rip = svm->vmcb->control.exit_info_2; in io_interception()
2184 struct vmcb *vmcb12; in vmload_vmsave_interception()
2191 ret = kvm_vcpu_map(vcpu, gpa_to_gfn(svm->vmcb->save.rax), &map); in vmload_vmsave_interception()
2203 svm_copy_vmloadsave_state(svm->vmcb, vmcb12); in vmload_vmsave_interception()
2207 svm_copy_vmloadsave_state(vmcb12, svm->vmcb); in vmload_vmsave_interception()
2298 u32 error_code = svm->vmcb->control.exit_info_1; in gp_interception()
2324 if (svm->vmcb->save.rax & ~PAGE_MASK) in gp_interception()
2401 trace_kvm_invlpga(to_svm(vcpu)->vmcb->save.rip, asid, gva); in invlpga_interception()
2411 trace_kvm_skinit(to_svm(vcpu)->vmcb->save.rip, kvm_rax_read(vcpu)); in skinit_interception()
2422 int int_type = svm->vmcb->control.exit_int_info & in task_switch_interception()
2424 int int_vec = svm->vmcb->control.exit_int_info & SVM_EVTINJ_VEC_MASK; in task_switch_interception()
2426 svm->vmcb->control.exit_int_info & SVM_EXITINTINFO_TYPE_MASK; in task_switch_interception()
2428 svm->vmcb->control.exit_int_info & SVM_EXITINTINFO_VALID; in task_switch_interception()
2432 tss_selector = (u16)svm->vmcb->control.exit_info_1; in task_switch_interception()
2434 if (svm->vmcb->control.exit_info_2 & in task_switch_interception()
2437 else if (svm->vmcb->control.exit_info_2 & in task_switch_interception()
2451 if (svm->vmcb->control.exit_info_2 & in task_switch_interception()
2455 (u32)svm->vmcb->control.exit_info_2; in task_switch_interception()
2502 kvm_mmu_invlpg(vcpu, to_svm(vcpu)->vmcb->control.exit_info_1); in invlpg_interception()
2531 svm->vmcb->control.exit_code = SVM_EXIT_CR0_SEL_WRITE; in check_selective_cr0_intercepted()
2550 if (unlikely((svm->vmcb->control.exit_info_1 & CR_VALID) == 0)) in cr_interception()
2553 reg = svm->vmcb->control.exit_info_1 & SVM_EXITINFO_REG_MASK; in cr_interception()
2554 if (svm->vmcb->control.exit_code == SVM_EXIT_CR0_SEL_WRITE) in cr_interception()
2557 cr = svm->vmcb->control.exit_code - SVM_EXIT_READ_CR0; in cr_interception()
2621 new_value = (unsigned long)svm->vmcb->control.exit_info_1; in cr_trap()
2623 cr = svm->vmcb->control.exit_code - SVM_EXIT_CR0_WRITE_TRAP; in cr_trap()
2670 reg = svm->vmcb->control.exit_info_1 & SVM_EXITINFO_REG_MASK; in dr_interception()
2671 dr = svm->vmcb->control.exit_code - SVM_EXIT_READ_DR0; in dr_interception()
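cr_interception() and dr_interception() recover the register number arithmetically: SVM assigns contiguous exit codes (16 CR reads, then 16 CR writes, then the DR blocks), so subtracting the base code yields the register index and the read/write direction. A self-contained sketch of the CR decode, using the exit-code values from arch/x86/include/uapi/asm/svm.h:

    #include <stdio.h>

    /* Contiguous exit-code blocks from arch/x86/include/uapi/asm/svm.h. */
    #define SVM_EXIT_READ_CR0  0x000
    #define SVM_EXIT_WRITE_CR0 0x010
    #define SVM_EXIT_READ_DR0  0x020

    /* Decode "which CR, read or write?" from the raw exit code. */
    static void decode_cr_exit(unsigned int exit_code)
    {
        unsigned int cr = exit_code - SVM_EXIT_READ_CR0;
        int write = cr >= 16;

        if (write)
            cr -= 16;
        printf("CR%u %s\n", cr, write ? "write" : "read");
    }

    int main(void)
    {
        decode_cr_exit(0x004);  /* read CR4  */
        decode_cr_exit(0x010);  /* write CR0 */
        return 0;
    }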
2712 msr_info.data = to_svm(vcpu)->vmcb->control.exit_info_1 & ~EFER_SVME; in efer_trap()
2796 msr_info->data = svm->vmcb->save.spec_ctrl; in svm_get_msr()
2915 vmcb_mark_dirty(svm->vmcb, VMCB_NPT); in svm_set_msr()
2926 svm->vmcb->save.spec_ctrl = data; in svm_set_msr()
3027 if (svm->vmcb->control.virt_ext & LBR_CTL_ENABLE_MASK) in svm_set_msr()
3028 svm->vmcb->save.dbgctl = data; in svm_set_msr()
3078 if (to_svm(vcpu)->vmcb->control.exit_info_1) in msr_interception()
3139 type = svm->vmcb->control.exit_info_2; in invpcid_interception()
3140 gva = svm->vmcb->control.exit_info_1; in invpcid_interception()
3222 struct vmcb_control_area *control = &svm->vmcb->control; in dump_vmcb()
3223 struct vmcb_save_area *save = &svm->vmcb->save; in dump_vmcb()
3384 struct vmcb_control_area *control = &to_svm(vcpu)->vmcb->control; in svm_get_exit_info()
3401 u32 exit_code = svm->vmcb->control.exit_code; in svm_handle_exit()
3408 vcpu->arch.cr0 = svm->vmcb->save.cr0; in svm_handle_exit()
3410 vcpu->arch.cr3 = svm->vmcb->save.cr3; in svm_handle_exit()
3427 if (svm->vmcb->control.exit_code == SVM_EXIT_ERR) { in svm_handle_exit()
3430 = svm->vmcb->control.exit_code; in svm_handle_exit()
3462 vmcb_mark_all_dirty(svm->vmcb); in pre_svm_run()
3478 svm->vmcb->control.event_inj = SVM_EVTINJ_VALID | SVM_EVTINJ_TYPE_NMI; in svm_inject_nmi()
3507 svm->vmcb->control.event_inj = vcpu->arch.interrupt.nr | in svm_inject_irq()
3587 struct vmcb *vmcb = svm->vmcb; in svm_nmi_blocked() local
3595 return (vmcb->control.int_state & SVM_INTERRUPT_SHADOW_MASK) || in svm_nmi_blocked()
3637 struct vmcb *vmcb = svm->vmcb; in svm_interrupt_blocked() local
3657 return (vmcb->control.int_state & SVM_INTERRUPT_SHADOW_MASK); in svm_interrupt_blocked()
3729 svm->vmcb->save.rflags |= (X86_EFLAGS_TF | X86_EFLAGS_RF); in svm_enable_nmi_window()
3751 svm->vmcb->control.tlb_ctl = TLB_CONTROL_FLUSH_ASID; in svm_flush_tlb_current()
3760 invlpga(gva, svm->vmcb->control.asid); in svm_flush_tlb_gva()
3771 int cr8 = svm->vmcb->control.int_ctl & V_TPR_MASK; in sync_cr8_to_lapic()
3786 svm->vmcb->control.int_ctl &= ~V_TPR_MASK; in sync_lapic_to_cr8()
3787 svm->vmcb->control.int_ctl |= cr8 & V_TPR_MASK; in sync_lapic_to_cr8()
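sync_cr8_to_lapic() and sync_lapic_to_cr8() keep the emulated APIC's task priority and the VMCB's virtual TPR (the low bits of int_ctl) coherent across VMRUN: guest CR8 writes land in V_TPR and are propagated out after an exit, and the APIC's value is written back in before the next entry. A sketch of the mask dance; the real kvm_set_cr8()/kvm_get_cr8() also convert between the 4-bit CR8 value and the 8-bit APIC TPR register, which is elided here:

    #include <stdio.h>

    #define V_TPR_MASK 0x0f  /* low bits of int_ctl hold the virtual TPR */

    static unsigned int int_ctl;    /* stand-in for vmcb->control.int_ctl */
    static unsigned int lapic_tpr;  /* stand-in for the emulated APIC TPR */

    /* After #VMEXIT: guest may have moved CR8; reflect it into the APIC. */
    static void sync_cr8_to_lapic(void)
    {
        lapic_tpr = int_ctl & V_TPR_MASK;
    }

    /* Before VMRUN: the APIC's current priority becomes the guest's CR8. */
    static void sync_lapic_to_cr8(void)
    {
        int_ctl &= ~V_TPR_MASK;
        int_ctl |= lapic_tpr & V_TPR_MASK;
    }

    int main(void)
    {
        lapic_tpr = 0x9;
        sync_lapic_to_cr8();
        printf("int_ctl = %#x\n", int_ctl);  /* 0x9 */
        sync_cr8_to_lapic();
        return 0;
    }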
3808 svm->vmcb->control.next_rip = svm->soft_int_next_rip; in svm_complete_soft_interrupt()
3827 u32 exitintinfo = svm->vmcb->control.exit_int_info; in svm_complete_interrupts()
3874 u32 err = svm->vmcb->control.exit_int_info_err; in svm_complete_interrupts()
3895 struct vmcb_control_area *control = &svm->vmcb->control; in svm_cancel_injection()
3910 struct vmcb_control_area *control = &to_svm(vcpu)->vmcb->control; in svm_exit_handlers_fastpath()
3944 svm->vmcb->save.rax = vcpu->arch.regs[VCPU_REGS_RAX]; in svm_vcpu_run()
3945 svm->vmcb->save.rsp = vcpu->arch.regs[VCPU_REGS_RSP]; in svm_vcpu_run()
3946 svm->vmcb->save.rip = vcpu->arch.regs[VCPU_REGS_RIP]; in svm_vcpu_run()
3954 if (svm->nmi_singlestep && svm->vmcb->control.event_inj) { in svm_vcpu_run()
3968 if (unlikely(svm->asid != svm->vmcb->control.asid)) { in svm_vcpu_run()
3969 svm->vmcb->control.asid = svm->asid; in svm_vcpu_run()
3970 vmcb_mark_dirty(svm->vmcb, VMCB_ASID); in svm_vcpu_run()
3972 svm->vmcb->save.cr2 = vcpu->arch.cr2; in svm_vcpu_run()
3974 svm_hv_update_vp_id(svm->vmcb, vcpu); in svm_vcpu_run()
4008 vcpu->arch.cr2 = svm->vmcb->save.cr2; in svm_vcpu_run()
4009 vcpu->arch.regs[VCPU_REGS_RAX] = svm->vmcb->save.rax; in svm_vcpu_run()
4010 vcpu->arch.regs[VCPU_REGS_RSP] = svm->vmcb->save.rsp; in svm_vcpu_run()
4011 vcpu->arch.regs[VCPU_REGS_RIP] = svm->vmcb->save.rip; in svm_vcpu_run()
4015 if (unlikely(svm->vmcb->control.exit_code == SVM_EXIT_NMI)) in svm_vcpu_run()
4023 if (unlikely(svm->vmcb->control.exit_code == SVM_EXIT_NMI)) in svm_vcpu_run()
4034 svm->vmcb->control.exit_code != SVM_EXIT_ERR) in svm_vcpu_run()
4040 svm->vmcb->control.tlb_ctl = TLB_CONTROL_DO_NOTHING; in svm_vcpu_run()
4041 vmcb_mark_all_clean(svm->vmcb); in svm_vcpu_run()
4044 if (svm->vmcb->control.exit_code == SVM_EXIT_EXCP_BASE + PF_VECTOR) in svm_vcpu_run()
4054 if (unlikely(svm->vmcb->control.exit_code == in svm_vcpu_run()
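The svm_vcpu_run() lines above (3944-4041) bracket VMRUN with fixed bookkeeping: RAX, RSP and RIP are the only GPRs the VMCB itself carries, so they are copied in before entry and back out after; a changed ASID is written and its clean bit dropped; and once the run completes, tlb_ctl is reset to TLB_CONTROL_DO_NOTHING and all clean bits are set again. A condensed sketch of that sequence (types trimmed; the TLB_CONTROL_* value mirrors asm/svm.h, the clean-bit index is illustrative):

    #include <stdint.h>

    #define TLB_CONTROL_DO_NOTHING 0
    #define VMCB_ASID              2  /* clean-bit index; illustrative */

    struct vmcb {
        struct { uint64_t rax, rsp, rip, cr2; } save;
        struct { uint32_t asid, tlb_ctl, clean; } control;
    };

    struct vcpu {
        uint64_t regs_rax, regs_rsp, regs_rip, cr2;
        uint32_t asid;  /* ASID picked by the ASID allocator */
        struct vmcb *vmcb;
    };

    static void pre_vmrun(struct vcpu *v)
    {
        struct vmcb *vmcb = v->vmcb;

        /* RAX/RSP/RIP live in the VMCB; other GPRs are swapped in asm. */
        vmcb->save.rax = v->regs_rax;
        vmcb->save.rsp = v->regs_rsp;
        vmcb->save.rip = v->regs_rip;

        if (v->asid != vmcb->control.asid) {
            vmcb->control.asid = v->asid;
            vmcb->control.clean &= ~(1U << VMCB_ASID);
        }
        vmcb->save.cr2 = v->cr2;
    }

    static void post_vmrun(struct vcpu *v)
    {
        struct vmcb *vmcb = v->vmcb;

        v->cr2 = vmcb->save.cr2;
        v->regs_rax = vmcb->save.rax;
        v->regs_rsp = vmcb->save.rsp;
        v->regs_rip = vmcb->save.rip;

        /* Any requested flush has happened; arm the clean bits again. */
        vmcb->control.tlb_ctl = TLB_CONTROL_DO_NOTHING;
        vmcb->control.clean = ~0u;  /* simplified mark_all_clean */
    }

    int main(void)
    {
        struct vmcb vmcb = {0};
        struct vcpu v = { .regs_rip = 0xfff0, .asid = 1, .vmcb = &vmcb };

        pre_vmrun(&v);
        /* ... VMRUN would happen here ... */
        post_vmrun(&v);
        return 0;
    }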
4073 svm->vmcb->control.nested_cr3 = __sme_set(root_hpa); in svm_load_mmu_pgd()
4074 vmcb_mark_dirty(svm->vmcb, VMCB_NPT); in svm_load_mmu_pgd()
4087 svm->vmcb->save.cr3 = cr3; in svm_load_mmu_pgd()
4088 vmcb_mark_dirty(svm->vmcb, VMCB_CR); in svm_load_mmu_pgd()
4241 struct vmcb *vmcb = svm->vmcb; in svm_check_intercept() local
4292 vmcb->control.exit_info_1 = 1; in svm_check_intercept()
4294 vmcb->control.exit_info_1 = 0; in svm_check_intercept()
4331 vmcb->control.exit_info_1 = exit_info; in svm_check_intercept()
4332 vmcb->control.exit_info_2 = info->next_rip; in svm_check_intercept()
4342 vmcb->control.next_rip = info->next_rip; in svm_check_intercept()
4343 vmcb->control.exit_code = icpt_info.exit_code; in svm_check_intercept()
4355 if (to_svm(vcpu)->vmcb->control.exit_code == SVM_EXIT_INTR) in svm_handle_exit_irqoff()
4419 svm->vmcb->save.rax = vcpu->arch.regs[VCPU_REGS_RAX]; in svm_enter_smm()
4420 svm->vmcb->save.rsp = vcpu->arch.regs[VCPU_REGS_RSP]; in svm_enter_smm()
4421 svm->vmcb->save.rip = vcpu->arch.regs[VCPU_REGS_RIP]; in svm_enter_smm()
4442 BUILD_BUG_ON(offsetof(struct vmcb, save) != 0x400); in svm_enter_smm()
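The BUILD_BUG_ON at line 4442 pins the architectural VMCB layout: the control area fills the first 0x400 bytes, so the save area must begin exactly at offset 0x400, an offset the SMM enter/leave code relies on. The same invariant as a user-space _Static_assert over trimmed stand-in types:

    #include <stddef.h>
    #include <stdint.h>

    /*
     * Stand-in: the kernel's vmcb_control_area is padded with reserved
     * fields so that it is exactly 0x400 bytes long.
     */
    struct vmcb_control_area {
        uint8_t bytes[0x400];
    };

    struct vmcb_save_area {
        uint64_t efer, cr0, cr3, cr4;  /* ...and many more fields */
    };

    struct vmcb {
        struct vmcb_control_area control;
        struct vmcb_save_area save;
    };

    /* Same check the kernel makes with BUILD_BUG_ON(). */
    _Static_assert(offsetof(struct vmcb, save) == 0x400,
                   "save area must start at offset 0x400");

    int main(void) { return 0; }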
4455 struct vmcb *vmcb12; in svm_leave_smm()
4619 error_code = to_svm(vcpu)->vmcb->control.exit_info_1; in svm_can_emulate_instruction()