References to the identifier ctl (lines matching refs:ctl) in KVM's nested SVM code, arch/x86/kvm/svm/nested.c. Most hits read or write svm->nested.ctl, the cached copy of the L1-supplied vmcb12 control area; entries flagged "local" are declarations of scratch variables that happen to be named ctl. Each entry shows the source line number, the matching code, and the enclosing function.

62 u64 cr3 = svm->nested.ctl.nested_cr3; in nested_svm_get_tdp_pdptr()
81 return svm->nested.ctl.nested_cr3; in nested_svm_get_tdp_cr3()
99 svm->nested.ctl.nested_cr3); in nested_svm_init_mmu_context()
120 if (!(svm->nested.ctl.virt_ext & VIRTUAL_VMLOAD_VMSAVE_ENABLE_MASK)) in nested_vmcb_needs_vls_intercept()
139 g = &svm->nested.ctl; in recalc_intercepts()
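
The recalc_intercepts() hit above is where the cache feeds intercept recomputation: vmcb02 has to trap everything that either L0 (vmcb01) or L1 (the cached vmcb12 controls, g = &svm->nested.ctl) wants trapped, so the merge amounts to a word-by-word OR. A minimal, self-contained sketch of that pattern, using simplified types and an illustrative word count rather than the kernel's struct definitions:

    #include <stdint.h>
    #include <stdio.h>

    #define NUM_INTERCEPT_WORDS 5   /* illustrative; the kernel uses MAX_INTERCEPT */

    struct ctrl_cache {
        uint32_t intercepts[NUM_INTERCEPT_WORDS];
    };

    /*
     * vmcb02 must intercept everything that either L0 ('h', from vmcb01)
     * or L1 ('g', the cached vmcb12 controls) asked for, so the merge is
     * a word-by-word OR.
     */
    static void merge_intercepts(struct ctrl_cache *c,           /* vmcb02 */
                                 const struct ctrl_cache *h,     /* vmcb01 / L0 */
                                 const struct ctrl_cache *g)     /* svm->nested.ctl */
    {
        for (int i = 0; i < NUM_INTERCEPT_WORDS; i++)
            c->intercepts[i] = h->intercepts[i] | g->intercepts[i];
    }

    int main(void)
    {
        struct ctrl_cache h = { .intercepts = { 0x1, 0, 0, 0, 0x10 } };
        struct ctrl_cache g = { .intercepts = { 0x2, 0, 0, 0, 0x10 } };
        struct ctrl_cache c;

        merge_intercepts(&c, &h, &g);
        printf("word0 = %#x\n", c.intercepts[0]);   /* 0x3 */
        return 0;
    }
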
281 struct hv_vmcb_enlightenments *hve = &svm->nested.ctl.hv_enlightenments; in nested_svm_merge_msrpm()
285 (svm->nested.ctl.clean & BIT(HV_VMCB_NESTED_ENLIGHTENMENTS))) in nested_svm_merge_msrpm()
290 if (!(vmcb12_is_intercept(&svm->nested.ctl, INTERCEPT_MSR_PROT))) in nested_svm_merge_msrpm()
298 gpa = svm->nested.ctl.msrpm_base_pa + (p * sizeof(l1_val)); in nested_svm_merge_msrpm()
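
The nested_svm_merge_msrpm() hits show how the cache feeds the MSR permission bitmap merge: the merge is skipped when vmcb12 does not enable INTERCEPT_MSR_PROT (or when the Hyper-V clean bit says the bitmap is unchanged), and otherwise each L1 permission word is read from guest memory at msrpm_base_pa plus its offset and folded in, so an MSR is intercepted if either L0 or L1 wants it intercepted. A simplified sketch of the address computation and the OR-merge; the fake guest-memory array and helper names are stand-ins, not kernel API:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    typedef uint64_t gpa_t;

    /* Fake "guest physical memory" so the sketch is self-contained. */
    static uint8_t guest_mem[4096];

    static int read_guest_u32(gpa_t gpa, uint32_t *val)
    {
        if (gpa + sizeof(*val) > sizeof(guest_mem))
            return -1;
        memcpy(val, &guest_mem[gpa], sizeof(*val));
        return 0;
    }

    /*
     * Merge one 32-bit chunk of the L1 MSR permission bitmap (located at
     * msrpm_base_pa in guest memory) into the bitmap used while L2 runs.
     * A bit set by either L0 or L1 stays set.
     */
    static int merge_msrpm_word(uint32_t *merged, gpa_t msrpm_base_pa, size_t p)
    {
        uint32_t l1_val;
        gpa_t gpa = msrpm_base_pa + p * sizeof(l1_val);   /* cf. line 298 above */

        if (read_guest_u32(gpa, &l1_val))
            return -1;

        merged[p] |= l1_val;
        return 0;
    }

    int main(void)
    {
        uint32_t merged[2] = { 0x1, 0x0 };      /* L0's own intercept bits */

        guest_mem[0x100] = 0x2;                 /* L1 intercepts something else */
        merge_msrpm_word(merged, 0x100, 0);
        printf("merged[0] = %#x\n", merged[0]); /* 0x3 */
        return 0;
    }
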
401 struct vmcb_ctrl_area_cached *ctl = &svm->nested.ctl; in nested_vmcb_check_controls() local
403 return __nested_vmcb_check_controls(vcpu, ctl); in nested_vmcb_check_controls()
456 __nested_copy_vmcb_control_to_cache(&svm->vcpu, &svm->nested.ctl, control); in nested_copy_vmcb_control_to_cache()
488 svm->nested.ctl.event_inj = svm->vmcb->control.event_inj; in nested_sync_control_from_vmcb02()
489 svm->nested.ctl.event_inj_err = svm->vmcb->control.event_inj_err; in nested_sync_control_from_vmcb02()
502 !test_bit(INTERCEPT_VINTR, (unsigned long *)svm->nested.ctl.intercepts)) in nested_sync_control_from_vmcb02()
511 svm->nested.ctl.int_ctl &= ~mask; in nested_sync_control_from_vmcb02()
512 svm->nested.ctl.int_ctl |= svm->vmcb->control.int_ctl & mask; in nested_sync_control_from_vmcb02()
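
nested_svm_sync runs the other direction: nested_sync_control_from_vmcb02() copies hardware-updated fields back from vmcb02 into the cache, and the int_ctl update at lines 511-512 is a read-modify-write under a mask, so only the bits the CPU is allowed to change flow back into svm->nested.ctl. A standalone sketch of that masked update:

    #include <stdint.h>
    #include <stdio.h>

    /*
     * Fold the bits selected by 'mask' from the hardware-maintained value
     * (vmcb02) back into the cached vmcb12 value, leaving all other cached
     * bits untouched -- the pattern at lines 511-512 above.
     */
    static uint32_t sync_masked(uint32_t cached, uint32_t hw, uint32_t mask)
    {
        cached &= ~mask;
        cached |= hw & mask;
        return cached;
    }

    int main(void)
    {
        uint32_t cached = 0xf0, hw = 0x0f, mask = 0x03;

        printf("%#x\n", sync_masked(cached, hw, mask));  /* 0xf3 */
        return 0;
    }
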
665 (svm->nested.ctl.virt_ext & LBR_CTL_ENABLE_MASK))) { in nested_vmcb02_prepare_save()
727 (svm->nested.ctl.int_ctl & V_GIF_ENABLE_MASK)) in nested_vmcb02_prepare_control()
770 if (vmcb02->save.rip && (svm->nested.ctl.bus_lock_rip == vmcb02->save.rip)) in nested_vmcb02_prepare_control()
786 svm->nested.ctl.tsc_offset, in nested_vmcb02_prepare_control()
796 (svm->nested.ctl.int_ctl & int_ctl_vmcb12_bits) | in nested_vmcb02_prepare_control()
799 vmcb02->control.int_vector = svm->nested.ctl.int_vector; in nested_vmcb02_prepare_control()
800 vmcb02->control.int_state = svm->nested.ctl.int_state; in nested_vmcb02_prepare_control()
801 vmcb02->control.event_inj = svm->nested.ctl.event_inj; in nested_vmcb02_prepare_control()
802 vmcb02->control.event_inj_err = svm->nested.ctl.event_inj_err; in nested_vmcb02_prepare_control()
813 vmcb02->control.next_rip = svm->nested.ctl.next_rip; in nested_vmcb02_prepare_control()
823 svm->soft_int_next_rip = svm->nested.ctl.next_rip; in nested_vmcb02_prepare_control()
832 (svm->nested.ctl.virt_ext & LBR_CTL_ENABLE_MASK); in nested_vmcb02_prepare_control()
838 pause_count12 = svm->nested.ctl.pause_filter_count; in nested_vmcb02_prepare_control()
842 pause_thresh12 = svm->nested.ctl.pause_filter_thresh; in nested_vmcb02_prepare_control()
856 if (vmcb12_is_intercept(&svm->nested.ctl, INTERCEPT_PAUSE)) { in nested_vmcb02_prepare_control()
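
The long run of hits in nested_vmcb02_prepare_control() is the cache being consumed when VMRUN is emulated: interrupt state, event injection and next_rip are copied from svm->nested.ctl into vmcb02, and int_ctl is built from a masked subset of the L1-supplied bits combined with whatever L0 keeps for itself. A simplified, self-contained sketch of that copy step, omitting the feature checks that gate some of the copies in the real code (field names follow the listing; the mask value is an illustrative placeholder):

    #include <stdint.h>

    struct ctrl {                      /* reduced stand-in for a vmcb control area */
        uint32_t int_ctl;
        uint32_t int_vector;
        uint32_t int_state;
        uint32_t event_inj;
        uint32_t event_inj_err;
        uint64_t next_rip;
    };

    #define INT_CTL_VMCB12_BITS 0x00ffu  /* illustrative; the real mask picks V_IRQ/V_TPR etc. */

    /*
     * Build vmcb02's control fields from the cached vmcb12 controls (g) and
     * the int_ctl bits L0 insists on keeping (l0_int_ctl_bits).
     */
    static void prepare_control(struct ctrl *vmcb02, const struct ctrl *g,
                                uint32_t l0_int_ctl_bits)
    {
        vmcb02->int_ctl       = (g->int_ctl & INT_CTL_VMCB12_BITS) | l0_int_ctl_bits;
        vmcb02->int_vector    = g->int_vector;
        vmcb02->int_state     = g->int_state;
        vmcb02->event_inj     = g->event_inj;
        vmcb02->event_inj_err = g->event_inj_err;
        vmcb02->next_rip      = g->next_rip;
    }

    int main(void)
    {
        struct ctrl vmcb02 = {0};
        struct ctrl cached = { .int_ctl = 0x1ff, .event_inj = 0x80000000u, .next_rip = 0x1000 };

        prepare_control(&vmcb02, &cached, 0x01000000u);
        return 0;
    }
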
1126 vmcb12->control.int_ctl = svm->nested.ctl.int_ctl; in nested_svm_vmexit()
1127 vmcb12->control.event_inj = svm->nested.ctl.event_inj; in nested_svm_vmexit()
1128 vmcb12->control.event_inj_err = svm->nested.ctl.event_inj_err; in nested_svm_vmexit()
1141 svm->nested.ctl.bus_lock_rip = INVALID_GPA; in nested_svm_vmexit()
1172 (svm->nested.ctl.virt_ext & LBR_CTL_ENABLE_MASK))) { in nested_svm_vmexit()
1213 svm->nested.ctl.nested_cr3 = 0; in nested_svm_vmexit()
1277 if (!vmcb12_is_intercept(&svm->nested.ctl, INTERCEPT_SHUTDOWN)) in nested_svm_triple_fault()
1359 gpa_t base = svm->nested.ctl.msrpm_base_pa; in nested_svm_exit_handled_msr()
1364 if (!(vmcb12_is_intercept(&svm->nested.ctl, INTERCEPT_MSR_PROT))) in nested_svm_exit_handled_msr()
1389 if (!(vmcb12_is_intercept(&svm->nested.ctl, INTERCEPT_IOIO_PROT))) in nested_svm_intercept_ioio()
1395 gpa = svm->nested.ctl.iopm_base_pa + (port / 8); in nested_svm_intercept_ioio()
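
nested_svm_intercept_ioio() shows the same "base address from the cache plus computed offset" pattern for the IO permission bitmap: with one bit per port, the byte holding a port's bit sits at iopm_base_pa + port / 8 (line 1395). A self-contained sketch of that lookup against a local array standing in for L1's bitmap; the real code reads the bytes from guest memory and also accounts for the access size:

    #include <stdint.h>
    #include <stdbool.h>
    #include <stdio.h>

    /* 64K ports, one bit each -> 8 KiB bitmap (stand-in for L1's IOPM). */
    static uint8_t iopm[65536 / 8];

    /* Is IN/OUT on 'port' intercepted according to the permission bitmap? */
    static bool ioio_intercepted(uint16_t port)
    {
        return iopm[port / 8] & (1u << (port % 8));
    }

    int main(void)
    {
        iopm[0x3f8 / 8] |= 1u << (0x3f8 % 8);   /* L1 intercepts the COM1 data port */

        printf("port 0x3f8: %d\n", ioio_intercepted(0x3f8));  /* 1 */
        printf("port 0x3f9: %d\n", ioio_intercepted(0x3f9));  /* 0 */
        return 0;
    }
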
1420 if (vmcb12_is_intercept(&svm->nested.ctl, exit_code)) in nested_svm_intercept()
1425 if (vmcb12_is_intercept(&svm->nested.ctl, exit_code)) in nested_svm_intercept()
1443 if (vmcb12_is_intercept(&svm->nested.ctl, exit_code)) in nested_svm_intercept()
1483 return (svm->nested.ctl.intercepts[INTERCEPT_EXCEPTION] & BIT(vector)); in nested_svm_is_exception_vmexit()
1524 return vmcb12_is_intercept(&svm->nested.ctl, INTERCEPT_INIT); in nested_exit_on_init()
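
All of the nested_svm_intercept()/nested_exit_on_*() hits reduce to one test: the cached intercept vector is an array of 32-bit words, and vmcb12_is_intercept() checks a single bit in it, which is also why line 1483 can index a whole word (INTERCEPT_EXCEPTION) and mask with BIT(vector). A minimal re-creation of that bit test with simplified types and a made-up bit number:

    #include <stdint.h>
    #include <stdbool.h>
    #include <stdio.h>

    #define MAX_INTERCEPT_WORDS 5            /* illustrative word count */

    struct ctrl_cache {
        uint32_t intercepts[MAX_INTERCEPT_WORDS];
    };

    /* Bit numbers span the whole array: word = bit / 32, bit-in-word = bit % 32. */
    static bool cache_is_intercept(const struct ctrl_cache *ctl, uint32_t bit)
    {
        return ctl->intercepts[bit / 32] & (1u << (bit % 32));
    }

    int main(void)
    {
        struct ctrl_cache ctl = {0};
        uint32_t fake_intercept_bit = 4 * 32 + 31;   /* illustrative bit number */

        ctl.intercepts[fake_intercept_bit / 32] |= 1u << (fake_intercept_bit % 32);
        printf("%d\n", cache_is_intercept(&ctl, fake_intercept_bit));   /* 1 */
        return 0;
    }
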
1689 struct vmcb_control_area *ctl; in svm_get_nested_state() local
1733 ctl = kzalloc(sizeof(*ctl), GFP_KERNEL); in svm_get_nested_state()
1734 if (!ctl) in svm_get_nested_state()
1737 nested_copy_vmcb_cache_to_control(ctl, &svm->nested.ctl); in svm_get_nested_state()
1738 r = copy_to_user(&user_vmcb->control, ctl, in svm_get_nested_state()
1740 kfree(ctl); in svm_get_nested_state()
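
The svm_get_nested_state() hits (the local ctl here is a temporary, not the cache) show the userspace marshalling pattern at lines 1733-1740: allocate a scratch control area, expand the cache into it with nested_copy_vmcb_cache_to_control(), copy_to_user() it, free it. A hedged user-space analogue of that allocate/convert/copy/free shape; the types and copy_out() are stand-ins, not kernel API:

    #include <stdlib.h>
    #include <string.h>

    struct ctrl_cache { unsigned int asid; };                  /* stand-in for vmcb_ctrl_area_cached */
    struct ctrl_area  { unsigned int asid; char pad[252]; };   /* stand-in for vmcb_control_area */

    /* Expand the compact cached form into the full userspace-visible layout. */
    static void cache_to_control(struct ctrl_area *dst, const struct ctrl_cache *src)
    {
        dst->asid = src->asid;
    }

    /* copy_to_user() analogue; the int return mirrors "did the copy fail?". */
    static int copy_out(void *user_dst, const void *src, size_t len)
    {
        memcpy(user_dst, src, len);
        return 0;
    }

    static int get_nested_state(void *user_buf, const struct ctrl_cache *cache)
    {
        struct ctrl_area *ctl = calloc(1, sizeof(*ctl));       /* kzalloc() analogue */
        int r;

        if (!ctl)
            return -1;

        cache_to_control(ctl, cache);                          /* cache -> full control area */
        r = copy_out(user_buf, ctl, sizeof(*ctl));             /* hand the copy to userspace */

        free(ctl);                                             /* kfree() on every path */
        return r;
    }

    int main(void)
    {
        struct ctrl_cache cache = { .asid = 1 };
        struct ctrl_area out;

        return get_nested_state(&out, &cache);
    }
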
1758 struct vmcb_control_area *ctl; in svm_set_nested_state() local
1802 ctl = kzalloc(sizeof(*ctl), GFP_KERNEL); in svm_set_nested_state()
1804 if (!ctl || !save) in svm_set_nested_state()
1808 if (copy_from_user(ctl, &user_vmcb->control, sizeof(*ctl))) in svm_set_nested_state()
1814 __nested_copy_vmcb_control_to_cache(vcpu, &ctl_cached, ctl); in svm_set_nested_state()
1858 nested_copy_vmcb_control_to_cache(svm, ctl); in svm_set_nested_state()
1881 kfree(ctl); in svm_set_nested_state()
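
svm_set_nested_state() runs the pattern in reverse: a scratch control area is filled with copy_from_user() (line 1808), converted to a cached form for validation (line 1814), and only then installed into svm->nested.ctl via nested_copy_vmcb_control_to_cache() (line 1858), with the scratch buffer freed on all paths (line 1881). A user-space sketch of that order of operations; the helper names are stand-ins, and the two conversions the kernel performs are collapsed into one here:

    #include <stdlib.h>
    #include <string.h>

    struct ctrl_area  { unsigned int asid; char pad[252]; };   /* stand-in for vmcb_control_area */
    struct ctrl_cache { unsigned int asid; };                  /* stand-in for vmcb_ctrl_area_cached */

    static void control_to_cache(struct ctrl_cache *dst, const struct ctrl_area *src)
    {
        dst->asid = src->asid;
    }

    static int check_controls(const struct ctrl_cache *ctl)
    {
        return ctl->asid != 0 ? 0 : -1;     /* representative consistency check */
    }

    /* copy_from_user() analogue. */
    static int copy_in(void *dst, const void *user_src, size_t len)
    {
        memcpy(dst, user_src, len);
        return 0;
    }

    static int set_nested_state(struct ctrl_cache *installed, const void *user_buf)
    {
        struct ctrl_cache tmp;
        struct ctrl_area *ctl = calloc(1, sizeof(*ctl));
        int r = -1;

        if (!ctl)
            return -1;

        if (copy_in(ctl, user_buf, sizeof(*ctl)))         /* 1. pull in the userspace copy  */
            goto out;

        control_to_cache(&tmp, ctl);                      /* 2. convert to the cached form  */
        if (check_controls(&tmp))                         /* 3. validate before use         */
            goto out;

        *installed = tmp;                                 /* 4. install into the cache      */
        r = 0;
    out:
        free(ctl);                                        /* freed on every path */
        return r;
    }

    int main(void)
    {
        struct ctrl_area user = { .asid = 1 };
        struct ctrl_cache cache;

        return set_nested_state(&cache, &user);
    }
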