Lines matching refs: nested — every use of vcpu->arch.nested (struct acrn_nested) in ACRN's nested VMX handling, each line annotated with the function it belongs to.
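
All of the references below go through a per-vCPU nested-VMX context reachable as vcpu->arch.nested. The following is a minimal sketch of the two structures these lines imply, reconstructed from the field accesses only; the array bound MAX_ACTIVE_VVMCS_NUM, the field types, and any members not touched in this listing are assumptions, not the actual ACRN definitions.

    struct acrn_vvmcs {
        uint64_t vmcs12_gpa;          /* GPA of the L1 guest's VMCS12; INVALID_GPA marks a free slot */
        uint32_t ref_cnt;             /* usage counter used to pick a victim when the cache is full */
        bool host_state_dirty;        /* VMCS12 host-state fields changed and must be re-applied */
        struct acrn_vmcs12 vmcs12;    /* cached, software copy of the L1-visible VMCS */
    };

    struct acrn_nested {
        uint64_t vmxon_ptr;           /* GPA the L1 guest passed to VMXON */
        bool vmxon;                   /* set by VMXON, cleared by VMXOFF */
        bool in_l2_guest;             /* true while this vCPU is running the L2 guest */
        struct acrn_vvmcs *current_vvmcs;                /* slot made current by VMPTRLD, or NULL */
        struct acrn_vvmcs vvmcs[MAX_ACTIVE_VVMCS_NUM];   /* small cache of active VMCS12s */
    };
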
715 vcpu->arch.nested.current_vvmcs = NULL; in reset_vvmcs()
718 vvmcs = &vcpu->arch.nested.vvmcs[idx]; in reset_vvmcs()
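
Lines 715 and 718 come from reset_vvmcs(), which evidently drops the current pointer and walks the vvmcs[] cache. A plausible shape, assuming each slot is returned to the "free" state the other handlers test for (INVALID_GPA, zero ref_cnt):

    static void reset_vvmcs(struct acrn_vcpu *vcpu)
    {
        uint32_t idx;
        struct acrn_vvmcs *vvmcs;

        vcpu->arch.nested.current_vvmcs = NULL;

        for (idx = 0U; idx < MAX_ACTIVE_VVMCS_NUM; idx++) {
            vvmcs = &vcpu->arch.nested.vvmcs[idx];
            vvmcs->vmcs12_gpa = INVALID_GPA;   /* slot is free again */
            vvmcs->ref_cnt = 0U;
        }
    }
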
751 } else if (vcpu->arch.nested.vmxon == true) { in vmxon_vmexit_handler()
761 vcpu->arch.nested.vmxon = true; in vmxon_vmexit_handler()
762 vcpu->arch.nested.in_l2_guest = false; in vmxon_vmexit_handler()
763 vcpu->arch.nested.vmxon_ptr = vmptr_gpa; in vmxon_vmexit_handler()
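
Line 751 is an else-if branch, so vmxon_vmexit_handler() validates the instruction before this point; lines 761-763 are the success path. A sketch of that tail of the handler; the helpers (get_vmptr_gpa, check_vmx_prerequisites, nested_vmx_result) and the error-code constant are placeholders borrowed from the SDM's VM-instruction error names, not necessarily the real symbols:

    static int32_t vmxon_vmexit_handler(struct acrn_vcpu *vcpu)
    {
        uint64_t vmptr_gpa = get_vmptr_gpa(vcpu);      /* operand decode: assumed helper */

        if (!check_vmx_prerequisites(vcpu)) {          /* CR0/CR4/CPL checks: assumed */
            /* #UD or #GP already injected */
        } else if (vcpu->arch.nested.vmxon == true) {
            /* VMXON while already in VMX operation -> VMfailValid */
            nested_vmx_result(VMfailValid, VMXERR_VMXON_IN_VMX_ROOT_OPERATION);
        } else {
            vcpu->arch.nested.vmxon = true;
            vcpu->arch.nested.in_l2_guest = false;     /* vCPU is now in (virtual) VMX root operation */
            vcpu->arch.nested.vmxon_ptr = vmptr_gpa;   /* remember the VMXON region GPA */
            nested_vmx_result(VMsucceed, 0);
        }

        return 0;
    }
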
784 if ((vcpu->arch.nested.vmxon == false) in check_vmx_permission()
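
Line 784 shows the start of the guard used by the other VMX-instruction handlers: no VMXON, no VMX instructions. The condition visibly continues past what the listing shows, most likely with a privilege check. A minimal sketch, with the second condition and the injection helper assumed:

    static bool check_vmx_permission(struct acrn_vcpu *vcpu)
    {
        bool permit = true;

        /* VMX instructions are valid only after VMXON (and, presumably, only at CPL 0) */
        if ((vcpu->arch.nested.vmxon == false) || !guest_at_cpl0(vcpu)) {
            vcpu_inject_ud(vcpu);   /* inject #UD back into the L1 guest; helper name assumed */
            permit = false;
        }

        return permit;
    }
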
807 vcpu->arch.nested.vmxon = false; in vmxoff_vmexit_handler()
808 vcpu->arch.nested.in_l2_guest = false; in vmxoff_vmexit_handler()
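
Lines 807-808: VMXOFF undoes the VMXON bookkeeping, clearing in_l2_guest as well so the flags stay consistent. A sketch; the call to reset_vvmcs() is an assumption about what else the handler does, not something visible in the listing:

    static int32_t vmxoff_vmexit_handler(struct acrn_vcpu *vcpu)
    {
        if (check_vmx_permission(vcpu)) {
            vcpu->arch.nested.vmxon = false;
            vcpu->arch.nested.in_l2_guest = false;
            reset_vvmcs(vcpu);              /* assumption: drop the cached VMCS12s as well */
            nested_vmx_result(VMsucceed, 0);
        }

        return 0;
    }
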
837 if (vcpu->arch.nested.vvmcs[idx].vmcs12_gpa == vmcs12_gpa) { in lookup_vvmcs()
838 vvmcs = &vcpu->arch.nested.vvmcs[idx]; in lookup_vvmcs()
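
Lines 837-838: lookup_vvmcs() is a linear scan of the cache keyed by the VMCS12 GPA. Reconstructed from those two lines:

    static struct acrn_vvmcs *lookup_vvmcs(struct acrn_vcpu *vcpu, uint64_t vmcs12_gpa)
    {
        struct acrn_vvmcs *vvmcs = NULL;
        uint32_t idx;

        for (idx = 0U; idx < MAX_ACTIVE_VVMCS_NUM; idx++) {
            if (vcpu->arch.nested.vvmcs[idx].vmcs12_gpa == vmcs12_gpa) {
                vvmcs = &vcpu->arch.nested.vvmcs[idx];
                break;
            }
        }

        return vvmcs;
    }
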
851 struct acrn_nested *nested = &vcpu->arch.nested; in get_or_replace_vvmcs_entry() local
857 if (nested->vvmcs[idx].vmcs12_gpa == INVALID_GPA) { in get_or_replace_vvmcs_entry()
859 vvmcs = &nested->vvmcs[idx]; in get_or_replace_vvmcs_entry()
868 if (nested->vvmcs[idx].ref_cnt < min_cnt) { in get_or_replace_vvmcs_entry()
869 min_cnt = nested->vvmcs[idx].ref_cnt; in get_or_replace_vvmcs_entry()
870 vvmcs = &nested->vvmcs[idx]; in get_or_replace_vvmcs_entry()
879 nested->vvmcs[idx].ref_cnt = 0U; in get_or_replace_vvmcs_entry()
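
Lines 851-879 outline the eviction policy in get_or_replace_vvmcs_entry(): prefer a free slot (vmcs12_gpa == INVALID_GPA), otherwise recycle the slot with the smallest ref_cnt, then reset the winner's counter. A sketch of that selection; flushing the evicted entry back to guest memory is implied but not visible in the listing:

    static struct acrn_vvmcs *get_or_replace_vvmcs_entry(struct acrn_vcpu *vcpu)
    {
        struct acrn_nested *nested = &vcpu->arch.nested;
        struct acrn_vvmcs *vvmcs = NULL;
        uint32_t idx, min_cnt = UINT32_MAX;

        /* first choice: an unused slot */
        for (idx = 0U; idx < MAX_ACTIVE_VVMCS_NUM; idx++) {
            if (nested->vvmcs[idx].vmcs12_gpa == INVALID_GPA) {
                vvmcs = &nested->vvmcs[idx];
                break;
            }
        }

        /* otherwise: evict the least-referenced entry */
        if (vvmcs == NULL) {
            for (idx = 0U; idx < MAX_ACTIVE_VVMCS_NUM; idx++) {
                if (nested->vvmcs[idx].ref_cnt < min_cnt) {
                    min_cnt = nested->vvmcs[idx].ref_cnt;
                    vvmcs = &nested->vvmcs[idx];
                }
            }
            /* the evicted slot would presumably be written back to the guest here */
        }

        vvmcs->ref_cnt = 0U;   /* line 879: a recycled slot starts with a clean counter */

        return vvmcs;
    }
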
891 struct acrn_vvmcs *cur_vvmcs = vcpu->arch.nested.current_vvmcs; in vmread_vmexit_handler()
926 struct acrn_vvmcs *cur_vvmcs = vcpu->arch.nested.current_vvmcs; in vmwrite_vmexit_handler()
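
Lines 891 and 926 show that both VMREAD and VMWRITE resolve through nested.current_vvmcs, i.e. they operate on whichever cached VMCS12 was made current by VMPTRLD. A condensed sketch of the common pattern; the field-decode and register-access helpers are placeholders, not ACRN symbols:

    static int32_t vmread_vmexit_handler(struct acrn_vcpu *vcpu)
    {
        struct acrn_vvmcs *cur_vvmcs = vcpu->arch.nested.current_vvmcs;

        if (check_vmx_permission(vcpu)) {
            if (cur_vvmcs == NULL) {
                nested_vmx_result(VMfailInvalid, 0);   /* no current VMCS */
            } else {
                uint64_t field = get_vmcs_field_operand(vcpu);                  /* placeholder */
                uint64_t value = vmcs12_read_field(&cur_vvmcs->vmcs12, field);  /* placeholder */

                store_vmread_result(vcpu, value);                               /* placeholder */
                nested_vmx_result(VMsucceed, 0);
            }
        }

        return 0;
    }

    /* vmwrite_vmexit_handler() mirrors this, writing the decoded value into
     * cur_vvmcs->vmcs12 and, presumably, marking the relevant dirty flags. */
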
1154 struct acrn_nested *nested = &vcpu->arch.nested; in vmptrld_vmexit_handler() local
1163 } else if (vmcs12_gpa == nested->vmxon_ptr) { in vmptrld_vmexit_handler()
1167 } else if ((nested->current_vvmcs != NULL) && (nested->current_vvmcs->vmcs12_gpa == vmcs12_gpa)) { in vmptrld_vmexit_handler()
1216 nested->current_vvmcs = vvmcs; in vmptrld_vmexit_handler()
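
Lines 1154-1216 give the skeleton of vmptrld_vmexit_handler(): reject the VMXON region itself as an operand, short-circuit if the requested VMCS12 is already current, otherwise find (or recycle) a cache slot and make it current. A sketch of that decision chain; loading the VMCS12 contents from guest memory is implied but abbreviated, and the validity-check helper plus error-code names are assumptions:

    static int32_t vmptrld_vmexit_handler(struct acrn_vcpu *vcpu)
    {
        struct acrn_nested *nested = &vcpu->arch.nested;
        uint64_t vmcs12_gpa;

        if (check_vmx_permission(vcpu)) {
            vmcs12_gpa = get_vmptr_gpa(vcpu);                 /* operand decode: assumed helper */

            if (!is_valid_vmcs12_gpa(vmcs12_gpa)) {           /* alignment/width checks: assumed */
                nested_vmx_result(VMfailValid, VMXERR_VMPTRLD_INVALID_ADDRESS);
            } else if (vmcs12_gpa == nested->vmxon_ptr) {
                /* the VMXON region may not be loaded as a VMCS */
                nested_vmx_result(VMfailValid, VMXERR_VMPTRLD_VMXON_POINTER);
            } else if ((nested->current_vvmcs != NULL) &&
                       (nested->current_vvmcs->vmcs12_gpa == vmcs12_gpa)) {
                /* already current: nothing to do */
                nested_vmx_result(VMsucceed, 0);
            } else {
                struct acrn_vvmcs *vvmcs = lookup_vvmcs(vcpu, vmcs12_gpa);

                if (vvmcs == NULL) {
                    vvmcs = get_or_replace_vvmcs_entry(vcpu);
                    /* ... copy the VMCS12 in from guest memory, set vvmcs->vmcs12_gpa ... */
                }
                nested->current_vvmcs = vvmcs;                /* line 1216 */
                nested_vmx_result(VMsucceed, 0);
            }
        }

        return 0;
    }
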
1229 struct acrn_nested *nested = &vcpu->arch.nested; in vmclear_vmexit_handler() local
1238 } else if (vmcs12_gpa == nested->vmxon_ptr) { in vmclear_vmexit_handler()
1246 if (nested->current_vvmcs) { in vmclear_vmexit_handler()
1247 current_vmcs12_gpa = nested->current_vvmcs->vmcs12_gpa; in vmclear_vmexit_handler()
1260 nested->current_vvmcs = NULL; in vmclear_vmexit_handler()
1266 enable_vmcs_shadowing(nested->current_vvmcs); in vmclear_vmexit_handler()
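
Lines 1229-1266: VMCLEAR likewise rejects the VMXON region (1238); if the operand matches the current VMCS12 the cached copy is presumably flushed back to guest memory and the current pointer is dropped (1260), while line 1266 (re)enables VMCS shadowing for whichever VMCS12 is current. The exact branch structure between lines 1246 and 1266 is not visible in the listing; the following is one plausible reading, with the write-back elided and helper/error names assumed:

    static int32_t vmclear_vmexit_handler(struct acrn_vcpu *vcpu)
    {
        struct acrn_nested *nested = &vcpu->arch.nested;
        uint64_t vmcs12_gpa, current_vmcs12_gpa;

        if (check_vmx_permission(vcpu)) {
            vmcs12_gpa = get_vmptr_gpa(vcpu);                 /* assumed helper */

            if (vmcs12_gpa == nested->vmxon_ptr) {
                /* VMCLEAR on the VMXON region is illegal */
                nested_vmx_result(VMfailValid, VMXERR_VMCLEAR_VMXON_POINTER);
            } else {
                if (nested->current_vvmcs != NULL) {
                    current_vmcs12_gpa = nested->current_vvmcs->vmcs12_gpa;   /* line 1247 */

                    if (current_vmcs12_gpa == vmcs12_gpa) {
                        /* clearing the current VMCS: flush the cached copy back, drop it */
                        nested->current_vvmcs = NULL;                         /* line 1260 */
                    } else {
                        /* clearing some other VMCS12; the current one keeps its shadow mapping */
                        enable_vmcs_shadowing(nested->current_vvmcs);         /* line 1266 */
                    }
                }
                nested_vmx_result(VMsucceed, 0);
            }
        }

        return 0;
    }
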
1294 return vcpu->arch.nested.in_l2_guest; in is_vcpu_in_l2_guest()
1329 struct acrn_vmcs12 *vmcs12 = &vcpu->arch.nested.current_vvmcs->vmcs12; in set_vmcs01_guest_state()
1332 if (vcpu->arch.nested.current_vvmcs->host_state_dirty == true) { in set_vmcs01_guest_state()
1333 vcpu->arch.nested.current_vvmcs->host_state_dirty = false; in set_vmcs01_guest_state()
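
Lines 1329-1333 show the dirty-flag pattern used when switching back to L1: set_vmcs01_guest_state() reads the cached vmcs12 and, if the VMCS12 host-state fields changed since the last switch (host_state_dirty), consumes and clears the flag so VMCS01's guest state is rebuilt from the updated values. Roughly, with the actual field writes left as comments because their names are assumptions:

    static void set_vmcs01_guest_state(struct acrn_vcpu *vcpu)
    {
        struct acrn_vmcs12 *vmcs12 = &vcpu->arch.nested.current_vvmcs->vmcs12;

        if (vcpu->arch.nested.current_vvmcs->host_state_dirty == true) {
            vcpu->arch.nested.current_vvmcs->host_state_dirty = false;
            /* reload VMCS01 guest state from the VMCS12 host-state fields, e.g.
             * (VMCS field macros and vmcs12 member names are assumptions):
             *   exec_vmwrite(VMX_GUEST_CR3, vmcs12->host_cr3);
             *   exec_vmwrite(VMX_GUEST_RIP, vmcs12->host_rip);
             *   ...
             */
        }
    }
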
1426 struct acrn_vvmcs *cur_vvmcs = vcpu->arch.nested.current_vvmcs; in nested_vmexit_handler()
1450 vcpu->arch.nested.in_l2_guest = false; in nested_vmexit_handler()
1473 struct acrn_vvmcs *cur_vvmcs = vcpu->arch.nested.current_vvmcs; in nested_vmentry()
1505 vcpu->arch.nested.in_l2_guest = true; in nested_vmentry()
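
Lines 1426/1450 and 1473/1505 bracket the L2 lifetime: nested_vmentry() flips in_l2_guest to true once the current VMCS12 has been merged into the hardware VMCS, and nested_vmexit_handler() flips it back to false when an exit is reflected to L1; is_vcpu_in_l2_guest() (line 1294) is the accessor the rest of the hypervisor uses to tell the two worlds apart. A condensed sketch of the toggle, with simplified signatures and the heavy lifting elided:

    static void nested_vmentry(struct acrn_vcpu *vcpu)
    {
        struct acrn_vvmcs *cur_vvmcs = vcpu->arch.nested.current_vvmcs;

        /* ... validate cur_vvmcs->vmcs12 and merge it into the hardware VMCS (elided) ... */
        vcpu->arch.nested.in_l2_guest = true;    /* line 1505 */
    }

    static void nested_vmexit_handler(struct acrn_vcpu *vcpu)
    {
        struct acrn_vvmcs *cur_vvmcs = vcpu->arch.nested.current_vvmcs;

        /* ... record exit reason/qualification into cur_vvmcs->vmcs12, switch back to L1 (elided) ... */
        vcpu->arch.nested.in_l2_guest = false;   /* line 1450 */
    }
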