Lines matching refs:loaded_vmcs — cross-reference hits for struct loaded_vmcs; the function names place all of them in KVM's nested VMX code (arch/x86/kvm/vmx/nested.c).
275 struct loaded_vmcs *prev) in vmx_sync_vmcs_host_state()
283 dest = &vmx->loaded_vmcs->host_state; in vmx_sync_vmcs_host_state()
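The two hits above are the prev parameter and the destination of the host-state
sync that runs on a VMCS switch. A minimal sketch of the surrounding function,
reconstructed from these lines plus upstream KVM (the guest_state_loaded check
and vmx_set_host_fs_gs() come from upstream, not from the hits themselves):

static void vmx_sync_vmcs_host_state(struct vcpu_vmx *vmx,
				     struct loaded_vmcs *prev)
{
	struct vmcs_host_state *dest, *src;

	/* Nothing to migrate if host state was never loaded for this vCPU. */
	if (unlikely(!vmx->guest_state_loaded))
		return;

	src = &prev->host_state;
	dest = &vmx->loaded_vmcs->host_state;

	/* Carry the cached host segment state over to the new VMCS's cache. */
	vmx_set_host_fs_gs(dest, src->fs_sel, src->gs_sel,
			   src->fs_base, src->gs_base);
	dest->ldt_sel = src->ldt_sel;
#ifdef CONFIG_X86_64
	dest->ds_sel = src->ds_sel;
	dest->es_sel = src->es_sel;
#endif
}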
293 static void vmx_switch_vmcs(struct kvm_vcpu *vcpu, struct loaded_vmcs *vmcs) in vmx_switch_vmcs()
296 struct loaded_vmcs *prev; in vmx_switch_vmcs()
299 if (WARN_ON_ONCE(vmx->loaded_vmcs == vmcs)) in vmx_switch_vmcs()
303 prev = vmx->loaded_vmcs; in vmx_switch_vmcs()
304 vmx->loaded_vmcs = vmcs; in vmx_switch_vmcs()
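Lines 293-304 show the switch itself: a no-op switch is rejected, then the
loaded_vmcs pointer is swapped and the new VMCS is loaded on the current CPU.
A sketch assuming the surrounding code matches upstream KVM (get_cpu()/put_cpu(),
vmx_vcpu_load_vmcs() and vmx_register_cache_reset() are taken from upstream):

static void vmx_switch_vmcs(struct kvm_vcpu *vcpu, struct loaded_vmcs *vmcs)
{
	struct vcpu_vmx *vmx = to_vmx(vcpu);
	struct loaded_vmcs *prev;
	int cpu;

	if (WARN_ON_ONCE(vmx->loaded_vmcs == vmcs))
		return;

	cpu = get_cpu();			/* no preemption across the switch */
	prev = vmx->loaded_vmcs;
	vmx->loaded_vmcs = vmcs;
	vmx_vcpu_load_vmcs(vcpu, cpu, prev);
	vmx_sync_vmcs_host_state(vmx, prev);	/* see lines 275/283 above */
	put_cpu();

	vmx_register_cache_reset(vcpu);		/* cached guest state was prev's */
}

Keeping the per-VMCS host_state cache in sync here, rather than rewriting every
host field, is what lets nested_vmx_check_vmentry_hw() below skip redundant
vmcs_writel() calls.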
336 if (WARN_ON_ONCE(vmx->loaded_vmcs != &vmx->vmcs01)) in free_nested()
1631 vmcs_load(vmx->loaded_vmcs->vmcs); in copy_shadow_to_vmcs12()
1667 vmcs_load(vmx->loaded_vmcs->vmcs); in copy_vmcs12_to_shadow()
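The hits at 1631 and 1667 (and at 5770 below) are all the restore step of the
same pattern: the shadow VMCS is made current with vmcs_load() while shadowed
fields are copied in or out, then the original VMCS is reloaded. A minimal
sketch of that bracketing, with the field-copy loop elided:

	vmcs_load(shadow_vmcs);

	/* ... copy the shadowed fields via the vmcs_read/vmcs_write helpers ... */

	vmcs_clear(shadow_vmcs);
	vmcs_load(vmx->loaded_vmcs->vmcs);	/* make the original VMCS current again */

Upstream brackets this with preempt_disable()/preempt_enable(), since the
currently loaded VMCS is per-CPU state.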
2356 static void prepare_vmcs02_early(struct vcpu_vmx *vmx, struct loaded_vmcs *vmcs01, in prepare_vmcs02_early()
2516 vmx->loaded_vmcs->nmi_known_unmasked = in prepare_vmcs02_early()
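The hit at 2516 is cut off mid-assignment. In upstream KVM the statement caches,
in the loaded VMCS, whether NMIs are known to be unmasked, based on vmcs12's
interruptibility info (the right-hand side below is from upstream, not from the
hit itself):

	vmx->loaded_vmcs->nmi_known_unmasked =
		!(vmcs12->guest_interruptibility_info & GUEST_INTR_STATE_NMI);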
3247 if (unlikely(cr3 != vmx->loaded_vmcs->host_state.cr3)) { in nested_vmx_check_vmentry_hw()
3249 vmx->loaded_vmcs->host_state.cr3 = cr3; in nested_vmx_check_vmentry_hw()
3253 if (unlikely(cr4 != vmx->loaded_vmcs->host_state.cr4)) { in nested_vmx_check_vmentry_hw()
3255 vmx->loaded_vmcs->host_state.cr4 = cr4; in nested_vmx_check_vmentry_hw()
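Lines 3247-3255 show the point of the host_state cache: HOST_CR3/HOST_CR4 are
rewritten only when the live control register differs from the value cached in
the loaded VMCS. The sketch below fills in the reads, which upstream takes from
__get_current_cr3_fast() and cr4_read_shadow():

	cr3 = __get_current_cr3_fast();
	if (unlikely(cr3 != vmx->loaded_vmcs->host_state.cr3)) {
		vmcs_writel(HOST_CR3, cr3);	/* field is stale, rewrite it */
		vmx->loaded_vmcs->host_state.cr3 = cr3;
	}

	cr4 = cr4_read_shadow();
	if (unlikely(cr4 != vmx->loaded_vmcs->host_state.cr4)) {
		vmcs_writel(HOST_CR4, cr4);
		vmx->loaded_vmcs->host_state.cr4 = cr4;
	}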
4537 WARN_ON_ONCE(vmx->loaded_vmcs != &vmx->vmcs01); in copy_vmcs02_to_vmcs12_rare()
4540 vmx->loaded_vmcs = &vmx->nested.vmcs02; in copy_vmcs02_to_vmcs12_rare()
4545 vmx->loaded_vmcs = &vmx->vmcs01; in copy_vmcs02_to_vmcs12_rare()
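Lines 4537-4545 bracket a temporary switch to vmcs02 so that fields not kept in
sync elsewhere can be read directly, after which vmcs01 is restored. A sketch
reconstructed from the hits plus upstream KVM (vmx_vcpu_load_vmcs() and the
sync_vmcs02_to_vmcs12_rare() callee are assumed from upstream):

	WARN_ON_ONCE(vmx->loaded_vmcs != &vmx->vmcs01);

	cpu = get_cpu();
	vmx->loaded_vmcs = &vmx->nested.vmcs02;
	vmx_vcpu_load_vmcs(vcpu, cpu, &vmx->vmcs01);

	sync_vmcs02_to_vmcs12_rare(vcpu, vmcs12);	/* read the rare fields */

	vmx->loaded_vmcs = &vmx->vmcs01;
	vmx_vcpu_load_vmcs(vcpu, cpu, &vmx->nested.vmcs02);
	put_cpu();

This bypasses vmx_switch_vmcs() deliberately, presumably to avoid the host-state
sync and register-cache reset that a full switch performs, since this round trip
only reads vmcs02.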
5309 struct loaded_vmcs *loaded_vmcs = vmx->loaded_vmcs; in alloc_shadow_vmcs() local
5319 if (WARN_ON(loaded_vmcs != &vmx->vmcs01 || loaded_vmcs->shadow_vmcs)) in alloc_shadow_vmcs()
5320 return loaded_vmcs->shadow_vmcs; in alloc_shadow_vmcs()
5322 loaded_vmcs->shadow_vmcs = alloc_vmcs(true); in alloc_shadow_vmcs()
5323 if (loaded_vmcs->shadow_vmcs) in alloc_shadow_vmcs()
5324 vmcs_clear(loaded_vmcs->shadow_vmcs); in alloc_shadow_vmcs()
5326 return loaded_vmcs->shadow_vmcs; in alloc_shadow_vmcs()
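The hits at 5309-5326 cover nearly the whole allocator; only the declarations
are missing. Reassembled (the vcpu parameter and the to_vmx() line are assumed
from context):

static struct vmcs *alloc_shadow_vmcs(struct kvm_vcpu *vcpu)
{
	struct vcpu_vmx *vmx = to_vmx(vcpu);
	struct loaded_vmcs *loaded_vmcs = vmx->loaded_vmcs;

	/*
	 * A shadow VMCS hangs off vmcs01 only and is allocated at most once:
	 * reaching this with vmcs02 loaded, or with a shadow VMCS already
	 * present, is a bug.
	 */
	if (WARN_ON(loaded_vmcs != &vmx->vmcs01 || loaded_vmcs->shadow_vmcs))
		return loaded_vmcs->shadow_vmcs;

	loaded_vmcs->shadow_vmcs = alloc_vmcs(true);	/* true: shadow VMCS */
	if (loaded_vmcs->shadow_vmcs)
		vmcs_clear(loaded_vmcs->shadow_vmcs);

	return loaded_vmcs->shadow_vmcs;
}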
5770 vmcs_load(vmx->loaded_vmcs->vmcs); in handle_vmwrite()