Searched refs:vmcs (Results 1 – 9 of 9) sorted by relevance
/arch/x86/kvm/vmx/
vmx_ops.h
      15  void vmclear_error(struct vmcs *vmcs, u64 phys_addr);
      16  void vmptrld_error(struct vmcs *vmcs, u64 phys_addr);
     289  static inline void vmcs_clear(struct vmcs *vmcs)  [in vmcs_clear(), argument]
     291  u64 phys_addr = __pa(vmcs);  [in vmcs_clear()]
     293  vmx_asm1(vmclear, "m"(phys_addr), vmcs, phys_addr);  [in vmcs_clear()]
     296  static inline void vmcs_load(struct vmcs *vmcs)  [in vmcs_load(), argument]
     298  u64 phys_addr = __pa(vmcs);  [in vmcs_load()]
     303  vmx_asm1(vmptrld, "m"(phys_addr), vmcs, phys_addr);  [in vmcs_load()]
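The vmcs_clear()/vmcs_load() hits above are thin wrappers around the VMCLEAR and VMPTRLD instructions: both take the physical address of the VMCS region as a memory operand, and the vmx_asm1() macro branches to vmclear_error()/vmptrld_error() if the instruction faults or fails. Below is a minimal standalone sketch of the same pattern, using a setna-based error check in place of the kernel's asm-goto macro; the helper names and error handling are illustrative, not KVM's, and the code only does anything useful when executed in VMX root operation on x86-64.

#include <stdint.h>

/* Illustrative stand-in for vmclear_error(); the real function reports the failure. */
static void vmclear_failed(uint64_t phys_addr) { (void)phys_addr; }

/* VMCLEAR takes the VMCS physical address as an m64 operand; CF or ZF is
 * set on failure, which "setna" (not above = CF or ZF) captures into a byte. */
static inline void vmcs_clear_sketch(uint64_t vmcs_phys_addr)
{
	uint8_t error;

	asm volatile("vmclear %1\n\t"
		     "setna %0"
		     : "=qm"(error)
		     : "m"(vmcs_phys_addr)
		     : "cc", "memory");
	if (error)
		vmclear_failed(vmcs_phys_addr);
}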
vmcs.h
      21  struct vmcs {  [struct]
      27  DECLARE_PER_CPU(struct vmcs *, current_vmcs);  [argument]
      62  struct vmcs *vmcs;  [member]
      63  struct vmcs *shadow_vmcs;
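These vmcs.h hits outline the two structures the rest of the results revolve around: the hardware-defined VMCS region itself (struct vmcs, line 21) and the software bookkeeping in struct loaded_vmcs (lines 62-63), which records which region, and optionally which shadow region, a vCPU currently has loaded, while the per-CPU current_vmcs pointer (line 27) tracks what VMPTRLD last installed on each CPU. A compressed sketch of that layout follows, keeping only the fields visible in the hits plus two illustrative ones (cpu, launched); the real structures carry considerably more state.

#include <stdint.h>
#include <stdbool.h>

/* The VMCS region begins with a revision identifier; the rest of the
 * page is hardware-owned and only reached via VMREAD/VMWRITE. */
struct vmcs_hdr {
	uint32_t revision_id : 31;
	uint32_t shadow_vmcs : 1;	/* set when the region is a shadow VMCS */
};

struct vmcs {
	struct vmcs_hdr hdr;
	uint32_t abort;			/* VMX-abort indicator */
	char data[];			/* remainder of the region, opaque to software */
};

/* Software-side tracking of which region a vCPU has active. */
struct loaded_vmcs {
	struct vmcs *vmcs;		/* region handed to VMPTRLD */
	struct vmcs *shadow_vmcs;	/* optional shadow VMCS for nested VMX */
	int cpu;			/* CPU the region is loaded on, or -1 (illustrative) */
	bool launched;			/* has VMLAUNCH already run? (illustrative) */
};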
tdx.h
     179  TDX_BUILD_TDVPS_ACCESSORS(16, VMCS, vmcs);
     180  TDX_BUILD_TDVPS_ACCESSORS(32, VMCS, vmcs);
     181  TDX_BUILD_TDVPS_ACCESSORS(64, VMCS, vmcs);
vmx.h
     594  static inline u##bits __##lname##_controls_get(struct loaded_vmcs *vmcs) \
     596  return vmcs->controls_shadow.lname; \
     667  struct vmcs *alloc_vmcs_cpu(bool shadow, int cpu, gfp_t flags);
     668  void free_vmcs(struct vmcs *vmcs);
     673  static inline struct vmcs *alloc_vmcs(bool shadow)  [in alloc_vmcs()]
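Lines 594-596 belong to a macro that stamps out accessors for a cached copy of each VMCS control field: reads are served from loaded_vmcs->controls_shadow rather than a VMREAD, and the matching setter only issues a VMWRITE when the value actually changes. A small sketch of that caching idea under assumed names; the shadow struct, field encoding, and vmcs_write32 stub below are illustrative stand-ins, not the generated __*_controls_get/set helpers.

#include <stdint.h>

/* Illustrative shadow of one control field; KVM keeps one per control group. */
struct controls_shadow_sketch {
	uint32_t pin;			/* cached pin-based VM-execution controls */
};

/* Stand-in for vmcs_write32(); the real helper issues a VMWRITE. */
static void vmcs_write32_stub(unsigned long field, uint32_t val)
{
	(void)field; (void)val;
}

static inline uint32_t pin_controls_get(struct controls_shadow_sketch *s)
{
	return s->pin;			/* read from the cache, no VMREAD */
}

static inline void pin_controls_set(struct controls_shadow_sketch *s, uint32_t val)
{
	if (s->pin != val) {		/* skip redundant VMWRITEs */
		vmcs_write32_stub(0x4000 /* PIN_BASED_VM_EXEC_CONTROL (assumed encoding) */, val);
		s->pin = val;
	}
}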
vmx.c
     448  noinline void vmclear_error(struct vmcs *vmcs, u64 phys_addr)  [in vmclear_error(), argument]
     454  noinline void vmptrld_error(struct vmcs *vmcs, u64 phys_addr)  [in vmptrld_error(), argument]
     719  vmcs_clear(v->vmcs);  [in vmx_emergency_disable_virtualization_cpu()]
    1402  struct vmcs *prev;  [in vmx_vcpu_load_vmcs()]
    2863  struct vmcs *vmcs;  [in alloc_vmcs_cpu(), local]
    2879  return vmcs;  [in alloc_vmcs_cpu()]
    2882  void free_vmcs(struct vmcs *vmcs)  [in free_vmcs(), argument]
    2892  if (!loaded_vmcs->vmcs)  [in free_loaded_vmcs()]
    2905  if (!loaded_vmcs->vmcs)  [in alloc_loaded_vmcs()]
    2949  struct vmcs *vmcs;  [in alloc_kvm_area(), local]
    [all …]
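The vmx.c hits cover the allocation side: alloc_vmcs_cpu() (lines 2863-2879) returns a freshly zeroed, page-aligned region whose header carries the revision ID the CPU expects, while free_vmcs() and free_loaded_vmcs() release it again. Below is a user-space model of that allocate-stamp-free lifecycle, with an assumed 4 KiB region size and a made-up revision value standing in for what KVM reads from vmcs_config.

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#define VMCS_SIZE_SKETCH 4096u		/* assumed; the real size comes from vmcs_config */

struct vmcs_sketch {
	uint32_t revision_id;		/* simplified header */
	uint32_t abort;
	char data[];
};

/* Model of alloc_vmcs_cpu(): page-aligned, zeroed, revision stamped. */
static struct vmcs_sketch *alloc_vmcs_sketch(uint32_t revision_id)
{
	struct vmcs_sketch *vmcs = aligned_alloc(4096, VMCS_SIZE_SKETCH);

	if (!vmcs)
		return NULL;
	memset(vmcs, 0, VMCS_SIZE_SKETCH);
	vmcs->revision_id = revision_id;
	return vmcs;
}

/* Model of free_vmcs(). */
static void free_vmcs_sketch(struct vmcs_sketch *vmcs)
{
	free(vmcs);
}

int main(void)
{
	struct vmcs_sketch *vmcs = alloc_vmcs_sketch(0x12345 /* made-up revision */);

	free_vmcs_sketch(vmcs);
	return 0;
}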
nested.c
     293  static void vmx_switch_vmcs(struct kvm_vcpu *vcpu, struct loaded_vmcs *vmcs)  [in vmx_switch_vmcs(), argument]
     299  if (WARN_ON_ONCE(vmx->loaded_vmcs == vmcs))  [in vmx_switch_vmcs()]
     304  vmx->loaded_vmcs = vmcs;  [in vmx_switch_vmcs()]
    1611  struct vmcs *shadow_vmcs = vmx->vmcs01.shadow_vmcs;  [in copy_shadow_to_vmcs12()]
    1631  vmcs_load(vmx->loaded_vmcs->vmcs);  [in copy_shadow_to_vmcs12()]
    1646  struct vmcs *shadow_vmcs = vmx->vmcs01.shadow_vmcs;  [in copy_vmcs12_to_shadow()]
    1667  vmcs_load(vmx->loaded_vmcs->vmcs);  [in copy_vmcs12_to_shadow()]
    5306  static struct vmcs *alloc_shadow_vmcs(struct kvm_vcpu *vcpu)  [in alloc_shadow_vmcs()]
    5770  vmcs_load(vmx->loaded_vmcs->vmcs);  [in handle_vmwrite()]
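The nested.c hits show the two patterns that make the shadow VMCS usable: vmx_switch_vmcs() (lines 293-304) swaps which loaded_vmcs the vCPU runs on, and copy_shadow_to_vmcs12()/copy_vmcs12_to_shadow() (lines 1611-1667) temporarily load the shadow VMCS, move fields between it and the in-memory vmcs12 image, then re-load the vCPU's active VMCS (the vmcs_load() calls at 1631, 1667 and 5770). Here is a schematic of that load-copy-restore sequence; the helpers are stubs standing in for the kernel's VMPTRLD/VMREAD wrappers, and the field list is reduced to two entries with assumed encodings.

#include <stdint.h>
#include <stddef.h>

/* Stubs standing in for the real VMPTRLD/VMREAD wrappers. */
struct vmcs_stub { int id; };
static void vmcs_load_stub(struct vmcs_stub *vmcs) { (void)vmcs; }
static uint64_t vmread_stub(unsigned long field) { (void)field; return 0; }

/* A tiny "vmcs12" image and field list; the real ones are far larger. */
struct vmcs12_sketch {
	uint64_t guest_rip;
	uint64_t guest_rsp;
};

static const unsigned long shadow_fields_sketch[] = {
	0x681e,		/* GUEST_RIP (assumed encoding) */
	0x681c,		/* GUEST_RSP (assumed encoding) */
};

/* Schematic copy_shadow_to_vmcs12(): load the shadow, VMREAD each synced
 * field into the software image, then restore the vCPU's active VMCS. */
static void copy_shadow_to_vmcs12_sketch(struct vmcs_stub *shadow_vmcs,
					 struct vmcs_stub *active_vmcs,
					 struct vmcs12_sketch *vmcs12)
{
	vmcs_load_stub(shadow_vmcs);

	vmcs12->guest_rip = vmread_stub(shadow_fields_sketch[0]);
	vmcs12->guest_rsp = vmread_stub(shadow_fields_sketch[1]);

	vmcs_load_stub(active_vmcs);	/* mirrors vmcs_load(vmx->loaded_vmcs->vmcs) */
}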
/arch/loongarch/kvm/
main.c
      19  static struct kvm_context __percpu *vmcs;  [variable]
     275  kvm->arch.vmcs = vmcs;  [in kvm_init_vmcs()]
     325  this_cpu_ptr(vmcs)->last_vcpu = NULL;  [in kvm_arch_enable_virtualization_cpu()]
     348  if (!vmcs) {  [in kvm_loongarch_env_init()]
     355  free_percpu(vmcs);  [in kvm_loongarch_env_init()]
     356  vmcs = NULL;  [in kvm_loongarch_env_init()]
     371  free_percpu(vmcs);  [in kvm_loongarch_env_init()]
     372  vmcs = NULL;  [in kvm_loongarch_env_init()]
     391  context = per_cpu_ptr(vmcs, cpu);  [in kvm_loongarch_env_init()]
     419  if (vmcs)  [in kvm_loongarch_env_exit()]
    [all …]
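Despite the name, the LoongArch vmcs is not a hardware VMCS at all: it is a module-wide per-CPU struct kvm_context, allocated once in kvm_loongarch_env_init() (the free_percpu() calls at 355/371 are its error paths), shared by every VM through kvm->arch.vmcs in kvm_init_vmcs(), and used per CPU to remember the last vCPU that ran there (line 325) so stale guest state can be invalidated. A user-space model of that per-CPU pattern follows, with the context reduced to a last_vcpu slot, a fixed assumed CPU count, and plain calloc()/free() standing in for alloc_percpu()/free_percpu().

#include <stdlib.h>

#define NR_CPUS_SKETCH 8		/* assumed CPU count for the model */

struct kvm_vcpu_sketch;			/* opaque here */

/* Stand-in for the per-CPU struct kvm_context: one slot per CPU,
 * each remembering the last vCPU that ran there. */
struct kvm_context_sketch {
	struct kvm_vcpu_sketch *last_vcpu;
};

static struct kvm_context_sketch *vmcs_sketch;	/* models the __percpu pointer */

/* Models kvm_loongarch_env_init(): allocate the shared per-CPU contexts once. */
static int env_init_sketch(void)
{
	vmcs_sketch = calloc(NR_CPUS_SKETCH, sizeof(*vmcs_sketch));
	return vmcs_sketch ? 0 : -1;
}

/* Models this_cpu_ptr(vmcs)->last_vcpu = NULL when a CPU enables virtualization. */
static void enable_virtualization_cpu_sketch(int cpu)
{
	vmcs_sketch[cpu].last_vcpu = NULL;
}

/* Models kvm_loongarch_env_exit(): free_percpu(vmcs). */
static void env_exit_sketch(void)
{
	free(vmcs_sketch);
	vmcs_sketch = NULL;
}

int main(void)
{
	if (env_init_sketch())
		return 1;
	enable_virtualization_cpu_sketch(0);
	env_exit_sketch();
	return 0;
}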
vcpu.c
      45  context = this_cpu_ptr(vcpu->kvm->arch.vmcs);  [in kvm_save_host_pmu()]
      60  context = this_cpu_ptr(vcpu->kvm->arch.vmcs);  [in kvm_restore_host_pmu()]
    1575  context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);  [in kvm_arch_vcpu_destroy()]
    1597  context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);  [in _kvm_vcpu_load()]
/arch/loongarch/include/asm/
kvm_host.h
     131  struct kvm_context __percpu *vmcs;  [member]
Completed in 43 milliseconds