| /linux/arch/loongarch/kvm/ |
| A D | main.c |
     17  static struct kvm_context __percpu *vmcs;  (variable)
    255  kvm->arch.vmcs = vmcs;  in kvm_init_vmcs()
    320  vmcs = alloc_percpu(struct kvm_context);  in kvm_loongarch_env_init()
    321  if (!vmcs) {  in kvm_loongarch_env_init()
    328  free_percpu(vmcs);  in kvm_loongarch_env_init()
    329  vmcs = NULL;  in kvm_loongarch_env_init()
    344  free_percpu(vmcs);  in kvm_loongarch_env_init()
    345  vmcs = NULL;  in kvm_loongarch_env_init()
    364  context = per_cpu_ptr(vmcs, cpu);  in kvm_loongarch_env_init()
    378  if (vmcs)  in kvm_loongarch_env_exit()
    [all …]
|
| A D | vcpu.c |
     39  context = this_cpu_ptr(vcpu->kvm->arch.vmcs);  in kvm_save_host_pmu()
     54  context = this_cpu_ptr(vcpu->kvm->arch.vmcs);  in kvm_restore_host_pmu()
   1517  context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);  in kvm_arch_vcpu_destroy()
   1539  context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);  in _kvm_vcpu_load()
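Despite the name, the LoongArch `vmcs` above is not an Intel VMCS: it is a per-CPU `struct kvm_context` allocated once at init (main.c, lines 320-364) and later reached through `this_cpu_ptr()`/`per_cpu_ptr()` when a vCPU is loaded or the host PMU state is saved (vcpu.c). A minimal sketch of that allocate/initialise/free pattern; the context field names (`vpid_cache`, `last_vcpu`) and function names are assumptions, not copies from the tree:

```c
#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/percpu.h>

struct kvm_vcpu;

/* Illustrative per-CPU bookkeeping; field names are assumptions. */
struct kvm_context {
	unsigned long vpid_cache;
	struct kvm_vcpu *last_vcpu;
};

static struct kvm_context __percpu *vmcs;

static int env_init_sketch(void)
{
	struct kvm_context *context;
	int cpu;

	vmcs = alloc_percpu(struct kvm_context);
	if (!vmcs)
		return -ENOMEM;

	/* Initialise every possible CPU's slot before any vCPU runs there. */
	for_each_possible_cpu(cpu) {
		context = per_cpu_ptr(vmcs, cpu);
		context->vpid_cache = 0;
		context->last_vcpu = NULL;
	}
	return 0;
}

static void env_exit_sketch(void)
{
	free_percpu(vmcs);	/* free_percpu(NULL) is a no-op */
	vmcs = NULL;
}
```

The error paths at main.c lines 328/344 pair each `free_percpu()` with resetting the pointer to NULL, which the sketch mirrors in `env_exit_sketch()`.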
|
| /linux/arch/x86/kvm/vmx/ |
| A D | vmx_ops.h |
     15  void vmclear_error(struct vmcs *vmcs, u64 phys_addr);
     16  void vmptrld_error(struct vmcs *vmcs, u64 phys_addr);
    287  static inline void vmcs_clear(struct vmcs *vmcs)  in vmcs_clear()  (argument)
    289  u64 phys_addr = __pa(vmcs);  in vmcs_clear()
    291  vmx_asm1(vmclear, "m"(phys_addr), vmcs, phys_addr);  in vmcs_clear()
    294  static inline void vmcs_load(struct vmcs *vmcs)  in vmcs_load()  (argument)
    296  u64 phys_addr = __pa(vmcs);  in vmcs_load()
    301  vmx_asm1(vmptrld, "m"(phys_addr), vmcs, phys_addr);  in vmcs_load()
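The `vmx_asm1()` wrapper used at lines 291 and 301 adds fault and VM-fail handling, routing failures to `vmclear_error()`/`vmptrld_error()`. Stripped of that, the two helpers reduce to the pattern below: VMCLEAR and VMPTRLD take an m64 operand holding the physical address of the VMCS region, hence the `__pa()`. A sketch without the error handling, not the kernel's actual implementation:

```c
#include <asm/page.h>		/* __pa() */
#include <linux/types.h>

struct vmcs;			/* opaque here; see vmcs.h */

static inline void vmcs_clear_sketch(struct vmcs *vmcs)
{
	u64 phys_addr = __pa(vmcs);

	/* Flush the VMCS data to memory and mark it inactive/not-current. */
	asm volatile("vmclear %0" : : "m"(phys_addr) : "cc", "memory");
}

static inline void vmcs_load_sketch(struct vmcs *vmcs)
{
	u64 phys_addr = __pa(vmcs);

	/* Make this VMCS the current VMCS on this logical CPU. */
	asm volatile("vmptrld %0" : : "m"(phys_addr) : "cc", "memory");
}
```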
|
| A D | vmcs.h |
     21  struct vmcs {  (struct)
     27  DECLARE_PER_CPU(struct vmcs *, current_vmcs);  (argument)
     62  struct vmcs *vmcs;  (member)
     63  struct vmcs *shadow_vmcs;
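The hits above outline two shapes: the raw `struct vmcs` region handed to the hardware, and a wrapper (the upstream `struct loaded_vmcs`, lines 62-63) that pairs a VMCS with the shadow VMCS used by nested VMX. A trimmed sketch; the first eight bytes (revision identifier, then the VMX-abort indicator) are architecturally defined, while the wrapper's extra fields here are illustrative:

```c
#include <linux/percpu.h>
#include <linux/types.h>

struct vmcs_hdr {
	u32 revision_id:31;	/* must match IA32_VMX_BASIC[30:0] */
	u32 shadow_vmcs:1;	/* set for a shadow VMCS */
};

struct vmcs {
	struct vmcs_hdr hdr;
	u32 abort;		/* VMX-abort indicator */
	char data[];		/* implementation-specific contents */
};

/* Which VMCS is current on this CPU (line 27 above). */
DECLARE_PER_CPU(struct vmcs *, current_vmcs);

/* Wrapper pairing a VMCS with its shadow (lines 62-63 above); trimmed. */
struct loaded_vmcs_sketch {
	struct vmcs *vmcs;
	struct vmcs *shadow_vmcs;
	int cpu;		/* illustrative */
	bool launched;		/* illustrative */
};
```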
|
| A D | vmx.h |
    595  static inline u##bits __##lname##_controls_get(struct loaded_vmcs *vmcs) \
    597  return vmcs->controls_shadow.lname; \
    703  struct vmcs *alloc_vmcs_cpu(bool shadow, int cpu, gfp_t flags);
    704  void free_vmcs(struct vmcs *vmcs);
    709  static inline struct vmcs *alloc_vmcs(bool shadow)  in alloc_vmcs()
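Line 595 is part of a token-pasting macro that stamps out one cached accessor per VM-execution control field, so hot paths read a software shadow of the control instead of issuing a VMREAD. A cut-down sketch of that technique, generating only getters and using illustrative struct and field names (the upstream macro also emits setters and bit helpers):

```c
#include <linux/types.h>

/* Software copies of frequently read controls (field names illustrative). */
struct controls_shadow_sketch {
	u32 pin;
	u32 exec;
};

struct loaded_vmcs_ctl_sketch {
	struct controls_shadow_sketch controls_shadow;
	/* ... */
};

/* Paste the field name and width into a per-control getter. */
#define BUILD_CONTROLS_GET(lname, bits)					\
static inline u##bits __##lname##_controls_get(struct loaded_vmcs_ctl_sketch *vmcs) \
{									\
	return vmcs->controls_shadow.lname;				\
}

BUILD_CONTROLS_GET(pin, 32)
BUILD_CONTROLS_GET(exec, 32)
```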
|
| A D | vmx.c |
    468  noinline void vmclear_error(struct vmcs *vmcs, u64 phys_addr)  in vmclear_error()  (argument)
    474  noinline void vmptrld_error(struct vmcs *vmcs, u64 phys_addr)  in vmptrld_error()  (argument)
    774  vmcs_clear(v->vmcs);  in vmx_emergency_disable_virtualization_cpu()
   1450  struct vmcs *prev;  in vmx_vcpu_load_vmcs()
   2898  struct vmcs *vmcs;  in alloc_vmcs_cpu()  (local)
   2914  return vmcs;  in alloc_vmcs_cpu()
   2917  void free_vmcs(struct vmcs *vmcs)  in free_vmcs()  (argument)
   2927  if (!loaded_vmcs->vmcs)  in free_loaded_vmcs()
   2940  if (!loaded_vmcs->vmcs)  in alloc_loaded_vmcs()
   2984  struct vmcs *vmcs;  in alloc_kvm_area()  (local)
   [all …]
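The `alloc_vmcs_cpu()`/`free_vmcs()` hits at 2898-2917 boil down to: grab one page on the target CPU's NUMA node, zero it, and stamp the revision identifier (plus the shadow bit, if requested) into the header before the region is ever handed to VMPTRLD. A sketch of that shape, reusing the `struct vmcs` layout sketched after the vmcs.h entry above and treating the size and revision values as placeholders for what the kernel reads out of IA32_VMX_BASIC:

```c
#include <linux/gfp.h>
#include <linux/mm.h>
#include <linux/string.h>
#include <linux/topology.h>

static size_t vmcs_size_sketch = PAGE_SIZE;	/* placeholder for the real VMCS size */
static u32 vmcs_revision_sketch;		/* placeholder for the real revision id */

static struct vmcs *alloc_vmcs_cpu_sketch(bool shadow, int cpu, gfp_t flags)
{
	struct page *page = alloc_pages_node(cpu_to_node(cpu), flags, 0);
	struct vmcs *vmcs;

	if (!page)
		return NULL;

	vmcs = page_address(page);
	memset(vmcs, 0, vmcs_size_sketch);

	vmcs->hdr.revision_id = vmcs_revision_sketch;
	if (shadow)
		vmcs->hdr.shadow_vmcs = 1;

	return vmcs;
}

static void free_vmcs_sketch(struct vmcs *vmcs)
{
	free_page((unsigned long)vmcs);
}
```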
|
| A D | nested.c |
    295  static void vmx_switch_vmcs(struct kvm_vcpu *vcpu, struct loaded_vmcs *vmcs)  in vmx_switch_vmcs()  (argument)
    301  if (WARN_ON_ONCE(vmx->loaded_vmcs == vmcs))  in vmx_switch_vmcs()
    306  vmx->loaded_vmcs = vmcs;  in vmx_switch_vmcs()
   1595  struct vmcs *shadow_vmcs = vmx->vmcs01.shadow_vmcs;  in copy_shadow_to_vmcs12()
   1615  vmcs_load(vmx->loaded_vmcs->vmcs);  in copy_shadow_to_vmcs12()
   1630  struct vmcs *shadow_vmcs = vmx->vmcs01.shadow_vmcs;  in copy_vmcs12_to_shadow()
   1651  vmcs_load(vmx->loaded_vmcs->vmcs);  in copy_vmcs12_to_shadow()
   5252  static struct vmcs *alloc_shadow_vmcs(struct kvm_vcpu *vcpu)  in alloc_shadow_vmcs()
   5717  vmcs_load(vmx->loaded_vmcs->vmcs);  in handle_vmwrite()
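The `vmcs_load()` calls at 1615, 1651 and 5717 all restore the vCPU's working VMCS after something else was made current. VMREAD/VMWRITE always act on the current VMCS, so `copy_shadow_to_vmcs12()`/`copy_vmcs12_to_shadow()` have to borrow the shadow VMCS for the duration of the field copy and then switch back. A sketch of that bracketing, using the `vmcs_load()`/`vmcs_clear()` helpers from vmx_ops.h above, with the field loop elided and preemption disabled because "current VMCS" is per-CPU state:

```c
#include <linux/preempt.h>

static void with_shadow_vmcs_sketch(struct vmcs *shadow_vmcs,
				    struct vmcs *working_vmcs)
{
	preempt_disable();

	vmcs_load(shadow_vmcs);		/* make the shadow VMCS current */

	/* ... vmcs_read*() / vmcs_write*() on the shadowed fields ... */

	vmcs_clear(shadow_vmcs);	/* flush it back out */
	vmcs_load(working_vmcs);	/* restore the vCPU's working VMCS */

	preempt_enable();
}
```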
|
| /linux/tools/perf/util/intel-pt-decoder/ |
| A D | intel-pt-decoder.c |
    167  uint64_t vmcs;  (member)
   2358  data->vmcs = true;  in intel_pt_vm_psb_lookahead_cb()
   2526  .vmcs = NO_VMCS,  in intel_pt_translate_vm_tsc_offset()
   2601  uint64_t vmcs;  in intel_pt_vm_tm_corr_tsc()  (local)
   2629  vmcs = data->vmcs ? data->vmcs_packet.payload : decoder->vmcs;  in intel_pt_vm_tm_corr_tsc()
   2630  if (vmcs == NO_VMCS)  in intel_pt_vm_tm_corr_tsc()
   2631  vmcs = 0;  in intel_pt_vm_tm_corr_tsc()
   2754  uint64_t vmcs;  in intel_pt_vm_tm_corr_pebs_tsc()  (local)
   2756  vmcs = decoder->vmcs;  in intel_pt_vm_tm_corr_pebs_tsc()
   2757  if (vmcs == NO_VMCS)  in intel_pt_vm_tm_corr_pebs_tsc()
   [all …]
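Lines 2629-2631 show the selection logic the decoder applies when correcting guest TSC values: prefer the VMCS address carried in the most recent VMCS packet, fall back to the decoder-wide value, and map the NO_VMCS sentinel to 0 so it can still serve as a lookup key. A self-contained restatement of that fallback; NO_VMCS's real value comes from the decoder headers, so the -1 below is only a stand-in:

```c
#include <stdbool.h>
#include <stdint.h>

#define NO_VMCS ((uint64_t)-1)	/* stand-in for the decoder's sentinel */

struct vmcs_pick {
	bool have_pkt_vmcs;	/* a VMCS packet was seen during lookahead */
	uint64_t pkt_vmcs;	/* payload of that packet */
	uint64_t decoder_vmcs;	/* last VMCS known to the decoder */
};

static uint64_t pick_vmcs(const struct vmcs_pick *d)
{
	uint64_t vmcs = d->have_pkt_vmcs ? d->pkt_vmcs : d->decoder_vmcs;

	if (vmcs == NO_VMCS)
		vmcs = 0;
	return vmcs;
}
```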
|
| A D | intel-pt-decoder.h |
    210  uint64_t vmcs;  (member)
    280  struct intel_pt_vmcs_info *(*findnew_vmcs_info)(void *data, uint64_t vmcs);
|
| /linux/tools/testing/selftests/kvm/lib/x86_64/ |
| A D | vmx.c |
     86  vmx->vmcs = (void *)vm_vaddr_alloc_page(vm);  in vcpu_alloc_vmx()
     87  vmx->vmcs_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmcs);  in vcpu_alloc_vmx()
     88  vmx->vmcs_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmcs);  in vcpu_alloc_vmx()
    163  *(uint32_t *)(vmx->vmcs) = vmcs_revision();  in load_vmcs()
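The selftest keeps three views of the same VMCS page (guest-virtual, host-virtual, guest-physical), and line 163 shows the one piece of initialisation the architecture requires before VMPTRLD: the first 32 bits of the region must hold the VMCS revision identifier from IA32_VMX_BASIC. A sketch of that step; `rdmsr_sketch()` stands in for the selftests' `rdmsr()`/`vmcs_revision()` helpers and has to run where RDMSR is permitted (here, inside the guest):

```c
#include <stdint.h>

#define MSR_IA32_VMX_BASIC 0x480

static inline uint64_t rdmsr_sketch(uint32_t msr)
{
	uint32_t lo, hi;

	__asm__ __volatile__("rdmsr" : "=a"(lo), "=d"(hi) : "c"(msr));
	return ((uint64_t)hi << 32) | lo;
}

/* Stamp the revision identifier into the first 32 bits of the VMCS region. */
static void init_vmcs_region_sketch(void *vmcs)
{
	/* IA32_VMX_BASIC bits 30:0 hold the VMCS revision identifier. */
	*(uint32_t *)vmcs = (uint32_t)rdmsr_sketch(MSR_IA32_VMX_BASIC) & 0x7fffffff;
}
```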
|
| /linux/tools/perf/util/ |
| A D | intel-pt.c |
    317  u64 vmcs,  in intel_pt_findnew_vmcs()  (argument)
    328  if (v->vmcs == vmcs)  in intel_pt_findnew_vmcs()
    331  if (vmcs < v->vmcs)  in intel_pt_findnew_vmcs()
    339  v->vmcs = vmcs;  in intel_pt_findnew_vmcs()
    350  static struct intel_pt_vmcs_info *intel_pt_findnew_vmcs_info(void *data, uint64_t vmcs)  in intel_pt_findnew_vmcs_info()  (argument)
    355  if (!vmcs && !pt->dflt_tsc_offset)  in intel_pt_findnew_vmcs_info()
    358  return intel_pt_findnew_vmcs(&pt->vmcs_info, vmcs, pt->dflt_tsc_offset);  in intel_pt_findnew_vmcs_info()
   4053  u64 tsc_offset, vmcs;  in intel_pt_parse_vm_tm_corr_arg()  (local)
   4073  vmcs = strtoull(p, &p, 0);  in intel_pt_parse_vm_tm_corr_arg()
   4076  if (!vmcs)  in intel_pt_parse_vm_tm_corr_arg()
   [all …]
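Lines 317-339 outline a find-or-create lookup keyed on the VMCS address: walk the tree comparing `vmcs` against each node, return an existing node on a match, otherwise insert a new one carrying the default TSC offset. perf keeps these nodes in an rbtree; the sketch below uses a plain unbalanced binary search tree so it stays self-contained, and the `tsc_offset` field is inferred from the `dflt_tsc_offset` argument at line 358:

```c
#include <stdint.h>
#include <stdlib.h>

struct vmcs_info {
	uint64_t vmcs;
	uint64_t tsc_offset;
	struct vmcs_info *left, *right;
};

static struct vmcs_info *findnew_vmcs(struct vmcs_info **root,
				      uint64_t vmcs, uint64_t dflt_tsc_offset)
{
	struct vmcs_info **p = root;

	while (*p) {
		if ((*p)->vmcs == vmcs)
			return *p;	/* already known */
		p = vmcs < (*p)->vmcs ? &(*p)->left : &(*p)->right;
	}

	/* Not found: create a node seeded with the default TSC offset. */
	*p = calloc(1, sizeof(**p));
	if (*p) {
		(*p)->vmcs = vmcs;
		(*p)->tsc_offset = dflt_tsc_offset;
	}
	return *p;
}
```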
|
| /linux/arch/loongarch/include/asm/ |
| A D | kvm_host.h |
    119  struct kvm_context __percpu *vmcs;  (member)
|
| /linux/tools/testing/selftests/kvm/include/x86_64/ |
| A D | vmx.h |
    507  void *vmcs;  (member)
|
| A D | evmcs.h |
    251  static inline int evmcs_vmptrld(uint64_t vmcs_pa, void *vmcs)  in evmcs_vmptrld()  (argument)
    256  current_evmcs = vmcs;  in evmcs_vmptrld()
|