/arch/x86/kvm/
cpuid.h
    254  static __always_inline bool guest_cpu_cap_has(struct kvm_vcpu *vcpu,  in guest_cpu_cap_has() (function definition)
    272  if (guest_cpu_cap_has(vcpu, X86_FEATURE_LAM))  in kvm_vcpu_is_legal_cr3()
    280  return (guest_cpu_cap_has(vcpu, X86_FEATURE_SPEC_CTRL) ||  in guest_has_spec_ctrl_msr()
    281  guest_cpu_cap_has(vcpu, X86_FEATURE_AMD_STIBP) ||  in guest_has_spec_ctrl_msr()
    282  guest_cpu_cap_has(vcpu, X86_FEATURE_AMD_IBRS) ||  in guest_has_spec_ctrl_msr()
    283  guest_cpu_cap_has(vcpu, X86_FEATURE_AMD_SSBD));  in guest_has_spec_ctrl_msr()
    288  return (guest_cpu_cap_has(vcpu, X86_FEATURE_SPEC_CTRL) ||  in guest_has_pred_cmd_msr()
    289  guest_cpu_cap_has(vcpu, X86_FEATURE_AMD_IBPB) ||  in guest_has_pred_cmd_msr()
    290  guest_cpu_cap_has(vcpu, X86_FEATURE_SBPB));  in guest_has_pred_cmd_msr()
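
cpuid.h is where guest_cpu_cap_has() itself is defined, and the helpers at lines 280 and 288 show its most common composition: a virtual MSR is visible to the guest if any one of several CPUID features implies it. A minimal standalone C sketch of that shape follows; the bitmap layout, the compact feature numbering, and all toy_* names are illustrative stand-ins, not KVM's real definitions.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

enum toy_feature {                      /* hypothetical compact numbering */
	FEAT_SPEC_CTRL,
	FEAT_AMD_STIBP,
	FEAT_AMD_IBRS,
	FEAT_AMD_SSBD,
	FEAT_LAM,
};

struct toy_vcpu {
	uint64_t caps;                  /* one bit per guest-visible feature */
};

/* Shape of guest_cpu_cap_has(): a single bit test against the set of
 * features the VMM decided to expose to this vCPU. */
static inline bool guest_cap_has(const struct toy_vcpu *vcpu, enum toy_feature f)
{
	return vcpu->caps & (1ull << f);
}

/* Shape of guest_has_spec_ctrl_msr(): the MSR exists for the guest if
 * any one of several independent CPUID features implies it. */
static inline bool toy_has_spec_ctrl_msr(const struct toy_vcpu *vcpu)
{
	return guest_cap_has(vcpu, FEAT_SPEC_CTRL) ||
	       guest_cap_has(vcpu, FEAT_AMD_STIBP) ||
	       guest_cap_has(vcpu, FEAT_AMD_IBRS) ||
	       guest_cap_has(vcpu, FEAT_AMD_SSBD);
}

int main(void)
{
	struct toy_vcpu vcpu = { .caps = 1ull << FEAT_AMD_IBRS };

	printf("SPEC_CTRL MSR visible: %d\n", toy_has_spec_ctrl_msr(&vcpu));   /* 1 */
	printf("LAM exposed:           %d\n", guest_cap_has(&vcpu, FEAT_LAM)); /* 0 */
	return 0;
}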
smm.c
    287  if (guest_cpu_cap_has(vcpu, X86_FEATURE_LM))  in enter_smm()
    357  if (guest_cpu_cap_has(vcpu, X86_FEATURE_LM))  in enter_smm()
    590  if (guest_cpu_cap_has(vcpu, X86_FEATURE_LM)) {  in emulator_leave_smm()
    613  if (guest_cpu_cap_has(vcpu, X86_FEATURE_LM)) {  in emulator_leave_smm()
    638  if (guest_cpu_cap_has(vcpu, X86_FEATURE_LM))  in emulator_leave_smm()
x86.h
    159  if (guest_cpu_cap_has(vcpu, X86_FEATURE_SPEC_CTRL) ||  in kvm_nested_vmexit_handle_ibrs()
    160  guest_cpu_cap_has(vcpu, X86_FEATURE_AMD_IBRS))  in kvm_nested_vmexit_handle_ibrs()
mmu.h
    142  if (!guest_cpu_cap_has(vcpu, X86_FEATURE_LAM))  in kvm_get_active_cr3_lam_bits()
x86.c
    1519  if (!guest_cpu_cap_has(vcpu, X86_FEATURE_RTM))  in kvm_dr6_fixed()
    1710  !guest_cpu_cap_has(vcpu, X86_FEATURE_LM))  in __kvm_valid_efer()
    1856  !guest_cpu_cap_has(vcpu, X86_FEATURE_RDPID))  in __kvm_set_msr()
    1914  !guest_cpu_cap_has(vcpu, X86_FEATURE_RDPID))  in __kvm_get_msr()
    3809  !guest_cpu_cap_has(vcpu, X86_FEATURE_PDCM))  in kvm_set_msr_common()
    4113  if (!guest_cpu_cap_has(vcpu, X86_FEATURE_OSVW))  in kvm_set_msr_common()
    4118  if (!guest_cpu_cap_has(vcpu, X86_FEATURE_OSVW))  in kvm_set_msr_common()
    4137  !guest_cpu_cap_has(vcpu, X86_FEATURE_XFD))  in kvm_set_msr_common()
    4147  !guest_cpu_cap_has(vcpu, X86_FEATURE_XFD))  in kvm_set_msr_common()
    4492  !guest_cpu_cap_has(vcpu, X86_FEATURE_XFD))  in kvm_get_msr_common()
    [all …]
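
The x86.c hits are almost all the same gate: an MSR read or write is refused, typically with an injected #GP, when the guest was not given the corresponding CPUID feature. A standalone sketch of that convention follows; the MSR index, feature bit, and struct are invented, and KVM's real code also special-cases host-initiated accesses, which this omits.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

enum { FEAT_XFD = 0 };
#define TOY_MSR_XFD 0x100            /* invented index, not the real MSR */

struct toy_vcpu {
	uint64_t caps;
	uint64_t xfd;
};

static bool guest_cap_has(const struct toy_vcpu *vcpu, int feat)
{
	return vcpu->caps & (1ull << feat);
}

/* Return convention mirrors kvm_set_msr_common(): 0 = handled,
 * nonzero = reject the access (KVM would inject #GP). */
static int toy_set_msr(struct toy_vcpu *vcpu, uint32_t msr, uint64_t data)
{
	switch (msr) {
	case TOY_MSR_XFD:
		/* Same shape as the X86_FEATURE_XFD checks listed above:
		 * no CPUID bit, no MSR. */
		if (!guest_cap_has(vcpu, FEAT_XFD))
			return 1;
		vcpu->xfd = data;
		return 0;
	default:
		return 1;
	}
}

int main(void)
{
	struct toy_vcpu vcpu = { .caps = 0 };

	printf("write w/o XFD:  %d\n", toy_set_msr(&vcpu, TOY_MSR_XFD, 1)); /* 1 */
	vcpu.caps |= 1ull << FEAT_XFD;
	printf("write with XFD: %d\n", toy_set_msr(&vcpu, TOY_MSR_XFD, 1)); /* 0 */
	return 0;
}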
cpuid.c
    413  guest_cpu_cap_has(vcpu, X86_FEATURE_GBPAGES);  in kvm_vcpu_after_set_cpuid()
    438  __cr4_reserved_bits(guest_cpu_cap_has, vcpu);  in kvm_vcpu_after_set_cpuid()
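
Line 438 is the interesting one: guest_cpu_cap_has is passed as an argument to the __cr4_reserved_bits() macro, so the same mask builder can run against either host or guest capabilities. A sketch of that predicate-as-parameter trick, using a GCC statement expression as the kernel does; the feature numbering is invented and only two CR4 bits are shown.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

enum { FEAT_PKE = 0, FEAT_LAM = 1 };
#define CR4_PKE     (1ull << 22)
#define CR4_LAM_SUP (1ull << 28)

struct toy_vcpu { uint64_t caps; };

static bool guest_cap_has(const struct toy_vcpu *vcpu, int feat)
{
	return vcpu->caps & (1ull << feat);
}

/* Like __cr4_reserved_bits(__cap_has, data): the capability predicate
 * is a macro parameter. Every bit starts reserved; each feature the
 * predicate confirms un-reserves its CR4 bit. */
#define toy_cr4_reserved_bits(__cap_has, __data)	\
({							\
	uint64_t __rsvd = ~0ull;			\
	if (__cap_has(__data, FEAT_PKE))		\
		__rsvd &= ~CR4_PKE;			\
	if (__cap_has(__data, FEAT_LAM))		\
		__rsvd &= ~CR4_LAM_SUP;			\
	__rsvd;						\
})

int main(void)
{
	struct toy_vcpu vcpu = { .caps = 1ull << FEAT_PKE };
	uint64_t rsvd = toy_cr4_reserved_bits(guest_cap_has, &vcpu);

	printf("CR4.PKE reserved: %d\n", !!(rsvd & CR4_PKE));     /* 0 */
	printf("CR4.LAM reserved: %d\n", !!(rsvd & CR4_LAM_SUP)); /* 1 */
	return 0;
}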
lapic.c
    560  if (guest_cpu_cap_has(vcpu, X86_FEATURE_X2APIC) &&  in kvm_apic_set_version()
    2615  (guest_cpu_cap_has(vcpu, X86_FEATURE_X2APIC) ? 0 : X2APIC_ENABLE);  in kvm_apic_set_base()
hyperv.c
    1358  !guest_cpu_cap_has(vcpu, X86_FEATURE_XSAVEC))  in __kvm_hv_xsaves_xsavec_maybe_warn()
/arch/x86/kvm/svm/
pmu.c
    49  if (!guest_cpu_cap_has(vcpu, X86_FEATURE_PERFCTR_CORE))  in get_gp_pmc_amd()
    112  return guest_cpu_cap_has(vcpu, X86_FEATURE_PERFCTR_CORE);  in amd_is_valid_msr()
    182  if (guest_cpu_cap_has(vcpu, X86_FEATURE_PERFMON_V2)) {  in amd_pmu_refresh()
    192  } else if (guest_cpu_cap_has(vcpu, X86_FEATURE_PERFCTR_CORE)) {  in amd_pmu_refresh()
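
In amd_pmu_refresh() the capability checks are tiered: the newest PMU feature the guest has determines how its counters are enumerated. A sketch of that if/else-if laddering; the counter counts and the cpuid_nr_counters field are assumptions for illustration, not the values KVM computes.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

enum { FEAT_PERFCTR_CORE = 0, FEAT_PERFMON_V2 = 1 };

struct toy_vcpu {
	uint64_t caps;
	int cpuid_nr_counters;   /* what guest CPUID enumerates (PerfMonV2 case) */
};

static bool guest_cap_has(const struct toy_vcpu *vcpu, int feat)
{
	return vcpu->caps & (1ull << feat);
}

/* Tiered selection, like amd_pmu_refresh(): the newest PMU feature the
 * guest has wins, and each tier sizes the PMU differently. */
static int toy_nr_gp_counters(const struct toy_vcpu *vcpu)
{
	if (guest_cap_has(vcpu, FEAT_PERFMON_V2))
		return vcpu->cpuid_nr_counters; /* CPUID-enumerated count */
	else if (guest_cap_has(vcpu, FEAT_PERFCTR_CORE))
		return 6;                       /* assumed core-extension count */
	else
		return 4;                       /* assumed legacy count */
}

int main(void)
{
	struct toy_vcpu vcpu = { .caps = 1ull << FEAT_PERFCTR_CORE };

	printf("gp counters: %d\n", toy_nr_gp_counters(&vcpu)); /* 6 */
	return 0;
}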
nested.c
    114  if (!guest_cpu_cap_has(&svm->vcpu, X86_FEATURE_V_VMSAVE_VMLOAD))  in nested_vmcb_needs_vls_intercept()
    664  if (unlikely(guest_cpu_cap_has(vcpu, X86_FEATURE_LBRV) &&  in nested_vmcb02_prepare_save()
    726  if (guest_cpu_cap_has(vcpu, X86_FEATURE_VGIF) &&  in nested_vmcb02_prepare_control()
    791  if (guest_cpu_cap_has(vcpu, X86_FEATURE_TSCRATEMSR) &&  in nested_vmcb02_prepare_control()
    812  if (guest_cpu_cap_has(vcpu, X86_FEATURE_NRIPS))  in nested_vmcb02_prepare_control()
    822  if (guest_cpu_cap_has(vcpu, X86_FEATURE_NRIPS))  in nested_vmcb02_prepare_control()
    830  if (guest_cpu_cap_has(vcpu, X86_FEATURE_LBRV))  in nested_vmcb02_prepare_control()
    837  if (guest_cpu_cap_has(vcpu, X86_FEATURE_PAUSEFILTER))  in nested_vmcb02_prepare_control()
    841  if (guest_cpu_cap_has(vcpu, X86_FEATURE_PFTHRESHOLD))  in nested_vmcb02_prepare_control()
    1123  if (guest_cpu_cap_has(vcpu, X86_FEATURE_NRIPS))  in nested_svm_vmexit()
    [all …]
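
The pattern in nested.c: while building the vmcb02 control area that actually runs the L2 guest, each optional control supplied by L1 in vmcb12 is honored only if L1 itself was given the matching feature; otherwise a safe default is used. A sketch under invented structure and field names (the real VMCB layout differs):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

enum { FEAT_NRIPS = 0, FEAT_PAUSEFILTER = 1 };

struct toy_vcpu { uint64_t caps; };

struct toy_ctrl {                /* invented stand-in for VMCB controls */
	uint64_t next_rip;
	uint16_t pause_filter_count;
};

static bool guest_cap_has(const struct toy_vcpu *vcpu, int feat)
{
	return vcpu->caps & (1ull << feat);
}

/* Shape of nested_vmcb02_prepare_control(): forward L1's value only
 * when L1 has the feature, else fall back to a safe default. */
static void toy_prepare_control(const struct toy_vcpu *vcpu,
				struct toy_ctrl *vmcb02,
				const struct toy_ctrl *vmcb12)
{
	vmcb02->next_rip = guest_cap_has(vcpu, FEAT_NRIPS) ?
			   vmcb12->next_rip : 0;
	vmcb02->pause_filter_count = guest_cap_has(vcpu, FEAT_PAUSEFILTER) ?
				     vmcb12->pause_filter_count : 0;
}

int main(void)
{
	struct toy_vcpu vcpu = { .caps = 1ull << FEAT_NRIPS };
	struct toy_ctrl vmcb12 = { .next_rip = 0x1000, .pause_filter_count = 3000 };
	struct toy_ctrl vmcb02;

	toy_prepare_control(&vcpu, &vmcb02, &vmcb12);
	printf("next_rip=%#llx pfc=%u\n",
	       (unsigned long long)vmcb02.next_rip, vmcb02.pause_filter_count);
	return 0;  /* next_rip forwarded, pause filter zeroed */
}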
svm.c
    818  !guest_cpu_cap_has(vcpu, X86_FEATURE_FLUSH_L1D));  in svm_recalc_msr_intercepts()
    1057  if (guest_cpu_cap_has(vcpu, X86_FEATURE_RDTSCP))  in svm_recalc_instruction_intercepts()
    2747  !guest_cpu_cap_has(vcpu, X86_FEATURE_TSCRATEMSR))  in svm_get_msr()
    2823  !guest_cpu_cap_has(vcpu, X86_FEATURE_VIRT_SSBD))  in svm_get_msr()
    2925  if (guest_cpu_cap_has(vcpu, X86_FEATURE_TSCRATEMSR) &&  in svm_set_msr()
    2970  !guest_cpu_cap_has(vcpu, X86_FEATURE_VIRT_SSBD))  in svm_set_msr()
    3151  if (!guest_cpu_cap_has(vcpu, X86_FEATURE_INVPCID)) {  in invpcid_interception()
    4459  guest_cpu_cap_has(vcpu, X86_FEATURE_XSAVE));  in svm_vcpu_after_set_cpuid()
    4718  if (!guest_cpu_cap_has(vcpu, X86_FEATURE_LM))  in svm_enter_smm()
    4765  if (!guest_cpu_cap_has(vcpu, X86_FEATURE_LM))  in svm_leave_smm()
    [all …]
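
svm_recalc_msr_intercepts() and svm_recalc_instruction_intercepts() recompute intercept bits from the guest's capabilities after every CPUID update, so anything the guest lacks stays intercepted. A sketch of that recalc idea; the bit positions, bitmap layout, and helper names are invented.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

enum { FEAT_FLUSH_L1D = 0, FEAT_VIRT_SSBD = 1 };          /* features */
enum { BIT_FLUSH_CMD = 0, BIT_VIRT_SPEC_CTRL = 1 };       /* MSR slots */

struct toy_vcpu {
	uint64_t caps;
	uint64_t msr_intercepts;     /* 1 = access exits to the host */
};

static bool guest_cap_has(const struct toy_vcpu *vcpu, int feat)
{
	return vcpu->caps & (1ull << feat);
}

static void set_intercept(struct toy_vcpu *vcpu, int bit, bool on)
{
	if (on)
		vcpu->msr_intercepts |= 1ull << bit;
	else
		vcpu->msr_intercepts &= ~(1ull << bit);
}

/* Shape of svm_recalc_msr_intercepts(): intercept each MSR exactly
 * when the guest lacks the feature that makes it safe to pass through,
 * mirroring the !guest_cpu_cap_has(...) checks above. */
static void toy_recalc_msr_intercepts(struct toy_vcpu *vcpu)
{
	set_intercept(vcpu, BIT_FLUSH_CMD,
		      !guest_cap_has(vcpu, FEAT_FLUSH_L1D));
	set_intercept(vcpu, BIT_VIRT_SPEC_CTRL,
		      !guest_cap_has(vcpu, FEAT_VIRT_SSBD));
}

int main(void)
{
	struct toy_vcpu vcpu = { .caps = 1ull << FEAT_VIRT_SSBD };

	toy_recalc_msr_intercepts(&vcpu);
	printf("intercepts=%#llx\n", (unsigned long long)vcpu.msr_intercepts);
	return 0;  /* FLUSH_CMD intercepted, VIRT_SPEC_CTRL passed through */
}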
svm.h
    504  return guest_cpu_cap_has(&svm->vcpu, X86_FEATURE_VGIF) &&  in nested_vgif_enabled()
    556  return guest_cpu_cap_has(&svm->vcpu, X86_FEATURE_VNMI) &&  in nested_vnmi_enabled()
sev.c
    4452  !guest_cpu_cap_has(vcpu, X86_FEATURE_RDTSCP) &&  in sev_es_recalc_msr_intercepts()
    4453  !guest_cpu_cap_has(vcpu, X86_FEATURE_RDPID));  in sev_es_recalc_msr_intercepts()
    4468  !guest_cpu_cap_has(vcpu, X86_FEATURE_XSAVES) ||  in sev_es_recalc_msr_intercepts()
/arch/x86/kvm/vmx/
sgx.c
    126  guest_cpu_cap_has(vcpu, X86_FEATURE_SGX2)) {  in sgx_inject_fault()
    369  return guest_cpu_cap_has(vcpu, X86_FEATURE_SGX2);  in encls_leaf_enabled_in_guest()
    385  if (!enable_sgx || !guest_cpu_cap_has(vcpu, X86_FEATURE_SGX) ||  in handle_encls()
    386  !guest_cpu_cap_has(vcpu, X86_FEATURE_SGX1)) {  in handle_encls()
    483  if (guest_cpu_cap_has(vcpu, X86_FEATURE_SGX) &&  in vmx_write_encls_bitmap()
    485  if (guest_cpu_cap_has(vcpu, X86_FEATURE_SGX1)) {  in vmx_write_encls_bitmap()
    491  if (guest_cpu_cap_has(vcpu, X86_FEATURE_SGX2))  in vmx_write_encls_bitmap()
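
vmx_write_encls_bitmap() builds a trap bitmap from the guest's SGX tiers: start from trap-everything, then clear bits for the ENCLS leaf groups the guest's capabilities allow. A sketch with one representative leaf per tier; I believe ECREATE (0) and EAUG (13) are the architectural leaf numbers, but everything else here is illustrative.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

enum { FEAT_SGX = 0, FEAT_SGX1 = 1, FEAT_SGX2 = 2 };
enum { LEAF_ECREATE = 0, LEAF_EAUG = 13 };

struct toy_vcpu { uint64_t caps; };

static bool guest_cap_has(const struct toy_vcpu *vcpu, int feat)
{
	return vcpu->caps & (1ull << feat);
}

/* Shape of vmx_write_encls_bitmap(): a set bit means "this ENCLS leaf
 * causes a VM-exit"; leaves are opened up tier by tier. */
static uint64_t toy_encls_bitmap(const struct toy_vcpu *vcpu)
{
	uint64_t bitmap = ~0ull;

	if (guest_cap_has(vcpu, FEAT_SGX)) {
		if (guest_cap_has(vcpu, FEAT_SGX1))
			bitmap &= ~(1ull << LEAF_ECREATE); /* plus other SGX1 leaves */
		if (guest_cap_has(vcpu, FEAT_SGX2))
			bitmap &= ~(1ull << LEAF_EAUG);    /* plus other SGX2 leaves */
	}
	return bitmap;
}

int main(void)
{
	struct toy_vcpu vcpu = {
		.caps = (1ull << FEAT_SGX) | (1ull << FEAT_SGX1),
	};
	uint64_t bm = toy_encls_bitmap(&vcpu);

	printf("trap ECREATE: %d\n", !!(bm & (1ull << LEAF_ECREATE))); /* 0 */
	printf("trap EAUG:    %d\n", !!(bm & (1ull << LEAF_EAUG)));    /* 1 */
	return 0;
}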
vmx.c
    2011  !guest_cpu_cap_has(vcpu, X86_FEATURE_MPX)))  in vmx_get_msr()
    2033  if (!guest_cpu_cap_has(vcpu, X86_FEATURE_VMX))  in vmx_get_msr()
    2116  if (!guest_cpu_cap_has(vcpu, X86_FEATURE_LM))  in nested_vmx_truncate_sysenter_addr()
    2240  !guest_cpu_cap_has(vcpu, X86_FEATURE_MPX)))  in vmx_set_msr()
    2352  if (!guest_cpu_cap_has(vcpu, X86_FEATURE_VMX))  in vmx_set_msr()
    2426  if (!guest_cpu_cap_has(vcpu, X86_FEATURE_DS))  in vmx_set_msr()
    4556  guest_cpu_cap_has(vcpu, X86_FEATURE_RDTSCP) ||  in vmx_secondary_exec_control()
    4557  guest_cpu_cap_has(vcpu, X86_FEATURE_RDPID);  in vmx_secondary_exec_control()
    7738  if (guest_cpu_cap_has(vcpu, X86_FEATURE_VMX))  in vmx_vcpu_after_set_cpuid()
    7747  if (guest_cpu_cap_has(vcpu, X86_FEATURE_VMX))  in vmx_vcpu_after_set_cpuid()
    [all …]
pmu_intel.c
    133  if (!guest_cpu_cap_has(vcpu, X86_FEATURE_PDCM))  in vcpu_get_perf_capabilities()
    199  ret = guest_cpu_cap_has(vcpu, X86_FEATURE_DS);  in intel_is_valid_msr()
nested.c
    6328  if (!guest_cpu_cap_has(vcpu, X86_FEATURE_SGX) ||  in nested_vmx_exit_handled_encls()
    6666  if (guest_cpu_cap_has(vcpu, X86_FEATURE_VMX) &&  in vmx_get_nested_state()
    6807  if (!guest_cpu_cap_has(vcpu, X86_FEATURE_VMX))  in vmx_set_nested_state()
    6841  (!guest_cpu_cap_has(vcpu, X86_FEATURE_VMX) ||  in vmx_set_nested_state()
/arch/x86/kvm/mmu/
mmu.c
    5267  guest_cpu_cap_has(vcpu, X86_FEATURE_GBPAGES),  in reset_guest_rsvds_bits_mask()
    5344  guest_cpu_cap_has(vcpu, X86_FEATURE_GBPAGES),  in reset_shadow_zero_bits_mask()
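
Both mmu.c callers feed X86_FEATURE_GBPAGES into reserved-bit computation: if the guest lacks 1 GiB pages, setting the PS bit in a PDPTE must fault as a reserved-bit violation. A compact sketch; the PS bit position (bit 7) is architectural, the rest is a stand-in for the real mask tables.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

enum { FEAT_GBPAGES = 0 };
#define PTE_PS (1ull << 7)           /* page-size bit in a PDPTE */

struct toy_vcpu { uint64_t caps; };

static bool guest_cap_has(const struct toy_vcpu *vcpu, int feat)
{
	return vcpu->caps & (1ull << feat);
}

/* Bits that must be zero in a level-3 (PDPTE) guest page-table entry:
 * without GBPAGES, a set PS bit there is a reserved-bit fault. */
static uint64_t toy_pdpte_rsvd_bits(const struct toy_vcpu *vcpu)
{
	return guest_cap_has(vcpu, FEAT_GBPAGES) ? 0 : PTE_PS;
}

int main(void)
{
	struct toy_vcpu vcpu = { .caps = 0 };

	printf("PS reserved w/o GBPAGES: %d\n",
	       !!(toy_pdpte_rsvd_bits(&vcpu) & PTE_PS)); /* 1 */
	return 0;
}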