/arch/x86/kernel/cpu/microcode/
intel-ucode-defs.h
     1  { .flags = X86_CPU_ID_FLAG_ENTRY_VALID, .vendor = X86_VENDOR_INTEL, .family = 0x6, .model = 0x03, …
     2  { .flags = X86_CPU_ID_FLAG_ENTRY_VALID, .vendor = X86_VENDOR_INTEL, .family = 0x6, .model = 0x05, …
     3  { .flags = X86_CPU_ID_FLAG_ENTRY_VALID, .vendor = X86_VENDOR_INTEL, .family = 0x6, .model = 0x05, …
     4  { .flags = X86_CPU_ID_FLAG_ENTRY_VALID, .vendor = X86_VENDOR_INTEL, .family = 0x6, .model = 0x05, …
     5  { .flags = X86_CPU_ID_FLAG_ENTRY_VALID, .vendor = X86_VENDOR_INTEL, .family = 0x6, .model = 0x05, …
     6  { .flags = X86_CPU_ID_FLAG_ENTRY_VALID, .vendor = X86_VENDOR_INTEL, .family = 0x6, .model = 0x06, …
     7  { .flags = X86_CPU_ID_FLAG_ENTRY_VALID, .vendor = X86_VENDOR_INTEL, .family = 0x6, .model = 0x06, …
     8  { .flags = X86_CPU_ID_FLAG_ENTRY_VALID, .vendor = X86_VENDOR_INTEL, .family = 0x6, .model = 0x06, …
     9  { .flags = X86_CPU_ID_FLAG_ENTRY_VALID, .vendor = X86_VENDOR_INTEL, .family = 0x6, .model = 0x06, …
    10  { .flags = X86_CPU_ID_FLAG_ENTRY_VALID, .vendor = X86_VENDOR_INTEL, .family = 0x6, .model = 0x07, …
    [all …]
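
The entries above are designated initializers for struct x86_cpu_id, keyed on vendor/family/model and marked usable with X86_CPU_ID_FLAG_ENTRY_VALID. A minimal sketch of a table in that shape (the table name and the choice of models here are illustrative, not the generated header's contents):

```c
#include <linux/mod_devicetable.h>
#include <asm/cpu_device_id.h>

/* Illustrative table in the shape of intel-ucode-defs.h. */
static const struct x86_cpu_id builtin_ucode_ids[] = {
	{ .flags = X86_CPU_ID_FLAG_ENTRY_VALID, .vendor = X86_VENDOR_INTEL,
	  .family = 0x6, .model = 0x03 },
	{ .flags = X86_CPU_ID_FLAG_ENTRY_VALID, .vendor = X86_VENDOR_INTEL,
	  .family = 0x6, .model = 0x05 },
	{}	/* zeroed sentinel terminates the table */
};
```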
|
core.c
   143  case X86_VENDOR_INTEL:  in load_ucode_bsp()
   179  case X86_VENDOR_INTEL:  in load_ucode_ap()
   238  case X86_VENDOR_INTEL:  in reload_early_microcode()
   829  if (c->x86_vendor == X86_VENDOR_INTEL)  in microcode_init()
|
internal.h
    79  return X86_VENDOR_INTEL;  in x86_cpuid_vendor()
|
intel.c
   392  if (microcode_loader_disabled() || boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)  in save_builtin_microcode()
   645  if (c->x86_vendor != X86_VENDOR_INTEL || c->x86 < 6 ||  in init_intel_microcode()
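
The recurring pattern in this directory is an early bail-out when the CPU is not an Intel part the loader supports. A sketch of the gate at intel.c line 645, under a hypothetical function name:

```c
#include <asm/processor.h>

/* Sketch of the vendor/family gate seen at intel.c line 645. */
static bool intel_microcode_supported(struct cpuinfo_x86 *c)
{
	/* The Intel loader only targets Intel parts of family 6 or newer. */
	if (c->x86_vendor != X86_VENDOR_INTEL || c->x86 < 6)
		return false;

	return true;
}
```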
|
/arch/x86/kernel/acpi/
cstate.c
    40  else if (c->x86_vendor == X86_VENDOR_INTEL) {  in acpi_processor_power_init_bm_check()
    54  if (c->x86_vendor == X86_VENDOR_INTEL &&  in acpi_processor_power_init_bm_check()
   202  if ((c->x86_vendor == X86_VENDOR_INTEL) && !(reg->access_size & 0x2))  in acpi_processor_ffh_cstate_probe()
   234  if (c->x86_vendor != X86_VENDOR_INTEL &&  in ffh_cstate_init()
|
/arch/x86/kernel/cpu/
perfctr-watchdog.c
    54  case X86_VENDOR_INTEL:  in nmi_perfctr_msr_to_bit()
    87  case X86_VENDOR_INTEL:  in nmi_evntsel_msr_to_bit()
|
common.c
   104  [X86_VENDOR_INTEL] = {
   121  X86_MATCH_VFM(INTEL_IVYBRIDGE_X, &ppin_info[X86_VENDOR_INTEL]),
   122  X86_MATCH_VFM(INTEL_HASWELL_X, &ppin_info[X86_VENDOR_INTEL]),
   123  X86_MATCH_VFM(INTEL_BROADWELL_D, &ppin_info[X86_VENDOR_INTEL]),
   124  X86_MATCH_VFM(INTEL_BROADWELL_X, &ppin_info[X86_VENDOR_INTEL]),
   125  X86_MATCH_VFM(INTEL_SKYLAKE_X, &ppin_info[X86_VENDOR_INTEL]),
   126  X86_MATCH_VFM(INTEL_ICELAKE_X, &ppin_info[X86_VENDOR_INTEL]),
   127  X86_MATCH_VFM(INTEL_ICELAKE_D, &ppin_info[X86_VENDOR_INTEL]),
  1338  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)  in vulnerable_to_its()
  1371  if (boot_cpu_data.x86_vendor == X86_VENDOR_INTEL)  in cpu_has_old_microcode()
  [all …]
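
common.c pairs a per-vendor array (ppin_info, line 104) with an X86_MATCH_VFM() table whose driver_data points back into that array; x86_match_cpu() then resolves the running CPU to its entry. A sketch of the pattern with illustrative names and payload:

```c
#include <linux/types.h>
#include <asm/cpu_device_id.h>
#include <asm/intel-family.h>

struct vendor_info {
	u32 msr_ppin;	/* illustrative payload */
};

static struct vendor_info vinfo[] = {
	[X86_VENDOR_INTEL] = { .msr_ppin = 0x4f },
};

static const struct x86_cpu_id vinfo_ids[] = {
	X86_MATCH_VFM(INTEL_SKYLAKE_X, &vinfo[X86_VENDOR_INTEL]),
	{}
};

static struct vendor_info *lookup_vendor_info(void)
{
	const struct x86_cpu_id *id = x86_match_cpu(vinfo_ids);

	/* driver_data carries the pointer stashed by X86_MATCH_VFM(). */
	return id ? (struct vendor_info *)id->driver_data : NULL;
}
```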
|
match.c
    28  if (c->x86_vendor == X86_VENDOR_INTEL)  in x86_match_vendor_cpu_type()
|
topology_common.c
    33  if (c->x86_vendor == X86_VENDOR_INTEL) {  in get_topology_cpu_type()
   164  case X86_VENDOR_INTEL:  in parse_topology()
|
intel.c
   188  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)  in intel_unlock_cpuid_leafs()
   784  .c_x86_vendor = X86_VENDOR_INTEL,
|
aperfmperf.c
   324  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)  in bp_init_freq_invariance()
|
/arch/x86/kernel/cpu/mce/
threshold.c
    63  case X86_VENDOR_INTEL:  in mce_handle_storm()
|
core.c
   522  case X86_VENDOR_INTEL:  in mce_usable_address()
   539  case X86_VENDOR_INTEL:  in mce_is_memory_error()
  1582  if (m->cpuvendor == X86_VENDOR_INTEL ||  in do_machine_check()
  2026  case X86_VENDOR_INTEL:  in __mcheck_cpu_apply_quirks()
  2048  case X86_VENDOR_INTEL:  in __mcheck_cpu_ancient_init()
  2122  case X86_VENDOR_INTEL:  in __mcheck_cpu_init_vendor()
  2147  case X86_VENDOR_INTEL:  in __mcheck_cpu_clear_vendor()
  2189  if (boot_cpu_data.x86_vendor == X86_VENDOR_INTEL)  in filter_mce()
  2457  if (boot_cpu_data.x86_vendor == X86_VENDOR_INTEL ||  in vendor_disable_error_reporting()
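
mce/core.c routes vendor-specific work through switch statements on the vendor id, taken either from boot_cpu_data or from the recorded struct mce (m->cpuvendor at line 1582). A sketch of that dispatch shape, with an illustrative function name:

```c
#include <asm/processor.h>

/* Sketch of the switch-on-vendor dispatch used throughout mce/core.c,
 * e.g. __mcheck_cpu_init_vendor() at line 2122.
 */
static void mcheck_init_vendor(struct cpuinfo_x86 *c)
{
	switch (c->x86_vendor) {
	case X86_VENDOR_INTEL:
		/* Intel back end, e.g. CMCI setup, goes here. */
		break;
	default:
		/* Other vendors are handled by their own cases. */
		break;
	}
}
```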
|
intel.c
    90  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL &&  in cmci_supported()
|
/arch/x86/crypto/
twofish_glue_3way.c
   109  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)  in is_blacklisted_cpu()
|
blowfish_glue.c
   140  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)  in is_blacklisted_cpu()
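
Both glue files share the same is_blacklisted_cpu() shape: the vendor test short-circuits the model test, so non-Intel CPUs are never blacklisted. A sketch under that assumption (the family value below is illustrative, not the files' actual criteria):

```c
#include <linux/types.h>
#include <asm/processor.h>

static bool is_blacklisted_cpu(void)
{
	/* Only Intel parts are ever on the blacklist. */
	if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)
		return false;

	/* Illustrative: refuse on a microarchitecture where the
	 * optimized assembler path is slower than the generic code.
	 */
	if (boot_cpu_data.x86 == 0x0f)
		return true;

	return false;
}
```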
|
/arch/x86/include/asm/
acpi.h
   113  return (c->x86_vendor == X86_VENDOR_INTEL ||  in arch_has_acpi_pdc()
|
intel-family.h
    43  #define IFM(_fam, _model) VFM_MAKE(X86_VENDOR_INTEL, _fam, _model)
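
IFM() is a thin wrapper that bakes X86_VENDOR_INTEL into VFM_MAKE(). As far as I read asm/cpu_device_id.h, VFM_MAKE() packs the model into bits 0-7, the family into bits 8-15 and the vendor into bits 16-23, so the expansion is roughly:

```c
/* Approximate expansion, assuming the bit layout in asm/cpu_device_id.h:
 * model in bits 0-7, family in bits 8-15, vendor in bits 16-23.
 */
#define SKETCH_VFM_MAKE(_vendor, _fam, _model) \
	(((_vendor) << 16) | ((_fam) << 8) | (_model))

/* SKETCH_IFM(6, 0x8F) then yields an Intel family-6/model-0x8F key
 * that X86_MATCH_VFM() tables can match against.
 */
#define SKETCH_IFM(_fam, _model) \
	SKETCH_VFM_MAKE(X86_VENDOR_INTEL, _fam, _model)
```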
|
/arch/x86/kernel/cpu/resctrl/
core.c
   262  if (boot_cpu_data.x86_vendor == X86_VENDOR_INTEL)  in rdt_get_cache_alloc_cfg()
   806  if (boot_cpu_data.x86_vendor == X86_VENDOR_INTEL)  in get_mem_config()
   901  if (boot_cpu_data.x86_vendor == X86_VENDOR_INTEL)  in check_quirks()
   957  if (boot_cpu_data.x86_vendor == X86_VENDOR_INTEL)  in rdt_init_res_defs()
|
pseudo_lock.c
    63  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL ||  in resctrl_arch_get_prefetch_disable_bits()
|
/arch/x86/xen/
pmu.c
   151  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL &&  in is_intel_pmu_msr()
   368  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)  in xen_read_pmc()
|
/arch/x86/mm/
cpu_entry_area.c
   111  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)  in percpu_setup_debug_store()
|
/arch/x86/kernel/
tsc.c
   668  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)  in native_calibrate_tsc()
   741  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)  in cpu_khz_from_cpuid()
  1291  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL) {  in unsynchronized_tsc()
|
/arch/x86/kvm/
pmu.h
   185  bool is_intel = boot_cpu_data.x86_vendor == X86_VENDOR_INTEL;  in kvm_init_pmu_capability()
|
/arch/x86/events/
msr.c
    44  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL ||  in test_intel()
|