References to X86_VENDOR_AMD across arch/x86:

/arch/x86/kernel/cpu/

  perfctr-watchdog.c
      50  case X86_VENDOR_AMD:  in nmi_perfctr_msr_to_bit()
      83  case X86_VENDOR_AMD:  in nmi_evntsel_msr_to_bit()

  topology_common.c
      39  if (c->x86_vendor == X86_VENDOR_AMD) {  in get_topology_cpu_type()
     156  case X86_VENDOR_AMD:  in parse_topology()
     197  if (c->x86_vendor == X86_VENDOR_AMD)  in topo_set_ids()

  topology_amd.c
     102  if (tscan->c->x86_vendor == X86_VENDOR_AMD) {  in parse_8000_001e()
     157  if (cpu_has(c, X86_FEATURE_TOPOEXT) || c->x86_vendor != X86_VENDOR_AMD ||  in topoext_fixup()

  match.c
      30  if (c->x86_vendor == X86_VENDOR_AMD)  in x86_match_vendor_cpu_type()

  amd.c
     381  X86_MATCH_VFM_STEPS(VFM_MAKE(X86_VENDOR_AMD, fam, model), \
     859  X86_MATCH_VFM_STEPS(VFM_MAKE(X86_VENDOR_AMD, 0x17, 0x01), 0x2, 0x2, 0x0800126e),
     860  X86_MATCH_VFM_STEPS(VFM_MAKE(X86_VENDOR_AMD, 0x17, 0x31), 0x0, 0x0, 0x08301052),
    1245  .c_x86_vendor = X86_VENDOR_AMD,
    1297  if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD)  in amd_check_microcode()

  cacheinfo.c
     271  return (cpu_vendor == X86_VENDOR_AMD || cpu_vendor == X86_VENDOR_HYGON) ?  in fill_cpuid4_info()
     283  op = (c->x86_vendor == X86_VENDOR_AMD || c->x86_vendor == X86_VENDOR_HYGON) ? 0x8000001d : 4;  in find_num_cache_leaves()
     547  if (c->x86_vendor == X86_VENDOR_AMD || c->x86_vendor == X86_VENDOR_HYGON) {  in __cache_cpumap_setup()
     633  if (cpu_vendor == X86_VENDOR_AMD || cpu_vendor == X86_VENDOR_HYGON)  in populate_cache_leaves()

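The amd.c hits at lines 859-860 show the table-driven form of vendor checking: a struct x86_cpu_id array built with X86_MATCH_VFM_STEPS() and queried with x86_match_cpu(). A minimal sketch of that pattern follows; the vendor/family/model and stepping range are copied from the listing above, but the 0x1234 payload is a placeholder (the real payloads in amd.c appear to be minimum microcode revisions), and both function and table names are made up for illustration:

#include <asm/cpu_device_id.h>

/*
 * Hypothetical match table: AMD family 0x17, model 0x01, steppings
 * 0x2-0x2 only.  The 0x1234 payload is a placeholder, not kernel data.
 */
static const struct x86_cpu_id amd_example_ids[] = {
        X86_MATCH_VFM_STEPS(VFM_MAKE(X86_VENDOR_AMD, 0x17, 0x01), 0x2, 0x2, 0x1234),
        {}      /* table terminator */
};

static bool on_matched_amd_part(void)
{
        /* x86_match_cpu() returns the matching entry, or NULL */
        return x86_match_cpu(amd_example_ids) != NULL;
}
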
/arch/x86/include/asm/

  geode.h
      23  return ((boot_cpu_data.x86_vendor == X86_VENDOR_AMD) &&  in is_geode_lx()

  acpi.h
     100  boot_cpu_data.x86_vendor == X86_VENDOR_AMD &&  in acpi_processor_cstate_check()

  processor.h
     194  #define X86_VENDOR_AMD 2  (macro definition)

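processor.h line 194 is the definition itself: vendor identities are small integers stored in cpuinfo_x86's x86_vendor field (X86_VENDOR_AMD is 2), and most of the sites in this listing simply compare against the boot CPU's value, as geode.h and acpi.h do above. A minimal sketch of that check; the helper name is made up, the kernel writes the comparison inline at each site:

#include <asm/processor.h>

/* Illustrative helper, not an existing kernel API. */
static inline bool cpu_is_amd(void)
{
        return boot_cpu_data.x86_vendor == X86_VENDOR_AMD;
}
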
/arch/x86/kernel/

  amd_nb.c
     128  if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD &&  in early_is_amd_nb()
     147  if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD &&  in amd_get_mmconfig_range()
     318  if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD &&  in init_amd_nbs()

/arch/x86/kernel/cpu/microcode/

  core.c
      88  if (x86_cpuid_vendor() != X86_VENDOR_AMD)  in amd_check_current_patch_level()
     148  case X86_VENDOR_AMD:  in load_ucode_bsp()
     183  case X86_VENDOR_AMD:  in load_ucode_ap()
     242  case X86_VENDOR_AMD:  in reload_early_microcode()
     831  else if (c->x86_vendor == X86_VENDOR_AMD)  in microcode_init()

  internal.h
      82  return X86_VENDOR_AMD;  in x86_cpuid_vendor()

  amd.c
    1104  if (microcode_loader_disabled() || c->x86_vendor != X86_VENDOR_AMD || c->x86 < 0x10)  in save_microcode_in_initrd()
    1197  if (c->x86_vendor != X86_VENDOR_AMD || c->x86 < 0x10) {  in init_amd_microcode()

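The microcode hits are notable because the early loader runs before the usual CPU identification has filled in boot_cpu_data, so internal.h provides x86_cpuid_vendor(), which essentially classifies the vendor straight from the CPUID vendor string ("AuthenticAMD" on AMD parts). A standalone userspace sketch of the same classification, using GCC/Clang's <cpuid.h>; this illustrates the idea only and is not the kernel's code:

#include <cpuid.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
        unsigned int eax, ebx, ecx, edx;
        char vendor[13];

        /* CPUID leaf 0: vendor string comes back in EBX, EDX, ECX order */
        if (!__get_cpuid(0, &eax, &ebx, &ecx, &edx))
                return 1;

        memcpy(vendor + 0, &ebx, 4);
        memcpy(vendor + 4, &edx, 4);
        memcpy(vendor + 8, &ecx, 4);
        vendor[12] = '\0';

        printf("%s CPU (%s)\n",
               strcmp(vendor, "AuthenticAMD") ? "non-AMD" : "AMD", vendor);
        return 0;
}
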
/arch/x86/kernel/acpi/

  cstate.c
      92  if (c->x86_vendor == X86_VENDOR_AMD && c->x86 >= 0x17) {  in acpi_processor_power_init_bm_check()
     235  c->x86_vendor != X86_VENDOR_AMD &&  in ffh_cstate_init()

  cppc.c
      30  case X86_VENDOR_AMD:  in cpc_supported_by_cpu()
     122  if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD)  in init_freq_invariance_cppc()

/arch/x86/include/asm/amd/

  nb.h
      53  if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD)  in amd_gart_present()

/arch/x86/kernel/cpu/mtrr/

  legacy.c
      14  case X86_VENDOR_AMD:  in mtrr_set_if()

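mtrr_set_if(), like load_ucode_bsp() in the microcode listing above, uses the other recurring shape: a switch on the vendor ID that selects a per-vendor implementation. A minimal sketch of that dispatch; the function and the handler comments are hypothetical:

#include <asm/processor.h>

/* Hypothetical per-vendor setup dispatch; the name is made up. */
static void example_vendor_setup(struct cpuinfo_x86 *c)
{
        switch (c->x86_vendor) {
        case X86_VENDOR_AMD:
                /* AMD-specific path would go here */
                break;
        case X86_VENDOR_INTEL:
                /* Intel-specific path would go here */
                break;
        default:
                /* generic fallback */
                break;
        }
}
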
/arch/x86/xen/

  enlighten.c
      88  if ((boot_cpu_data.x86_vendor == X86_VENDOR_AMD ||  in xen_hypercall_setfunc()
     119  if ((boot_cpu_data.x86_vendor == X86_VENDOR_AMD ||  in __xen_hypercall_setfunc()

  pmu.c
      72  if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD) {  in xen_pmu_arch_init()
     134  if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD &&  in is_amd_pmu_msr()

/arch/x86/kernel/cpu/mce/

  core.c
     230  if (m->cpuvendor != X86_VENDOR_AMD && m->cpuvendor != X86_VENDOR_HYGON)  in print_mce()
     519  case X86_VENDOR_AMD:  in mce_usable_address()
     535  case X86_VENDOR_AMD:  in mce_is_memory_error()
     574  if (m->cpuvendor == X86_VENDOR_AMD && m->status & MCI_STATUS_DEFERRED)  in mce_is_correctable()
    2023  case X86_VENDOR_AMD:  in __mcheck_cpu_apply_quirks()
    2068  if (c->x86_vendor == X86_VENDOR_AMD || c->x86_vendor == X86_VENDOR_HYGON) {  in __mcheck_cpu_init_early()
    2126  case X86_VENDOR_AMD:  in __mcheck_cpu_init_vendor()
    2187  if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD)  in filter_mce()
    2459  boot_cpu_data.x86_vendor == X86_VENDOR_AMD ||  in vendor_disable_error_reporting()

  dev-mcelog.c
      70  if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD)  in dev_mce_log()

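Several of the MCE hits (and the cacheinfo.c ones earlier) pair X86_VENDOR_AMD with X86_VENDOR_HYGON: Hygon Dhyana parts derive from AMD Zen and share most of its machine-check and cache-topology behavior. A sketch of a predicate capturing that recurring pairing; the helper name is made up, the kernel spells out the two-term comparison at each site:

#include <asm/processor.h>

/* Hypothetical helper: vendors following AMD's MCA/cache conventions. */
static inline bool vendor_is_amd_like(u8 vendor)
{
        return vendor == X86_VENDOR_AMD || vendor == X86_VENDOR_HYGON;
}
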
/arch/x86/kernel/cpu/resctrl/

  core.c
     808  else if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD)  in get_mem_config()
     821  if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD)  in get_slow_mem_config()
     959  else if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD)  in rdt_init_res_defs()
     990  if (c->x86_vendor == X86_VENDOR_AMD && !c->x86_cache_mbm_width_offset)  in resctrl_cpu_detect()

/arch/x86/pci/

  amd_bus.c
     402  if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD &&  in amd_postcore_init()

/arch/x86/kernel/apic/

  apic.c
     202  if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD &&  in modern_apic()
    1223  if (modern_apic() || boot_cpu_data.x86_vendor == X86_VENDOR_AMD)  in sync_Arb_IDs()
    2007  case X86_VENDOR_AMD:  in detect_init_APIC()

/arch/x86/mm/

  fault.c
     118  c->x86_vendor == X86_VENDOR_AMD &&  in is_amd_k8_pre_npt()
     422  if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD  in is_errata93()

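The fault.c hits show one last common shape: gating an erratum workaround on vendor plus family, so it can only fire on the affected parts (both functions here concern AMD K8-era CPUs, family 0xf). A minimal sketch of that kind of gate; the function name is illustrative and the family value is used only as an example:

#include <asm/processor.h>

/* Illustrative errata gate: AMD family 0xf (K8) parts only. */
static bool affected_by_example_erratum(void)
{
        return boot_cpu_data.x86_vendor == X86_VENDOR_AMD &&
               boot_cpu_data.x86 == 0xf;
}
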