/xen/xen/arch/x86/include/asm/
hardirq.h
    17  #define in_irq() (local_irq_count(smp_processor_id()) != 0)
    19  #define irq_enter() (local_irq_count(smp_processor_id())++)
    20  #define irq_exit() (local_irq_count(smp_processor_id())--)
    23  #define in_nmi_handler() (nmi_count(smp_processor_id()) != 0)
    24  #define nmi_enter() (nmi_count(smp_processor_id())++)
    25  #define nmi_exit() (nmi_count(smp_processor_id())--)
    28  #define in_mce_handler() (mce_count(smp_processor_id()) != 0)
    29  #define mce_enter() (mce_count(smp_processor_id())++)
    30  #define mce_exit() (mce_count(smp_processor_id())--)
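These are Xen's per-CPU context-nesting counters: the enter/exit macros bump a counter indexed by the current CPU, and in_irq()/in_nmi_handler()/in_mce_handler() simply test whether that counter is non-zero. A minimal standalone sketch of the same idea (the irq_count[] array, current_cpu variable and fake_interrupt_handler() are illustrative stand-ins, not Xen code):

```c
#include <assert.h>
#include <stdio.h>

#define NR_CPUS 4

/* Illustrative stand-ins for Xen's per-CPU counters and smp_processor_id(). */
static unsigned int irq_count[NR_CPUS];
static unsigned int current_cpu;

#define smp_processor_id()    (current_cpu)
#define local_irq_count(cpu)  (irq_count[cpu])

#define in_irq()     (local_irq_count(smp_processor_id()) != 0)
#define irq_enter()  (local_irq_count(smp_processor_id())++)
#define irq_exit()   (local_irq_count(smp_processor_id())--)

static void fake_interrupt_handler(void)
{
    irq_enter();          /* nesting count 0 -> 1 */
    assert(in_irq());     /* the handler body observes IRQ context */
    irq_exit();           /* nesting count 1 -> 0 */
}

int main(void)
{
    assert(!in_irq());            /* normal context before the "interrupt" */
    fake_interrupt_handler();
    assert(!in_irq());            /* enter/exit stayed balanced */
    printf("CPU%u: nesting counter balanced\n", smp_processor_id());
    return 0;
}
```

irq_enter()/irq_exit() must stay balanced, otherwise in_irq() reports the wrong context for everything that runs afterwards on that CPU.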
|
/xen/xen/arch/x86/
smp.c
    96  cpumask_or(scratch, mask, cpumask_of(smp_processor_id()));  in send_IPI_mask()
    176  mask &= ~(1UL << smp_processor_id());  in send_IPI_mask_flat()
    215  if ( !cpu_online(query_cpu) || (query_cpu == smp_processor_id()) )  in send_IPI_mask_phys()
    257  cpumask_clear_cpu(smp_processor_id(), &flush_cpumask);  in invalidate_interrupt()
    262  unsigned int cpu = smp_processor_id();  in flush_area_mask()
    301  cpumask_of(smp_processor_id()));  in new_tlbflush_clock_period()
    318  if ( cpumask_test_cpu(smp_processor_id(), mask) )  in smp_send_call_function_mask()
    341  cpumask_clear_cpu(smp_processor_id(), &cpu_online_map);  in __stop_this_cpu()
    357  unsigned int cpu = smp_processor_id();  in smp_send_stop()
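A pattern common to send_IPI_mask(), send_IPI_mask_flat() and send_IPI_mask_phys() above is that the sender strips or skips its own CPU, since it can do the work locally instead of sending itself an interrupt. A standalone sketch of that filtering step, using a plain bitmask in place of Xen's cpumask_t (exclude_self() and the mask values are made up for illustration):

```c
#include <stdio.h>

#define NR_CPUS 8

static unsigned int current_cpu = 2;    /* stand-in for smp_processor_id() */
#define smp_processor_id()  (current_cpu)

/* Mirrors "mask &= ~(1UL << smp_processor_id())" in send_IPI_mask_flat(). */
static unsigned long exclude_self(unsigned long mask)
{
    return mask & ~(1UL << smp_processor_id());
}

int main(void)
{
    unsigned long requested = 0x0fUL;                 /* work wanted on CPUs 0-3 */
    unsigned long ipi_mask  = exclude_self(requested);

    printf("requested=0x%lx, IPIs sent to=0x%lx\n", requested, ipi_mask);
    /* CPU 2 performs its share directly instead of interrupting itself. */
    return 0;
}
```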
|
/xen/xen/include/asm-generic/
hardirq.h
    15  #define in_irq() (local_irq_count(smp_processor_id()) != 0)
    17  #define irq_enter() (local_irq_count(smp_processor_id())++)
    18  #define irq_exit() (local_irq_count(smp_processor_id())--)
|
/xen/xen/common/
softirq.c
    42  cpu = smp_processor_id();  in __do_softirq()
    85  unsigned int cpu, this_cpu = smp_processor_id();  in cpumask_raise_softirq()
    108  unsigned int this_cpu = smp_processor_id();  in cpu_raise_softirq()
    128  unsigned int cpu, this_cpu = smp_processor_id();  in cpu_raise_softirq_batch_finish()
    142  set_bit(nr, &softirq_pending(smp_processor_id()));  in raise_softirq()
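raise_softirq() (line 142) only marks the softirq pending on the calling CPU; the cpu_raise_softirq()/cpumask_raise_softirq() variants are the ones that may notify other CPUs. The standalone model below shows the core mechanism, a per-CPU pending bitmap that is set now and drained later by __do_softirq()-style code on the same CPU (pending[], handlers[], current_cpu and demo_action() are illustrative, not Xen's internals):

```c
#include <stdio.h>

#define NR_CPUS     4
#define NR_SOFTIRQS 8

/* Illustrative per-CPU pending bitmaps and handler table (not Xen's). */
static unsigned long pending[NR_CPUS];
static void (*handlers[NR_SOFTIRQS])(void);
static unsigned int current_cpu;

#define smp_processor_id()    (current_cpu)
#define softirq_pending(cpu)  (pending[cpu])

static void open_softirq(unsigned int nr, void (*fn)(void))
{
    handlers[nr] = fn;
}

/* Like raise_softirq(): mark the softirq pending on the *calling* CPU only. */
static void raise_softirq(unsigned int nr)
{
    softirq_pending(smp_processor_id()) |= 1UL << nr;
}

/* Simplified __do_softirq(): drain this CPU's pending bits. */
static void do_softirq(void)
{
    unsigned int cpu = smp_processor_id();

    while ( softirq_pending(cpu) )
    {
        unsigned int nr = __builtin_ctzl(softirq_pending(cpu));

        softirq_pending(cpu) &= ~(1UL << nr);
        handlers[nr]();
    }
}

static void demo_action(void)
{
    printf("softirq ran on CPU%u\n", smp_processor_id());
}

int main(void)
{
    open_softirq(0, demo_action);
    raise_softirq(0);   /* sets the pending bit for the "current" CPU */
    do_softirq();       /* later, the same CPU processes it */
    return 0;
}
```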
|
tasklet.c
    75  tasklet_schedule_on_cpu(t, smp_processor_id());  in tasklet_schedule()
    109  unsigned int cpu = smp_processor_id();  in do_tasklet()
    140  unsigned int cpu = smp_processor_id();  in tasklet_softirq_action()
    194  t->scheduled_on = smp_processor_id();  in migrate_tasklets_from_cpu()
    248  void *hcpu = (void *)(long)smp_processor_id();  in tasklet_subsys_init()
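tasklet_schedule() (line 75) is just tasklet_schedule_on_cpu() targeting the calling CPU, so a plain schedule call defers work to the CPU it was issued from. A hedged usage sketch, assuming Xen's DECLARE_TASKLET(name, fn, data) helper and the current void * handler signature; my_work() and kick_my_work() are hypothetical:

```c
/*
 * Hedged sketch, not code from the tree: assumes DECLARE_TASKLET(name, fn, data)
 * and a void * handler argument; my_work()/kick_my_work() are hypothetical.
 */
#include <xen/tasklet.h>

static void my_work(void *unused)
{
    /* Deferred work; runs later on the CPU that scheduled the tasklet. */
}

static DECLARE_TASKLET(my_tasklet, my_work, NULL);

void kick_my_work(void)
{
    /* Shorthand for tasklet_schedule_on_cpu(&my_tasklet, smp_processor_id()). */
    tasklet_schedule(&my_tasklet);
}
```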
|
stop_machine.c
    79  unsigned int this = smp_processor_id();  in stop_machine_run()
    149  BUG_ON(cpu != smp_processor_id());  in stopmachine_action()
    165  if ( (stopmachine_data.fn_cpu == smp_processor_id()) ||  in stopmachine_action()
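stop_machine_run() parks every online CPU in stopmachine_action() and runs the callback only on the CPU whose ID matches fn_cpu (the smp_processor_id() comparison at line 165). A hedged usage sketch, assuming the int stop_machine_run(int (*fn)(void *), void *data, unsigned int cpu) prototype; do_risky_update() and apply_update_on() are hypothetical:

```c
/*
 * Hedged sketch: assumes the prototype from xen/stop_machine.h,
 *   int stop_machine_run(int (*fn)(void *), void *data, unsigned int cpu);
 * do_risky_update() and apply_update_on() are hypothetical.
 */
#include <xen/stop_machine.h>

/* Runs on 'cpu' while every other online CPU spins with interrupts disabled. */
static int do_risky_update(void *data)
{
    /* ... mutate global state that must not be observed mid-update ... */
    return 0;
}

int apply_update_on(unsigned int cpu)
{
    return stop_machine_run(do_risky_update, NULL, cpu);
}
```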
|
spinlock.c
    74  (void *)(unsigned long)smp_processor_id());  in lockdebug_init()
    84  unsigned int cpu = smp_processor_id();  in check_lock()
    168  unsigned int cpu = smp_processor_id();  in lock_enter()
    192  unsigned int cpu = smp_processor_id();  in lock_exit()
    229  debug->cpu = smp_processor_id();  in got_lock()
    237  BUG_ON(debug->cpu != smp_processor_id());  in rel_lock()
    474  : lock->recurse_cpu == smp_processor_id();  in _rspin_is_locked()
    484  unsigned int cpu = smp_processor_id();  in _rspin_trylock()
    511  unsigned int cpu = smp_processor_id();  in _rspin_lock()
|
smp.c
    43  cpumask_of(smp_processor_id()));  in smp_call_function()
    80  unsigned int cpu = smp_processor_id();  in smp_call_function_interrupt()
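smp_call_function() (line 43) removes cpumask_of(smp_processor_id()) from the online map before sending IPIs, so the function runs on every CPU except the caller's; callers that also need it locally invoke it themselves. A hedged usage sketch, assuming the usual void smp_call_function(void (*fn)(void *info), void *info, int wait) prototype; drain_local_state() and drain_all() are hypothetical:

```c
/*
 * Hedged sketch: assumes the prototype from xen/smp.h,
 *   void smp_call_function(void (*fn)(void *info), void *info, int wait);
 * drain_local_state() and drain_all() are hypothetical.
 */
#include <xen/smp.h>

/* Per-CPU action; remote CPUs run it from smp_call_function_interrupt(). */
static void drain_local_state(void *unused)
{
    /* ... per-CPU work ... */
}

void drain_all(void)
{
    smp_call_function(drain_local_state, NULL, 1);  /* every online CPU but this one */
    drain_local_state(NULL);                        /* the caller handles itself */
}
```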
|
cpu.c
    99  cpu_notifier_call_chain(smp_processor_id(), CPU_DYING, NULL, true);  in _take_cpu_down()
    198  BUG_ON(smp_processor_id() != 0);  in disable_nonboot_cpus()
    231  if ( park_offline_cpus ? cpu == smp_processor_id()  in enable_nonboot_cpus()
|
keyhandler.c
    140  unsigned int cpu = smp_processor_id();  in dump_execstate()
    151  smp_processor_id(), current);  in dump_execstate()
    217  if ( softirq_pending(smp_processor_id()) )  in dump_hwdom_action()
    237  if ( alt_key_handling && softirq_pending(smp_processor_id()) )  in dump_hwdom_registers()
    377  unsigned int cpu = smp_processor_id();  in read_clocks_slave()
    389  unsigned int cpu = smp_processor_id(), min_stime_cpu, max_stime_cpu;  in read_clocks()
|
/xen/xen/arch/arm/
smp.c
    24  cpumask_andnot(&target_mask, mask, cpumask_of(smp_processor_id()));  in smp_send_call_function_mask()
    28  if ( cpumask_test_cpu(smp_processor_id(), mask) )  in smp_send_call_function_mask()
|
cpuerrata.c
    105  smp_processor_id(), desc);  in install_bp_hardening_vec()
    177  if ( cpu_data[smp_processor_id()].pfr64.csv2 )  in enable_smccc_arch_workaround_1()
    222  if ( cpu_data[smp_processor_id()].pfr64.csv2 == 3 )  in enable_spectre_bhb_workaround()
    225  if ( cpu_data[smp_processor_id()].mm64.ecbhb )  in enable_spectre_bhb_workaround()
    228  if ( cpu_data[smp_processor_id()].isa64.clearbhb )  in enable_spectre_bhb_workaround()
    314  smp_processor_id(), desc);  in install_bp_hardening_vecs()
|
smpboot.c
    351  smp_processor_id(), current_cpu_data.midr.bits,  in start_secondary()
    360  smp_processor_id(), current_cpu_data.midr.bits,  in start_secondary()
    369  smp_processor_id(), read_dcache_line_bytes(),  in start_secondary()
    412  printk(XENLOG_DEBUG "CPU %u booted.\n", smp_processor_id());  in start_secondary()
    420  unsigned int cpu = smp_processor_id();  in __cpu_disable()
|
guest_atomics.c
    46  smp_processor_id(), counter);  in calibrate_safe_atomic()
|
/xen/xen/arch/ppc/include/asm/
current.h
    43  #define smp_processor_id() 0 /* TODO: Fix this */  (macro definition)
    45  #define get_per_cpu_offset() smp_processor_id() /* TODO: Fix this */
|
/xen/xen/arch/x86/oprofile/
nmi_int.c
    136  int cpu = smp_processor_id();  in nmi_save_registers()
    184  int cpu = smp_processor_id();  in nmi_cpu_setup()
    250  int cpu = smp_processor_id();  in nmi_cpu_shutdown()
    266  int cpu = smp_processor_id();  in nmi_cpu_start()
    284  int cpu = smp_processor_id();  in nmi_cpu_stop()
|
/xen/xen/arch/x86/genapic/
x2apic.c
    32  unsigned int cpu, this_cpu = smp_processor_id();  in init_apic_ldr_x2apic_cluster()
    114  if ( !cpu_online(cpu) || (cpu == smp_processor_id()) )  in send_IPI_mask_x2apic_phys()
    128  unsigned int cpu = smp_processor_id();  in send_IPI_mask_x2apic_cluster()
    306  (void *)(long)smp_processor_id());  in apic_x2apic_probe()
|
delivery.c
    18  val |= SET_xAPIC_LOGICAL_ID(1UL << smp_processor_id());  in init_apic_ldr_flat()
|
/xen/xen/arch/x86/cpu/mcheck/
mce_intel.c
    60  unsigned int cpu = smp_processor_id();  in intel_thermal_interrupt()
    116  unsigned int cpu = smp_processor_id();  in intel_init_thermal()
    492  unsigned int cpu = smp_processor_id();  in do_cmci_discover()
    536  unsigned int i, cpu = smp_processor_id();  in cmci_discover()
    614  unsigned int i, cpu = smp_processor_id();  in clear_cmci()
    672  int cpu = smp_processor_id();  in intel_init_cmci()
    770  if ( smp_processor_id() == 0 )  in intel_init_mca()
    795  smp_processor_id(), first, ext_num,  in intel_init_mca()
    981  unsigned int cpu = smp_processor_id();  in intel_mcheck_init()
|
mce.c
    225  cpu_nr = smp_processor_id();  in mca_init_global()
    446  unsigned int cpu = smp_processor_id();  in mcheck_cmn_handler()
    586  smp_processor_id());  in show_mca_info()
    613  int cpu = smp_processor_id();  in set_poll_bankmask()
    640  unsigned int nr, cpu = smp_processor_id();  in mca_cap_init()
    738  unsigned int cpu = smp_processor_id();  in mcheck_init()
    811  cpu_bank_free(smp_processor_id());  in mcheck_init()
    937  int cpu = smp_processor_id();  in do_mc_get_cpu_info()
    1233  mci->mcinj_cpunr, smp_processor_id(),  in x86_mc_msrinject()
    1250  printk("Simulating #MC on cpu %d\n", smp_processor_id());  in x86_mc_mceinject()
    [all …]
|
/xen/xen/arch/x86/cpu/
intel.c
    445  smp_processor_id(), ecx, ebx, eax, val / eax);  in intel_log_freq()
    449  printk("CPU%u: TSC:", smp_processor_id());  in intel_log_freq()
    463  printk("CPU%u:", smp_processor_id());  in intel_log_freq()
    532  printk("CPU%u: ", smp_processor_id());  in intel_log_freq()
    563  smp_processor_id(), global_ctrl,  in init_intel()
|
amd.c
    488  smp_processor_id(), msr_content);  in disable_c1e()
    532  cpu = smp_processor_id();  in amd_get_topology()
    667  smp_processor_id(), FREQ(val), FREQ(lo), FREQ(hi));  in amd_log_freq()
    670  smp_processor_id(), FREQ(lo), FREQ(hi));  in amd_log_freq()
    672  printk("CPU%u: %lu MHz\n", smp_processor_id(), FREQ(lo));  in amd_log_freq()
    981  if (smp_processor_id() == 0)  in amd_check_zenbleed()
    1235  smp_processor_id());  in init_amd()
    1244  smp_processor_id());  in init_amd()
    1296  if ((smp_processor_id() == 1) && !cpu_has(c, X86_FEATURE_ITSC))  in init_amd()
|
/xen/xen/arch/riscv/include/asm/
current.h
    28  #define get_per_cpu_offset() __per_cpu_offset[smp_processor_id()]
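Here smp_processor_id() feeds the per-CPU machinery: each CPU's per-CPU data block sits at __per_cpu_offset[cpu] away from the variables' link-time addresses. A standalone model of that addressing scheme (per_cpu_area, my_counter_template, this_cpu_counter() and current_cpu are illustrative names, not Xen's):

```c
#include <stdint.h>
#include <stdio.h>

#define NR_CPUS 4

/* Each CPU's private data block (the real hypervisor allocates these at boot). */
static int per_cpu_area[NR_CPUS][16];

/* Distance from a per-CPU variable's link-time address to CPU n's copy. */
static uintptr_t __per_cpu_offset[NR_CPUS];

static unsigned int current_cpu;                 /* stand-in for smp_processor_id() */
#define smp_processor_id()    (current_cpu)
#define get_per_cpu_offset()  __per_cpu_offset[smp_processor_id()]

/* The "template" per-CPU variable; each CPU's live copy resides in its block. */
static int my_counter_template;
#define this_cpu_counter() \
    (*(int *)((char *)&my_counter_template + get_per_cpu_offset()))

int main(void)
{
    /* Relies on flat-address arithmetic, just like the real per-CPU scheme. */
    for ( unsigned int cpu = 0; cpu < NR_CPUS; cpu++ )
        __per_cpu_offset[cpu] =
            (uintptr_t)per_cpu_area[cpu] - (uintptr_t)&my_counter_template;

    for ( current_cpu = 0; current_cpu < NR_CPUS; current_cpu++ )
        this_cpu_counter() = 10 * (int)current_cpu;   /* each CPU writes its own copy */

    for ( current_cpu = 0; current_cpu < NR_CPUS; current_cpu++ )
        printf("CPU%u counter = %d\n", current_cpu, this_cpu_counter());

    return 0;
}
```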
|
/xen/xen/arch/x86/cpu/microcode/
core.c
    303  smp_processor_id(), __builtin_return_address(0));  in wait_for_condition()
    375  cpumask_set_cpu(smp_processor_id(), &cpu_callin_map);  in secondary_nmi_work()
    384  cpumask_set_cpu(smp_processor_id(), &cpu_callin_map);  in primary_thread_work()
    472  unsigned int cpu = smp_processor_id(), done;  in control_thread_fn()
    554  unsigned int cpu = smp_processor_id();  in do_microcode_update()
|
/xen/xen/arch/x86/guest/hyperv/
hyperv.c
    128  smp_processor_id());  in setup_hypercall_pcpu_arg()
    151  smp_processor_id());  in setup_vp_assist()
|