/linux/arch/powerpc/kernel/

watchdog.c
    213  if (cpumask_test_cpu(cpu, &wd_smp_cpus_pending))  in watchdog_smp_panic()
    218  if (!cpumask_test_cpu(c, &wd_smp_cpus_pending))  in watchdog_smp_panic()
    265  if (!cpumask_test_cpu(cpu, &wd_smp_cpus_pending)) {  in wd_smp_clear_cpu_pending()
    266  if (unlikely(cpumask_test_cpu(cpu, &wd_smp_cpus_stuck))) {  in wd_smp_clear_cpu_pending()
    378  if (!cpumask_test_cpu(cpu, &wd_cpus_enabled))  in DEFINE_INTERRUPT_HANDLER_NMI()
    392  if (cpumask_test_cpu(cpu, &wd_smp_cpus_stuck)) {  in DEFINE_INTERRUPT_HANDLER_NMI()
    444  if (!cpumask_test_cpu(cpu, &watchdog_cpumask))  in watchdog_timer_fn()
    460  if (!cpumask_test_cpu(cpu, &watchdog_cpumask))  in arch_touch_nmi_watchdog()
    477  if (cpumask_test_cpu(cpu, &wd_cpus_enabled)) {  in start_watchdog()
    485  if (!cpumask_test_cpu(cpu, &watchdog_cpumask))  in start_watchdog()
    [all …]

/linux/arch/sparc/kernel/

sun4m_smp.c
    66   while (!cpumask_test_cpu(cpuid, &smp_commenced_mask))  in sun4m_cpu_pre_online()
    197  if (cpumask_test_cpu(i, &mask)) {  in sun4m_cross_call()
    213  if (!cpumask_test_cpu(i, &mask))  in sun4m_cross_call()
    221  if (!cpumask_test_cpu(i, &mask))  in sun4m_cross_call()

sun4d_smp.c
    103  while (!cpumask_test_cpu(cpuid, &smp_commenced_mask))  in sun4d_cpu_pre_online()
    321  if (cpumask_test_cpu(i, &mask)) {  in sun4d_cross_call()
    334  if (!cpumask_test_cpu(i, &mask))  in sun4d_cross_call()
    342  if (!cpumask_test_cpu(i, &mask))  in sun4d_cross_call()

leon_smp.c
    98   while (!cpumask_test_cpu(cpuid, &smp_commenced_mask))  in leon_cpu_pre_online()
    409  if (cpumask_test_cpu(i, &mask)) {  in leon_cross_call()
    423  if (!cpumask_test_cpu(i, &mask))  in leon_cross_call()
    432  if (!cpumask_test_cpu(i, &mask))  in leon_cross_call()

/linux/arch/alpha/kernel/

sys_titan.c
    87   if (!cpumask_test_cpu(0, &cpm)) dim0 = &dummy;  in titan_update_irq_hw()
    88   if (!cpumask_test_cpu(1, &cpm)) dim1 = &dummy;  in titan_update_irq_hw()
    89   if (!cpumask_test_cpu(2, &cpm)) dim2 = &dummy;  in titan_update_irq_hw()
    90   if (!cpumask_test_cpu(3, &cpm)) dim3 = &dummy;  in titan_update_irq_hw()
    140  if (cpumask_test_cpu(cpu, &affinity))  in titan_cpu_set_irq_affinity()

/linux/kernel/irq/

cpuhotplug.c
    50   return cpumask_test_cpu(cpu, m);  in irq_needs_fixup()
    202  return cpumask_test_cpu(cpu, hk_mask);  in hk_should_isolate()
    211  !irq_data_get_irq_chip(data) || !cpumask_test_cpu(cpu, affinity))  in irq_restore_affinity_of_irq()

/linux/kernel/time/

tick-broadcast.c
    284  if (!cpumask_test_cpu(cpu, tick_broadcast_on))  in tick_device_uses_broadcast()
    318  ret = cpumask_test_cpu(cpu, tick_broadcast_mask);  in tick_device_uses_broadcast()
    355  if (cpumask_test_cpu(cpu, mask)) {  in tick_do_broadcast()
    576  return cpumask_test_cpu(smp_processor_id(), tick_broadcast_mask);  in tick_resume_check_broadcast()
    631  return cpumask_test_cpu(smp_processor_id(), tick_broadcast_force_mask);  in tick_check_broadcast_expired()
    672  if (cpumask_test_cpu(smp_processor_id(), tick_broadcast_oneshot_mask)) {  in tick_check_oneshot_broadcast_this_cpu()
    831  WARN_ON_ONCE(cpumask_test_cpu(cpu, tick_broadcast_pending_mask));  in ___tick_broadcast_oneshot_control()
    846  if (cpumask_test_cpu(cpu, tick_broadcast_force_mask)) {  in ___tick_broadcast_oneshot_control()

/linux/lib/

nmi_backtrace.c
    61   if (cpumask_test_cpu(this_cpu, to_cpumask(backtrace_mask)))  in nmi_trigger_cpumask_backtrace()
    99   if (cpumask_test_cpu(cpu, to_cpumask(backtrace_mask))) {  in nmi_cpu_backtrace()

/linux/arch/powerpc/mm/

mmu_context.c
    50   if (!cpumask_test_cpu(cpu, mm_cpumask(next))) {  in switch_mm_irqs_off()
    106  VM_WARN_ON_ONCE(!cpumask_test_cpu(cpu, mm_cpumask(prev)));  in switch_mm_irqs_off()

/linux/drivers/perf/

arm_pmu.c
    337  if (!cpumask_test_cpu(smp_processor_id(), &armpmu->supported_cpus))  in armpmu_add()
    511  !cpumask_test_cpu(event->cpu, &armpmu->supported_cpus))  in armpmu_event_init()
    528  if (!cpumask_test_cpu(smp_processor_id(), &armpmu->supported_cpus))  in armpmu_enable()
    540  if (!cpumask_test_cpu(smp_processor_id(), &armpmu->supported_cpus))  in armpmu_disable()
    554  return !cpumask_test_cpu(cpu, &armpmu->supported_cpus);  in armpmu_filter()
    707  if (!cpumask_test_cpu(cpu, &pmu->supported_cpus))  in arm_perf_starting_cpu()
    726  if (!cpumask_test_cpu(cpu, &pmu->supported_cpus))  in arm_perf_teardown_cpu()
    777  if (!cpumask_test_cpu(smp_processor_id(), &armpmu->supported_cpus))  in cpu_pm_pmu_notify()

arm_dsu_pmu.c
    237  if (WARN_ON(!cpumask_test_cpu(smp_processor_id(),  in dsu_pmu_read_counter()
    263  if (WARN_ON(!cpumask_test_cpu(smp_processor_id(),  in dsu_pmu_write_counter()
    429  if (WARN_ON_ONCE(!cpumask_test_cpu(smp_processor_id(),  in dsu_pmu_add()
    549  if (!cpumask_test_cpu(event->cpu, &dsu_pmu->associated_cpus)) {  in dsu_pmu_event_init()
    798  if (!cpumask_test_cpu(cpu, &dsu_pmu->associated_cpus))  in dsu_pmu_cpu_online()

/linux/arch/arm/mach-tegra/

platsmp.c
    93   if (cpumask_test_cpu(cpu, &tegra_cpu_init_mask)) {  in tegra30_boot_secondary()
    135  if (cpumask_test_cpu(cpu, &tegra_cpu_init_mask)) {  in tegra114_boot_secondary()

/linux/arch/arm/include/asm/

cacheflush.h
    220  if (cpumask_test_cpu(smp_processor_id(), mm_cpumask(mm)))  in vivt_flush_cache_mm()
    229  if (!mm || cpumask_test_cpu(smp_processor_id(), mm_cpumask(mm)))  in vivt_flush_cache_range()
    239  if (!mm || cpumask_test_cpu(smp_processor_id(), mm_cpumask(mm))) {  in vivt_flush_cache_pages()

/linux/include/linux/

cpumask.h
    568   bool cpumask_test_cpu(int cpu, const struct cpumask *cpumask)  in cpumask_test_cpu() (definition)
    1117  return cpumask_test_cpu(cpu, cpu_online_mask);  in cpu_online()
    1122  return cpumask_test_cpu(cpu, cpu_enabled_mask);  in cpu_enabled()
    1127  return cpumask_test_cpu(cpu, cpu_possible_mask);  in cpu_possible()
    1132  return cpumask_test_cpu(cpu, cpu_present_mask);  in cpu_present()
    1137  return cpumask_test_cpu(cpu, cpu_active_mask);  in cpu_active()
    1142  return cpumask_test_cpu(cpu, cpu_dying_mask);  in cpu_dying()

mmu_context.h
    28  # define task_cpu_possible(cpu, p) cpumask_test_cpu((cpu), task_cpu_possible_mask(p))

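Every hit in this listing is the same basic operation: a read-only test of whether a given CPU's bit is set in a cpumask, with the definition and the cpu_online()/cpu_possible() family of wrappers shown in include/linux/cpumask.h above. A minimal usage sketch follows, in the same style as the callers listed here; the mask my_driver_cpus and the helper my_driver_cpu_supported() are hypothetical names used only for illustration, not code from any of these files.

    #include <linux/cpumask.h>
    #include <linux/smp.h>

    /* Hypothetical driver-private mask of CPUs this driver may run on. */
    static struct cpumask my_driver_cpus;

    /*
     * Must be called with preemption disabled (as in the PMU and watchdog
     * callbacks above), since it reads the current CPU number.
     */
    static bool my_driver_cpu_supported(void)
    {
            /* True when the bit for this CPU is set; the mask is not modified. */
            return cpumask_test_cpu(smp_processor_id(), &my_driver_cpus);
    }

The test itself is a constant-time bit read, which is why it appears on hot paths such as switch_mm_irqs_off() and the ring buffer accessors below; callers that need to set the bit atomically in the same step use cpumask_test_and_set_cpu() instead.
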
/linux/arch/x86/kernel/apic/

ipi.c
    89   if (cpumask_test_cpu(cpu, mask))  in native_send_call_func_ipi()
    104  if (WARN_ON_ONCE(!cpumask_test_cpu(cpu, &cpus_booted_once_mask)))  in apic_send_nmi_to_offline_cpu()

/linux/drivers/sh/intc/

chip.c
    25   if (!cpumask_test_cpu(cpu, irq_data_get_affinity_mask(data)))  in _intc_enable()
    53   if (!cpumask_test_cpu(cpu, irq_data_get_affinity_mask(data)))  in intc_disable()

/linux/drivers/cpuidle/

coupled.c
    429  if (!cpumask_test_cpu(cpu, &cpuidle_coupled_poke_pending))  in cpuidle_coupled_clear_pokes()
    433  while (cpumask_test_cpu(cpu, &cpuidle_coupled_poke_pending))  in cpuidle_coupled_clear_pokes()
    515  !cpumask_test_cpu(dev->cpu, &cpuidle_coupled_poked)) {  in cpuidle_enter_state_coupled()

/linux/arch/arm/mach-omap2/

cpuidle44xx.c
    133  if (dev->cpu == 0 && cpumask_test_cpu(1, cpu_online_mask)) {  in omap_enter_idle_coupled()
    191  if (dev->cpu == 0 && cpumask_test_cpu(1, cpu_online_mask)) {  in omap_enter_idle_coupled()

/linux/arch/arm64/kernel/

topology.c
    103  if ((cpu >= nr_cpu_ids) || !cpumask_test_cpu(cpu, cpu_present_mask))  in freq_counters_valid()
    316  if ((cpu >= nr_cpu_ids) || !cpumask_test_cpu(cpu, cpu_present_mask))  in cpc_ffh_supported()

/linux/drivers/hwtracing/coresight/

coresight-trbe.c
    1107  if (!cpumask_test_cpu(cpu, &drvdata->supported_cpus))  in is_perf_trbe()
    1375  if (cpumask_test_cpu(cpu, &drvdata->supported_cpus))  in arm_trbe_probe_coresight()
    1377  if (cpumask_test_cpu(cpu, &drvdata->supported_cpus))  in arm_trbe_probe_coresight()
    1406  if (cpumask_test_cpu(cpu, &drvdata->supported_cpus)) {  in arm_trbe_cpu_startup()
    1414  if (cpumask_test_cpu(cpu, &drvdata->supported_cpus))  in arm_trbe_cpu_startup()
    1416  if (cpumask_test_cpu(cpu, &drvdata->supported_cpus))  in arm_trbe_cpu_startup()
    1429  if (cpumask_test_cpu(cpu, &drvdata->supported_cpus))  in arm_trbe_cpu_teardown()

/linux/kernel/sched/

cpufreq.c
    71   return cpumask_test_cpu(smp_processor_id(), policy->cpus) ||  in cpufreq_this_cpu_can_update()

/linux/kernel/trace/

ring_buffer.c
    963   if (!cpumask_test_cpu(cpu, buffer->cpumask))  in ring_buffer_wait()
    1013  if (!cpumask_test_cpu(cpu, buffer->cpumask))  in ring_buffer_poll_wait()
    4600  if (!cpumask_test_cpu(cpu, buffer->cpumask))  in ring_buffer_write()
    4789  if (!cpumask_test_cpu(cpu, buffer->cpumask))  in ring_buffer_record_disable_cpu()
    4809  if (!cpumask_test_cpu(cpu, buffer->cpumask))  in ring_buffer_record_enable_cpu()
    4842  if (!cpumask_test_cpu(cpu, buffer->cpumask))  in ring_buffer_oldest_event_ts()
    4873  if (!cpumask_test_cpu(cpu, buffer->cpumask))  in ring_buffer_bytes_cpu()
    4892  if (!cpumask_test_cpu(cpu, buffer->cpumask))  in ring_buffer_entries_cpu()
    4912  if (!cpumask_test_cpu(cpu, buffer->cpumask))  in ring_buffer_overrun_cpu()
    4935  if (!cpumask_test_cpu(cpu, buffer->cpumask))  in ring_buffer_commit_overrun_cpu()
    [all …]

/linux/drivers/virt/nitro_enclaves/

ne_misc_dev.c
    253   if (cpumask_test_cpu(0, cpu_pool)) {  in ne_setup_cpu_pool()
    262   if (cpumask_test_cpu(cpu_sibling, cpu_pool)) {  in ne_setup_cpu_pool()
    279   if (!cpumask_test_cpu(cpu_sibling, cpu_pool)) {  in ne_setup_cpu_pool()
    482   if (cpumask_test_cpu(cpu, ne_enclave->vcpu_ids))  in ne_donated_cpu()
    632   if (cpumask_test_cpu(vcpu_id, ne_cpu_pool.avail_threads_per_core[i])) {  in ne_get_vcpu_core_from_cpu_pool()
    670   if (cpumask_test_cpu(vcpu_id, ne_enclave->threads_per_core[i]))  in ne_check_cpu_in_cpu_pool()
    1106  if (!cpumask_test_cpu(cpu, ne_enclave->vcpu_ids)) {  in ne_start_enclave_ioctl()

/linux/arch/powerpc/include/asm/

tlb.h
    64   return cpumask_test_cpu(smp_processor_id(), mm_cpumask(mm));  in mm_is_thread_local()