Searched refs:cpu_smt_mask (Results 1 – 13 of 13) sorted by relevance
/linux/drivers/platform/x86/intel/ifs/
  runtest.c
       94  cpumask_pr_args(cpu_smt_mask(cpu)),                 in message_not_tested()
       98  cpumask_pr_args(cpu_smt_mask(cpu)));                in message_not_tested()
      101  cpumask_pr_args(cpu_smt_mask(cpu)),                 in message_not_tested()
      200  first = cpumask_first(cpu_smt_mask(cpu));           in doscan()
      321  first = cpumask_first(cpu_smt_mask(cpu));           in do_array_test()
      374  first = cpumask_first(cpu_smt_mask(cpu));           in do_array_test_gen1()
      441  cpumask_pr_args(cpu_smt_mask(cpu)),                 in sbaf_message_not_tested()
      445  cpumask_pr_args(cpu_smt_mask(cpu)));                in sbaf_message_not_tested()
      448  cpumask_pr_args(cpu_smt_mask(cpu)),                 in sbaf_message_not_tested()
      461  cpumask_pr_args(cpu_smt_mask(cpu)));                in sbaf_message_fail()
      [all …]
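The runtest.c hits show the two ways the In-Field Scan driver uses the mask: cpumask_first() picks one SMT sibling per core so a test is kicked off only once per core, and cpumask_pr_args() prints the sibling mask in the not-tested/fail messages. A minimal sketch of that pattern follows; the function name and message are hypothetical, not the driver's real code.

    #include <linux/cpumask.h>
    #include <linux/printk.h>
    #include <linux/topology.h>

    /* Illustrative only: do per-core work once, from the first SMT
     * sibling, and report the whole sibling mask. */
    static void hypothetical_core_test(int cpu)
    {
            const struct cpumask *smt = cpu_smt_mask(cpu);

            /* Only the first sibling of the core starts the test. */
            if (cpu != cpumask_first(smt))
                    return;

            /* "%*pbl" with cpumask_pr_args() prints the mask as a CPU list. */
            pr_info("testing SMT siblings %*pbl of cpu%d\n",
                    cpumask_pr_args(smt), cpu);
    }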
/linux/arch/powerpc/include/asm/
  smp.h
      140  #define cpu_smt_mask cpu_smt_mask                               macro
      142  static inline const struct cpumask *cpu_smt_mask(int cpu)       in cpu_smt_mask()  function
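powerpc provides its own cpu_smt_mask() and defines a macro of the same name at smp.h:140; that macro is exactly what the generic header tests with !defined(cpu_smt_mask) before supplying a fallback. A sketch of that arch-override pattern, using a hypothetical per-CPU mask rather than powerpc's real data:

    #include <linux/cpumask.h>
    #include <linux/percpu.h>

    /* Hypothetical per-CPU sibling map standing in for the arch's own. */
    DECLARE_PER_CPU(cpumask_var_t, my_arch_smt_map);

    /* Defining the macro makes the generic fallback at topology.h:236
     * compile out, so this arch version is the one everyone sees. */
    #define cpu_smt_mask cpu_smt_mask
    static inline const struct cpumask *cpu_smt_mask(int cpu)
    {
            return per_cpu(my_arch_smt_map, cpu);
    }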
/linux/include/linux/
  topology.h
      236  #if defined(CONFIG_SCHED_SMT) && !defined(cpu_smt_mask)
      237  static inline const struct cpumask *cpu_smt_mask(int cpu)       in cpu_smt_mask()  function
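The generic definition only exists when CONFIG_SCHED_SMT is set and no architecture macro is in place. Only lines 236-237 appear in the hits; the body below is an assumption about how the fallback completes, deferring to the thread-sibling mask defined earlier in the same header.

    #if defined(CONFIG_SCHED_SMT) && !defined(cpu_smt_mask)
    static inline const struct cpumask *cpu_smt_mask(int cpu)
    {
            /* Assumed body: defer to the arch's thread-sibling mask
             * (topology_sibling_cpumask() defaults to cpumask_of(cpu)). */
            return topology_sibling_cpumask(cpu);
    }
    #endif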
/linux/kernel/sched/
  core_sched.c
      242  const struct cpumask *smt_mask = cpu_smt_mask(cpu_of(rq));      in __sched_core_account_forceidle()
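__sched_core_account_forceidle() resolves the SMT mask of the runqueue's CPU so force-idle time can be charged across the whole core. The runqueue internals are private to the scheduler, so the sketch below only shows the sibling walk that kind of accounting relies on, using a plain CPU number and idle_cpu(); the helper name is hypothetical.

    #include <linux/cpumask.h>
    #include <linux/sched.h>
    #include <linux/topology.h>

    /* Count the SMT siblings of @cpu that are currently idle; core
     * scheduling's force-idle accounting walks this same mask. */
    static unsigned int hypothetical_idle_siblings(int cpu)
    {
            unsigned int nr = 0;
            int sibling;

            for_each_cpu(sibling, cpu_smt_mask(cpu)) {
                    if (sibling != cpu && idle_cpu(sibling))
                            nr++;
            }

            return nr;
    }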
  core.c
      369  const struct cpumask *smt_mask = cpu_smt_mask(cpu);             in sched_core_lock()
      379  const struct cpumask *smt_mask = cpu_smt_mask(cpu);             in sched_core_unlock()
      399  const struct cpumask *smt_mask = cpu_smt_mask(cpu);             in __sched_core_flip()
     6095  smt_mask = cpu_smt_mask(cpu);                                   in pick_next_task()
     6388  const struct cpumask *smt_mask = cpu_smt_mask(cpu);             in sched_core_cpu_starting()
     6427  const struct cpumask *smt_mask = cpu_smt_mask(cpu);             in sched_core_cpu_deactivate()
     8100  if (cpumask_weight(cpu_smt_mask(cpu)) == 2)                     in sched_smt_present_inc()
     8108  if (cpumask_weight(cpu_smt_mask(cpu)) == 2)                     in sched_smt_present_dec()
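Most of the core.c hits are core scheduling grabbing the same mask for every sibling runqueue (locking, flipping core state, task picking), but the last two are hotplug-time bookkeeping: cpumask_weight() == 2 catches the moment a core gains its second online thread, or is about to drop back to one. A sketch of that check with a hypothetical caller:

    #include <linux/cpumask.h>
    #include <linux/printk.h>
    #include <linux/topology.h>

    /* Illustrative hotplug-time check mirroring core.c:8100/8108: a
     * weight of exactly 2 means this CPU's core is an SMT pair right
     * at this online/offline transition. */
    static void hypothetical_smt_online_check(int cpu)
    {
            if (cpumask_weight(cpu_smt_mask(cpu)) == 2)
                    pr_info("cpu%d: core now has two online SMT siblings\n", cpu);
    }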
  topology.c
     1313  cpumask_andnot(mask, mask, cpu_smt_mask(cpu));                  in init_sched_groups_capacity()
     1704  { cpu_smt_mask, cpu_smt_flags, SD_INIT_NAME(SMT) },
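Line 1313 removes an entire core's siblings from a working mask so each core is visited only once, and line 1704 is the default topology table entry that registers cpu_smt_mask as the SMT level's mask function. The first of those is a reusable idiom; a hedged sketch with a hypothetical helper:

    #include <linux/cpumask.h>
    #include <linux/topology.h>

    /* Visit @mask one core at a time: take the first remaining CPU, do
     * the per-core work, then strip its whole SMT mask so the siblings
     * are not handled again (the cpumask_andnot() idiom at
     * topology.c:1313). */
    static void hypothetical_for_each_core(struct cpumask *mask)
    {
            while (!cpumask_empty(mask)) {
                    int cpu = cpumask_first(mask);

                    /* ... per-core work for @cpu would go here ... */

                    cpumask_andnot(mask, mask, cpu_smt_mask(cpu));
            }
    }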
  fair.c
     1394  for_each_cpu(sibling, cpu_smt_mask(cpu)) {                      in is_core_idle()
     7611  for_each_cpu(cpu, cpu_smt_mask(core)) {                         in __update_idle_core()
     7634  for_each_cpu(cpu, cpu_smt_mask(core)) {                         in select_idle_core()
     7653  cpumask_andnot(cpus, cpus, cpu_smt_mask(core));                 in select_idle_core()
     7664  for_each_cpu_and(cpu, cpu_smt_mask(target), p->cpus_ptr) {      in select_idle_smt()
    11645  cpumask_andnot(swb_cpus, swb_cpus, cpu_smt_mask(cpu));          in should_we_balance()
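The fair.c hits are all variations on one question: is this core, or a sibling in it, idle enough to place a task on? Only the loop header at 1394 is in the results, so the reconstruction below of an is_core_idle()-style check is an assumption beyond that line.

    #include <linux/cpumask.h>
    #include <linux/sched.h>
    #include <linux/topology.h>

    /* A core counts as idle when every sibling other than @cpu itself
     * is idle. */
    static bool hypothetical_is_core_idle(int cpu)
    {
            int sibling;

            for_each_cpu(sibling, cpu_smt_mask(cpu)) {      /* fair.c:1394 */
                    if (sibling == cpu)
                            continue;
                    if (!idle_cpu(sibling))
                            return false;
            }

            return true;
    }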
  ext.c
     2754  const struct cpumask *smt_mask = cpu_smt_mask(cpu_of(rq));      in balance_scx()
     3059  const struct cpumask *smt = cpu_smt_mask(cpu);                  in test_and_clear_cpu_idle()
     3250  const struct cpumask *smt = cpu_smt_mask(cpu);                  in __scx_update_idle()
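sched_ext keeps its own idle-CPU tracking, and the hits at 3059/3250 fetch the SMT mask so a core is only treated as idle when every sibling is. That whole-core condition can be written as a subset test; the idle mask below is a hypothetical stand-in for sched_ext's internal state, not its real data structure.

    #include <linux/cpumask.h>
    #include <linux/topology.h>

    static struct cpumask hypothetical_idle_cpus;   /* stand-in idle tracker */

    /* The core containing @cpu is fully idle only if all of its SMT
     * siblings are still set in the idle mask. */
    static bool hypothetical_core_fully_idle(int cpu)
    {
            return cpumask_subset(cpu_smt_mask(cpu), &hypothetical_idle_cpus);
    }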
  sched.h
     1412  for_each_cpu(cpu, cpu_smt_mask(cpu_of(rq))) {                   in sched_core_cookie_match()
/linux/kernel/
  stop_machine.c
      637  const struct cpumask *smt_mask = cpu_smt_mask(cpu);             in stop_core_cpuslocked()
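stop_core_cpuslocked() turns a single CPU number into that core's SMT mask and rendezvouses all of the siblings in the stopper; the IFS hits above are its main user. A hedged usage sketch, with an illustrative no-op callback:

    #include <linux/cpu.h>
    #include <linux/stop_machine.h>

    /* Runs on every SMT sibling of the chosen core under the stopper. */
    static int hypothetical_core_fn(void *data)
    {
            return 0;
    }

    static int hypothetical_run_on_core(unsigned int cpu)
    {
            int ret;

            cpus_read_lock();       /* the _cpuslocked variant expects the hotplug lock held */
            ret = stop_core_cpuslocked(cpu, hypothetical_core_fn, NULL);
            cpus_read_unlock();

            return ret;
    }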
  workqueue.c
     7964  return cpumask_test_cpu(cpu0, cpu_smt_mask(cpu1));              in cpus_share_smt()
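The workqueue helper is a one-line test: two CPUs share a core exactly when one of them sits in the other's SMT mask. The same check, standalone and with a hypothetical name:

    #include <linux/cpumask.h>
    #include <linux/topology.h>

    /* True when @cpu0 and @cpu1 are SMT siblings of the same core, as
     * at workqueue.c:7964. */
    static bool hypothetical_cpus_share_smt(int cpu0, int cpu1)
    {
            return cpumask_test_cpu(cpu0, cpu_smt_mask(cpu1));
    }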
/linux/arch/x86/kernel/
  smpboot.c
      521  cpu_smt_mask, x86_smt_flags, SD_INIT_NAME(SMT)                  in build_sched_topology()
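Both x86 here and powerpc below pass cpu_smt_mask as the mask function of the SMT level when they build their sched-domain topology tables, each with its own flags callback. A simplified sketch of what such a table entry looks like; the array is hypothetical and omits the CLUSTER/MC/PKG levels an architecture would also register.

    #include <linux/sched/topology.h>

    static struct sched_domain_topology_level hypothetical_topology[] = {
    #ifdef CONFIG_SCHED_SMT
            /* SMT level: group CPUs by cpu_smt_mask(). */
            { cpu_smt_mask, cpu_smt_flags, SD_INIT_NAME(SMT) },
    #endif
            /* ... CLUSTER/MC/PKG levels would follow here ... */
            { NULL, },
    };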
/linux/arch/powerpc/kernel/
  smp.c
     1705  cpu_smt_mask, powerpc_smt_flags, SD_INIT_NAME(SMT)              in build_sched_topology()
Completed in 94 milliseconds