
Searched refs:cpu_mask (Results 1 – 25 of 77) sorted by relevance


/linux/arch/riscv/kernel/
sbi.c
40 for_each_cpu(cpuid, cpu_mask) { in __sbi_v01_cpumask_to_hartmask()
121 if (!cpu_mask || cpumask_empty(cpu_mask)) in __sbi_rfence_v01()
122 cpu_mask = cpu_online_mask; in __sbi_rfence_v01()
264 if (!cpu_mask || cpumask_empty(cpu_mask)) in __sbi_rfence_v02()
265 cpu_mask = cpu_online_mask; in __sbi_rfence_v02()
267 for_each_cpu(cpuid, cpu_mask) { in __sbi_rfence_v02()
332 cpu_mask, 0, 0, 0, 0); in sbi_remote_fence_i()
355 cpu_mask, start, size, 0, 0); in sbi_remote_sfence_vma_asid()
376 cpu_mask, start, size, 0, 0); in sbi_remote_hfence_gvma()
397 cpu_mask, start, size, vmid, 0); in sbi_remote_hfence_gvma_vmid()
[all …]
/linux/arch/powerpc/platforms/pseries/
hotplug-cpu.c
154 cpumask_var_t *cpu_mask) in find_cpu_id_range() argument
163 cpumask_clear(*cpu_mask); in find_cpu_id_range()
165 cpumask_set_cpu(cpu, *cpu_mask); in find_cpu_id_range()
189 while (!cpumask_empty(*cpu_mask)) { in find_cpu_id_range()
193 cpumask_shift_left(*cpu_mask, *cpu_mask, nthreads); in find_cpu_id_range()
196 if (!cpumask_empty(*cpu_mask)) in find_cpu_id_range()
215 cpumask_var_t cpu_mask; in pseries_add_processor() local
255 for_each_cpu(cpu, cpu_mask) { in pseries_add_processor()
270 cpu = cpumask_first(cpu_mask); in pseries_add_processor()
278 cpu_mask); in pseries_add_processor()
[all …]
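The pseries hot-add path above builds a candidate CPU set in a cpumask_var_t and walks it. A minimal, self-contained sketch of that allocate/populate/iterate pattern (the helper name and the pr_info() are illustrative, not taken from hotplug-cpu.c):

	#include <linux/cpumask.h>
	#include <linux/errno.h>
	#include <linux/gfp.h>
	#include <linux/printk.h>

	/* Hypothetical helper: collect one thread group's CPU ids and walk them. */
	static int walk_thread_group(unsigned int first_cpu, unsigned int nthreads)
	{
		cpumask_var_t cpu_mask;
		unsigned int cpu;

		if (!zalloc_cpumask_var(&cpu_mask, GFP_KERNEL))
			return -ENOMEM;

		for (cpu = first_cpu; cpu < first_cpu + nthreads; cpu++)
			cpumask_set_cpu(cpu, cpu_mask);

		for_each_cpu(cpu, cpu_mask)
			pr_info("candidate cpu %u\n", cpu);

		free_cpumask_var(cpu_mask);
		return 0;
	}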
/linux/scripts/gdb/linux/
cpus.py
52 cpu_mask = {} variable
56 global cpu_mask
57 cpu_mask = {}
64 global cpu_mask
66 if mask_name in cpu_mask:
67 mask = cpu_mask[mask_name]
71 cpu_mask[mask_name] = mask
/linux/lib/
group_cpus.c
132 const struct cpumask *cpu_mask, in alloc_nodes_groups() argument
147 cpumask_and(nmsk, cpu_mask, node_to_cpumask[n]); in alloc_nodes_groups()
251 const struct cpumask *cpu_mask, in __group_cpus_evenly() argument
260 if (cpumask_empty(cpu_mask)) in __group_cpus_evenly()
263 nodes = get_nodes_in_cpumask(node_to_cpumask, cpu_mask, &nodemsk); in __group_cpus_evenly()
272 cpumask_and(nmsk, cpu_mask, node_to_cpumask[n]); in __group_cpus_evenly()
287 alloc_nodes_groups(numgrps, node_to_cpumask, cpu_mask, in __group_cpus_evenly()
297 cpumask_and(nmsk, cpu_mask, node_to_cpumask[nv->id]); in __group_cpus_evenly()
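Both helpers above repeatedly intersect the caller's cpu_mask with each NUMA node's CPUs. A minimal sketch of that core step (the wrapper name is illustrative; nmsk is a caller-provided scratch mask, as in group_cpus.c):

	#include <linux/cpumask.h>
	#include <linux/topology.h>

	/* Count how many CPUs of 'node' are present in 'cpu_mask'; the
	 * intersection is left in the scratch mask 'nmsk'. */
	static unsigned int cpus_of_node_in_mask(const struct cpumask *cpu_mask,
						 int node, struct cpumask *nmsk)
	{
		cpumask_and(nmsk, cpu_mask, cpumask_of_node(node));
		return cpumask_weight(nmsk);
	}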
/linux/arch/riscv/include/asm/
sbi.h
330 int sbi_remote_fence_i(const struct cpumask *cpu_mask);
332 int sbi_remote_sfence_vma_asid(const struct cpumask *cpu_mask,
336 int sbi_remote_hfence_gvma(const struct cpumask *cpu_mask,
339 int sbi_remote_hfence_gvma_vmid(const struct cpumask *cpu_mask,
343 int sbi_remote_hfence_vvma(const struct cpumask *cpu_mask,
346 int sbi_remote_hfence_vvma_asid(const struct cpumask *cpu_mask,
402 static inline int sbi_remote_fence_i(const struct cpumask *cpu_mask) { return -1; } in sbi_remote_fence_i() argument
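These declarations are the SBI remote-fence API that the sbi.c hits above implement; passing NULL or an empty mask falls back to cpu_online_mask. A minimal caller sketch (the wrapper name is illustrative):

	#include <linux/cpumask.h>
	#include <asm/sbi.h>

	/* Ask the SBI firmware to execute FENCE.I on every online hart. */
	static int remote_fence_i_all_harts(void)
	{
		return sbi_remote_fence_i(cpu_online_mask);
	}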
/linux/tools/power/x86/amd_pstate_tracer/
amd_pstate_trace.py
155 …, freq_ghz, mperf, aperf, tsc, common_comm, load, duration_ms, sample_num, elapsed_time, cpu_mask): argument
160 if cpu_mask[cpu_int] == 0:
185 def read_trace_data(file_name, cpu_mask): argument
234 …f, freq_ghz, mperf, aperf, tsc, common_comm, load, duration_ms, sample_num, elapsed_time, cpu_mask)
239 ipt.split_csv(current_max_cpu, cpu_mask)
263 cpu_mask = zeros((MAX_CPUS,), dtype=int) variable
297 cpu_mask[int(p)] = 1
300 cpu_mask[i] = 1
330 read_trace_data(file_name, cpu_mask)
/linux/arch/x86/kernel/cpu/resctrl/
rdtgroup.c
387 cpumask_or(&prgrp->cpu_mask, &prgrp->cpu_mask, tmpmask); in cpus_mon_write()
401 cpumask_andnot(&crgrp->cpu_mask, &crgrp->cpu_mask, in cpus_mon_write()
417 cpumask_andnot(&r->cpu_mask, &r->cpu_mask, m); in cpumask_rdtgrp_clear()
420 cpumask_and(&crgrp->cpu_mask, &r->cpu_mask, &crgrp->cpu_mask); in cpumask_rdtgrp_clear()
470 cpumask_and(tmpmask, &rdtgrp->cpu_mask, &crgrp->cpu_mask); in cpus_ctrl_write()
472 cpumask_clear(&crgrp->cpu_mask); in cpus_ctrl_write()
2261 cpumask_var_t cpu_mask; in set_cache_qos_cfg() local
2285 cpumask_set_cpu(cpumask_any(&d->hdr.cpu_mask), cpu_mask); in set_cache_qos_cfg()
2291 free_cpumask_var(cpu_mask); in set_cache_qos_cfg()
2934 &rdtgroup_default.cpu_mask, &rdtgrp->cpu_mask); in rmdir_all_sub()
[all …]
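The cpus_mon_write()/cpus_ctrl_write() hits above all perform the same bookkeeping: clear a set of CPUs out of one group's cpu_mask and set them in another's. A minimal sketch with an illustrative stand-in struct (not the real struct rdtgroup):

	#include <linux/cpumask.h>

	struct grp {				/* illustrative stand-in */
		struct cpumask cpu_mask;
	};

	/* Move the CPUs in 'moved' from 'from' to 'to'. */
	static void move_cpus(struct grp *from, struct grp *to,
			      const struct cpumask *moved)
	{
		cpumask_andnot(&from->cpu_mask, &from->cpu_mask, moved);
		cpumask_or(&to->cpu_mask, &to->cpu_mask, moved);
	}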
core.c
370 if (cpumask_test_cpu(cpu, &d->hdr.cpu_mask)) in get_ctrl_domain_from_cpu()
385 if (cpumask_test_cpu(cpu, &d->hdr.cpu_mask)) in get_mon_domain_from_cpu()
552 cpumask_set_cpu(cpu, &d->hdr.cpu_mask); in domain_add_cpu_ctrl()
565 cpumask_set_cpu(cpu, &d->hdr.cpu_mask); in domain_add_cpu_ctrl()
607 cpumask_set_cpu(cpu, &d->hdr.cpu_mask); in domain_add_cpu_mon()
624 cpumask_set_cpu(cpu, &d->hdr.cpu_mask); in domain_add_cpu_mon()
679 cpumask_clear_cpu(cpu, &d->hdr.cpu_mask); in domain_remove_cpu_ctrl()
680 if (cpumask_empty(&d->hdr.cpu_mask)) { in domain_remove_cpu_ctrl()
725 cpumask_clear_cpu(cpu, &d->hdr.cpu_mask); in domain_remove_cpu_mon()
726 if (cpumask_empty(&d->hdr.cpu_mask)) { in domain_remove_cpu_mon()
monitor.c
276 int cpu = cpumask_any(&d->hdr.cpu_mask); in resctrl_arch_reset_rmid()
321 int cpu = cpumask_any(&d->hdr.cpu_mask); in resctrl_arch_rmid_read()
611 if (!cpumask_test_cpu(cpu, &rr->d->hdr.cpu_mask)) in __mon_event_count()
883 d->cqm_work_cpu = cpumask_any_housekeeping(&d->hdr.cpu_mask, in cqm_handle_limbo()
907 cpu = cpumask_any_housekeeping(&dom->hdr.cpu_mask, exclude_cpu); in cqm_setup_limbo_handler()
950 d->mbm_work_cpu = cpumask_any_housekeeping(&d->hdr.cpu_mask, in mbm_handle_overflow()
979 cpu = cpumask_any_housekeeping(&dom->hdr.cpu_mask, exclude_cpu); in mbm_setup_overflow_handler()
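The monitor code above repeatedly picks one CPU out of a domain's cpu_mask to run per-domain work on. A simplified sketch of that pattern using plain cpumask_any() plus smp_call_function_single() (resctrl itself uses cpumask_any_housekeeping() and workqueues; the callback here is illustrative):

	#include <linux/cpumask.h>
	#include <linux/errno.h>
	#include <linux/smp.h>
	#include <linux/types.h>

	static void read_counter(void *info)	/* illustrative callback */
	{
		*(u64 *)info = 0;		/* a real one would read a hardware counter */
	}

	/* Run read_counter() on some CPU belonging to the domain. */
	static int read_on_domain_cpu(const struct cpumask *domain_mask, u64 *val)
	{
		unsigned int cpu = cpumask_any(domain_mask);

		if (cpu >= nr_cpu_ids)
			return -ENODEV;
		return smp_call_function_single(cpu, read_counter, val, 1);
	}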
/linux/tools/power/x86/intel_pstate_tracer/
intel_pstate_tracer.py
326 …, freq_ghz, io_boost, common_comm, load, duration_ms, sample_num, elapsed_time, tsc_ghz, cpu_mask): argument
331 if cpu_mask[cpu_int] == 0:
345 def split_csv(current_max_cpu, cpu_mask): argument
350 if cpu_mask[int(index)] != 0:
420 def read_trace_data(filename, cpu_mask): argument
480 …c, freq_ghz, io_boost, common_comm, load, duration_ms, sample_num, elapsed_time, tsc_ghz, cpu_mask)
486 split_csv(current_max_cpu, cpu_mask)
511 cpu_mask = zeros((MAX_CPUS,), dtype=int) variable
544 cpu_mask[int(p)] = 1
547 cpu_mask[i] = 1
[all …]
/linux/arch/x86/events/amd/
power.c
41 static cpumask_t cpu_mask; variable
149 return cpumap_print_to_pagebuf(true, buf, &cpu_mask); in get_attr_cpumask()
223 if (!cpumask_test_and_clear_cpu(cpu, &cpu_mask)) in power_cpu_exit()
233 cpumask_set_cpu(target, &cpu_mask); in power_cpu_exit()
254 cpumask_set_cpu(cpu, &cpu_mask); in power_cpu_init()
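power.c keeps a global cpu_mask of the CPUs that own the PMU context and exposes it through a sysfs attribute via cpumap_print_to_pagebuf(). A minimal sketch of that show() pattern (device and attribute names are illustrative):

	#include <linux/cpumask.h>
	#include <linux/device.h>

	static cpumask_t cpu_mask;

	static ssize_t cpumask_show(struct device *dev,
				    struct device_attribute *attr, char *buf)
	{
		/* 'true' selects the list format, e.g. "0-3,8". */
		return cpumap_print_to_pagebuf(true, buf, &cpu_mask);
	}
	static DEVICE_ATTR_RO(cpumask);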
/linux/arch/arm/mach-omap2/
clkt2xxx_virt_prcm_set.c
42 static u16 cpu_mask; variable
82 if (!(ptr->flags & cpu_mask)) in omap2_round_to_table_rate()
106 if (!(prcm->flags & cpu_mask)) in omap2_select_table_rate()
179 if (!(prcm->flags & cpu_mask)) in omap2xxx_clkt_vps_check_bootloader_rates()
/linux/kernel/trace/
preemptirq_delay_test.c
120 struct cpumask cpu_mask; in preemptirq_delay_run() local
123 cpumask_clear(&cpu_mask); in preemptirq_delay_run()
124 cpumask_set_cpu(cpu_affinity, &cpu_mask); in preemptirq_delay_run()
125 if (set_cpus_allowed_ptr(current, &cpu_mask)) in preemptirq_delay_run()
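preemptirq_delay_run() pins the test kthread to one CPU with an on-stack mask before generating latencies. The same pinning idiom, as a minimal sketch (helper name illustrative):

	#include <linux/cpumask.h>
	#include <linux/sched.h>

	/* Restrict the current task to a single CPU. */
	static int pin_current_to(unsigned int cpu)
	{
		struct cpumask cpu_mask;

		cpumask_clear(&cpu_mask);
		cpumask_set_cpu(cpu, &cpu_mask);
		return set_cpus_allowed_ptr(current, &cpu_mask);
	}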
/linux/tools/testing/selftests/ring-buffer/
map_test.c
140 cpu_set_t cpu_mask; in FIXTURE_SETUP() local
168 CPU_ZERO(&cpu_mask); in FIXTURE_SETUP()
169 CPU_SET(cpu, &cpu_mask); in FIXTURE_SETUP()
170 ASSERT_EQ(sched_setaffinity(0, sizeof(cpu_mask), &cpu_mask), 0); in FIXTURE_SETUP()
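This selftest uses the userspace cpu_set_t API rather than kernel cpumasks. A standalone sketch of the same affinity setup, pinning the calling thread to one CPU (assumes glibc; error handling simplified):

	#define _GNU_SOURCE
	#include <sched.h>
	#include <stdio.h>

	static int pin_self_to_cpu(int cpu)
	{
		cpu_set_t cpu_mask;

		CPU_ZERO(&cpu_mask);
		CPU_SET(cpu, &cpu_mask);
		return sched_setaffinity(0, sizeof(cpu_mask), &cpu_mask);
	}

	int main(void)
	{
		if (pin_self_to_cpu(0))
			perror("sched_setaffinity");
		return 0;
	}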
/linux/drivers/irqchip/
irq-vf610-mscm-ir.c
43 u16 cpu_mask; member
95 WARN_ON(irsprc & ~chip_data->cpu_mask); in vf610_mscm_ir_enable()
97 writew_relaxed(chip_data->cpu_mask, in vf610_mscm_ir_enable()
210 mscm_ir_data->cpu_mask = 0x1 << cpuid; in vf610_mscm_ir_of_init()
irq-hip04.c
272 unsigned int cpu_mask, cpu = smp_processor_id(); in hip04_irq_cpu_init() local
279 cpu_mask = hip04_get_cpumask(intc); in hip04_irq_cpu_init()
280 hip04_cpu_map[cpu] = cpu_mask; in hip04_irq_cpu_init()
288 hip04_cpu_map[i] &= ~cpu_mask; in hip04_irq_cpu_init()
/linux/tools/testing/selftests/bpf/prog_tests/
cpu_mask.c
33 const char *cpu_mask; member
65 err = parse_cpu_mask_str(test_cases[i].cpu_mask, &mask, &n); in test_cpu_mask()
/linux/tools/perf/tests/
perf-record.c
54 cpu_set_t cpu_mask; in test__PERF_RECORD() local
55 size_t cpu_mask_size = sizeof(cpu_mask); in test__PERF_RECORD()
114 err = sched__get_first_possible_cpu(evlist->workload.pid, &cpu_mask); in test__PERF_RECORD()
126 if (sched_setaffinity(evlist->workload.pid, cpu_mask_size, &cpu_mask) < 0) { in test__PERF_RECORD()
/linux/arch/powerpc/mm/nohash/
tlb.c
236 struct cpumask *cpu_mask; in __flush_tlb_page() local
250 cpu_mask = mm_cpumask(mm); in __flush_tlb_page()
269 smp_call_function_many(cpu_mask, in __flush_tlb_page()
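__flush_tlb_page() broadcasts the flush to every CPU that has used the mm, taken from mm_cpumask(). A simplified sketch of that broadcast step (the callback is illustrative; the real code also handles the local CPU and locking):

	#include <linux/cpumask.h>
	#include <linux/mm_types.h>
	#include <linux/smp.h>

	static void do_flush(void *info)
	{
		/* architecture-specific local TLB flush would go here */
	}

	static void flush_on_mm_cpus(struct mm_struct *mm, void *args)
	{
		const struct cpumask *cpu_mask = mm_cpumask(mm);

		/* Run do_flush() on every other CPU in the mask and wait. */
		smp_call_function_many(cpu_mask, do_flush, args, 1);
	}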
/linux/arch/arm/mm/
proc-arm9tdmi.S
83 .macro arm9tdmi_proc_info name:req, cpu_val:req, cpu_mask:req, cpu_name:req
87 .long \cpu_mask
proc-arm7tdmi.S
89 .macro arm7tdmi_proc_info name:req, cpu_val:req, cpu_mask:req, cpu_name:req, \
94 .long \cpu_mask
/linux/drivers/soc/ti/
knav_qmss_acc.c
200 struct cpumask *cpu_mask; in knav_range_setup_acc_irq() local
207 cpu_mask = range->irqs[0].cpu_mask; in knav_range_setup_acc_irq()
211 cpu_mask = range->irqs[queue].cpu_mask; in knav_range_setup_acc_irq()
234 if (!ret && cpu_mask) { in knav_range_setup_acc_irq()
235 ret = irq_set_affinity_hint(irq, cpu_mask); in knav_range_setup_acc_irq()
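knav_range_setup_acc_irq() requests the queue IRQ and, when the range carries a cpu_mask, turns it into an affinity hint. A minimal sketch of that request-then-hint sequence (the function name and the "queue-acc" string are illustrative):

	#include <linux/cpumask.h>
	#include <linux/interrupt.h>

	static int setup_queue_irq(unsigned int irq, irq_handler_t handler,
				   void *dev, const struct cpumask *cpu_mask)
	{
		int ret = request_irq(irq, handler, 0, "queue-acc", dev);

		if (!ret && cpu_mask)
			ret = irq_set_affinity_hint(irq, cpu_mask);
		return ret;
	}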
/linux/arch/x86/hyperv/
hv_apic.c
201 ipi_arg.cpu_mask = 0; in __send_ipi_mask()
217 __set_bit(vcpu, (unsigned long *)&ipi_arg.cpu_mask); in __send_ipi_mask()
221 ipi_arg.cpu_mask); in __send_ipi_mask()
/linux/io_uring/
io-wq.c
125 cpumask_var_t cpu_mask; member
684 wq->cpu_mask); in io_wq_worker()
736 set_cpus_allowed_ptr(tsk, wq->cpu_mask); in io_init_new_worker()
1169 if (!alloc_cpumask_var(&wq->cpu_mask, GFP_KERNEL)) in io_wq_create()
1171 cpuset_cpus_allowed(data->task, wq->cpu_mask); in io_wq_create()
1200 free_cpumask_var(wq->cpu_mask); in io_wq_create()
1268 free_cpumask_var(wq->cpu_mask); in io_wq_destroy()
1291 cpumask_set_cpu(od->cpu, worker->wq->cpu_mask); in io_wq_worker_affinity()
1293 cpumask_clear_cpu(od->cpu, worker->wq->cpu_mask); in io_wq_worker_affinity()
1339 cpumask_copy(tctx->io_wq->cpu_mask, mask); in io_wq_cpu_affinity()
[all …]
/linux/arch/arm/mach-omap1/
clock_data.c
692 u32 cpu_mask; variable
716 cpu_mask = 0; in omap1_clk_init()
718 cpu_mask |= CK_1710; in omap1_clk_init()
720 cpu_mask |= CK_16XX; in omap1_clk_init()
722 cpu_mask |= CK_1510; in omap1_clk_init()
724 cpu_mask |= CK_310; in omap1_clk_init()
791 if (!(c->cpu & cpu_mask)) in omap1_clk_init()
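omap1_clk_init() builds cpu_mask from CK_* capability bits for the detected SoC and then skips clock entries whose bits do not intersect it. The same filtering idiom, as an illustrative sketch with made-up flag values and a stand-in entry type:

	#include <linux/bits.h>
	#include <linux/types.h>

	#define CK_1510		BIT(0)		/* illustrative values */
	#define CK_16XX		BIT(1)

	struct clk_entry {			/* stand-in for the clock table entry */
		u32 cpu;			/* CK_* bits this clock applies to */
	};

	static u32 cpu_mask;			/* bits for the SoC we booted on */

	static bool clk_applies(const struct clk_entry *c)
	{
		return c->cpu & cpu_mask;
	}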

Completed in 58 milliseconds
