Searched refs:cpumask (Results 1 – 25 of 250) sorted by relevance

/drivers/opp/
cpu.c
107 void _dev_pm_opp_cpumask_remove_table(const struct cpumask *cpumask, in _dev_pm_opp_cpumask_remove_table() argument
113 WARN_ON(cpumask_empty(cpumask)); in _dev_pm_opp_cpumask_remove_table()
115 for_each_cpu(cpu, cpumask) { in _dev_pm_opp_cpumask_remove_table()
138 void dev_pm_opp_cpumask_remove_table(const struct cpumask *cpumask) in dev_pm_opp_cpumask_remove_table() argument
140 _dev_pm_opp_cpumask_remove_table(cpumask, -1); in dev_pm_opp_cpumask_remove_table()
155 const struct cpumask *cpumask) in dev_pm_opp_set_sharing_cpus() argument
166 for_each_cpu(cpu, cpumask) { in dev_pm_opp_set_sharing_cpus()
202 int dev_pm_opp_get_sharing_cpus(struct device *cpu_dev, struct cpumask *cpumask) in dev_pm_opp_get_sharing_cpus() argument
214 cpumask_clear(cpumask); in dev_pm_opp_get_sharing_cpus()
219 cpumask_set_cpu(opp_dev->dev->id, cpumask); in dev_pm_opp_get_sharing_cpus()
[all …]
of.c
1200 void dev_pm_opp_of_cpumask_remove_table(const struct cpumask *cpumask) in dev_pm_opp_of_cpumask_remove_table() argument
1202 _dev_pm_opp_cpumask_remove_table(cpumask, -1); in dev_pm_opp_of_cpumask_remove_table()
1212 int dev_pm_opp_of_cpumask_add_table(const struct cpumask *cpumask) in dev_pm_opp_of_cpumask_add_table() argument
1217 if (WARN_ON(cpumask_empty(cpumask))) in dev_pm_opp_of_cpumask_add_table()
1220 for_each_cpu(cpu, cpumask) { in dev_pm_opp_of_cpumask_add_table()
1246 _dev_pm_opp_cpumask_remove_table(cpumask, cpu); in dev_pm_opp_of_cpumask_add_table()
1270 struct cpumask *cpumask) in dev_pm_opp_of_get_sharing_cpus() argument
1282 cpumask_set_cpu(cpu_dev->id, cpumask); in dev_pm_opp_of_get_sharing_cpus()
1311 cpumask_set_cpu(cpu, cpumask); in dev_pm_opp_of_get_sharing_cpus()
1527 int dev_pm_opp_of_register_em(struct device *dev, struct cpumask *cpus) in dev_pm_opp_of_register_em()
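Note: the OPP hits above show the common flow for CPUs that share a frequency table: query the sharing set from DT, then add (and later remove) the table for the whole mask. A minimal sketch of that flow, assuming cpu_dev was obtained elsewhere (e.g. via get_cpu_device()); example_setup_opp_table() is an illustrative name, not kernel API, and error handling is trimmed:

#include <linux/cpumask.h>
#include <linux/device.h>
#include <linux/pm_opp.h>

static int example_setup_opp_table(struct device *cpu_dev)
{
        struct cpumask shared_cpus;
        int ret;

        cpumask_clear(&shared_cpus);

        /* CPUs declared (via DT "opp-shared") to share cpu_dev's table. */
        ret = dev_pm_opp_of_get_sharing_cpus(cpu_dev, &shared_cpus);
        if (ret)
                /* No sharing information: treat the table as per-CPU. */
                cpumask_set_cpu(cpu_dev->id, &shared_cpus);

        /* Parse and register the OPP table for every CPU in the mask. */
        ret = dev_pm_opp_of_cpumask_add_table(&shared_cpus);
        if (ret)
                return ret;

        /* ... consume the table (cpufreq, energy model, ...) ... */

        dev_pm_opp_of_cpumask_remove_table(&shared_cpus);
        return 0;
}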
/drivers/powercap/
idle_inject.c
89 unsigned long cpumask[]; member
230 cpumask_pr_args(to_cpumask(ii_dev->cpumask))); in idle_inject_start()
260 cpumask_pr_args(to_cpumask(ii_dev->cpumask))); in idle_inject_stop()
279 for_each_cpu(cpu, to_cpumask(ii_dev->cpumask)) { in idle_inject_stop()
331 struct idle_inject_device *idle_inject_register_full(struct cpumask *cpumask, in idle_inject_register_full() argument
341 cpumask_copy(to_cpumask(ii_dev->cpumask), cpumask); in idle_inject_register_full()
346 for_each_cpu(cpu, to_cpumask(ii_dev->cpumask)) { in idle_inject_register_full()
359 for_each_cpu(cpu_rb, to_cpumask(ii_dev->cpumask)) { in idle_inject_register_full()
382 struct idle_inject_device *idle_inject_register(struct cpumask *cpumask) in idle_inject_register() argument
384 return idle_inject_register_full(cpumask, NULL); in idle_inject_register()
[all …]
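Note: idle_inject.c keeps its mask as a trailing unsigned long array and views it through to_cpumask() wherever a struct cpumask * is needed. A hedged sketch of that layout, assuming the allocation is sized with cpumask_size(); example_dev and example_register() are illustrative names:

#include <linux/cpumask.h>
#include <linux/slab.h>

/* Sketch: trailing bitmap sized at run time, viewed through to_cpumask(). */
struct example_dev {
        unsigned int users;
        unsigned long cpumask[];        /* flexible array member */
};

static struct example_dev *example_register(const struct cpumask *cpus)
{
        struct example_dev *d;

        d = kzalloc(sizeof(*d) + cpumask_size(), GFP_KERNEL);
        if (!d)
                return NULL;

        cpumask_copy(to_cpumask(d->cpumask), cpus);
        return d;
}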
/drivers/cpuidle/
cpuidle-big_little.c
144 struct cpumask *cpumask; in bl_idle_driver_init() local
147 cpumask = kzalloc(cpumask_size(), GFP_KERNEL); in bl_idle_driver_init()
148 if (!cpumask) in bl_idle_driver_init()
153 cpumask_set_cpu(cpu, cpumask); in bl_idle_driver_init()
155 drv->cpumask = cpumask; in bl_idle_driver_init()
227 kfree(bl_idle_big_driver.cpumask); in bl_idle_init()
229 kfree(bl_idle_little_driver.cpumask); in bl_idle_init()
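Note: bl_idle_driver_init() above shows the heap-allocated driver mask: kzalloc(cpumask_size(), ...) plus cpumask_set_cpu() for each matching CPU, freed again in bl_idle_init(). A condensed sketch of the same idea, with the real cluster test replaced by a hypothetical match_cpu() predicate:

#include <linux/cpumask.h>
#include <linux/slab.h>

/* Sketch: build a cpumask covering the CPUs one driver instance handles.
 * match_cpu() stands in for the real per-CPU test (cluster id, part
 * number, ...); the caller kfree()s the mask, as bl_idle_init() does. */
static struct cpumask *example_build_driver_mask(bool (*match_cpu)(int cpu))
{
        struct cpumask *mask;
        int cpu;

        mask = kzalloc(cpumask_size(), GFP_KERNEL);
        if (!mask)
                return NULL;

        for_each_possible_cpu(cpu)
                if (match_cpu(cpu))
                        cpumask_set_cpu(cpu, mask);

        return mask;
}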
driver.c
53 for_each_cpu(cpu, drv->cpumask) { in __cpuidle_unset_driver()
73 for_each_cpu(cpu, drv->cpumask) { in __cpuidle_set_driver()
81 for_each_cpu(cpu, drv->cpumask) in __cpuidle_set_driver()
164 if (!drv->cpumask) in __cpuidle_driver_init()
165 drv->cpumask = (struct cpumask *)cpu_possible_mask; in __cpuidle_driver_init()
233 on_each_cpu_mask(drv->cpumask, cpuidle_setup_broadcast_timer, in __cpuidle_register_driver()
252 on_each_cpu_mask(drv->cpumask, cpuidle_setup_broadcast_timer, in __cpuidle_unregister_driver()
371 if (!drv->cpumask) { in cpuidle_driver_state_disabled()
376 for_each_cpu(cpu, drv->cpumask) { in cpuidle_driver_state_disabled()
dt_idle_states.c
97 const cpumask_t *cpumask) in idle_state_valid() argument
109 cpu = cpumask_first(cpumask) + 1; in idle_state_valid()
110 for_each_cpu_from(cpu, cpumask) { in idle_state_valid()
152 const cpumask_t *cpumask; in dt_init_idle_driver() local
163 cpumask = drv->cpumask ? : cpu_possible_mask; in dt_init_idle_driver()
164 cpu_node = of_cpu_device_node_get(cpumask_first(cpumask)); in dt_init_idle_driver()
182 if (!idle_state_valid(state_node, i, cpumask)) { in dt_init_idle_driver()
/drivers/md/
dm-ps-io-affinity.c
16 cpumask_var_t cpumask; member
36 free_cpumask_var(pi->cpumask); in ioa_free_path()
66 if (!zalloc_cpumask_var(&pi->cpumask, GFP_KERNEL)) { in ioa_add_path()
72 ret = cpumask_parse(argv[0], pi->cpumask); in ioa_add_path()
79 for_each_cpu(cpu, pi->cpumask) { in ioa_add_path()
105 free_cpumask_var(pi->cpumask); in ioa_add_path()
171 DMEMIT("%*pb ", cpumask_pr_args(pi->cpumask)); in ioa_status()
201 const struct cpumask *cpumask; in ioa_select_path() local
219 cpumask = cpumask_of_node(node); in ioa_select_path()
220 for_each_cpu(i, cpumask) { in ioa_select_path()
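Note: dm-ps-io-affinity uses the cpumask_var_t variant, which only allocates when CONFIG_CPUMASK_OFFSTACK is set: zalloc_cpumask_var(), cpumask_parse() on the user-supplied mask string, free_cpumask_var() on teardown. A sketch of that parse step; example_path and example_parse_path_mask() are illustrative names:

#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/gfp.h>

struct example_path {
        cpumask_var_t cpumask;  /* CPUs this path is allowed to serve */
};

static int example_parse_path_mask(struct example_path *p, const char *arg)
{
        int ret;

        /* Allocates only when CONFIG_CPUMASK_OFFSTACK is enabled. */
        if (!zalloc_cpumask_var(&p->cpumask, GFP_KERNEL))
                return -ENOMEM;

        /* "arg" is a hexadecimal mask string from userspace, e.g. "f0". */
        ret = cpumask_parse(arg, p->cpumask);
        if (ret) {
                free_cpumask_var(p->cpumask);
                return ret;
        }

        return 0;
}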
/drivers/infiniband/hw/hfi1/
affinity.h
28 struct cpumask mask;
29 struct cpumask used;
64 struct cpumask general_intr_mask;
65 struct cpumask comp_vect_mask;
71 struct cpumask real_cpu_mask;
/drivers/cpufreq/
vexpress-spc-cpufreq.c
280 const struct cpumask *cpumask) in _put_cluster_clk_and_freq_table() argument
292 const struct cpumask *cpumask) in put_cluster_clk_and_freq_table() argument
301 return _put_cluster_clk_and_freq_table(cpu_dev, cpumask); in put_cluster_clk_and_freq_table()
309 _put_cluster_clk_and_freq_table(cdev, cpumask); in put_cluster_clk_and_freq_table()
317 const struct cpumask *cpumask) in _get_cluster_clk_and_freq_table() argument
353 const struct cpumask *cpumask) in get_cluster_clk_and_freq_table() argument
362 ret = _get_cluster_clk_and_freq_table(cpu_dev, cpumask); in get_cluster_clk_and_freq_table()
378 ret = _get_cluster_clk_and_freq_table(cdev, cpumask); in get_cluster_clk_and_freq_table()
401 _put_cluster_clk_and_freq_table(cdev, cpumask); in get_cluster_clk_and_freq_table()
armada-8k-cpufreq.c
46 struct cpumask *cpumask) in armada_8k_get_sharing_cpus() argument
65 cpumask_set_cpu(cpu, cpumask); in armada_8k_get_sharing_cpus()
135 static struct cpumask cpus, shared_cpus; in armada_8k_cpufreq_init()
/drivers/nvdimm/
nd_perf.c
134 const struct cpumask *cpumask; in nvdimm_pmu_cpu_offline() local
157 cpumask = cpumask_of_node(nodeid); in nvdimm_pmu_cpu_offline()
158 target = cpumask_any_but(cpumask, cpu); in nvdimm_pmu_cpu_offline()
220 const struct cpumask *cpumask; in nvdimm_pmu_cpu_hotplug_init() local
233 cpumask = cpumask_of_node(nodeid); in nvdimm_pmu_cpu_hotplug_init()
234 nd_pmu->cpu = cpumask_any(cpumask); in nvdimm_pmu_cpu_hotplug_init()
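Note: nd_perf.c follows the usual PMU hotplug idiom: pick the collecting CPU with cpumask_any() from the device's NUMA node, and on offline hand over to cpumask_any_but() of the same node mask. A hedged sketch of the offline half; example_pmu and its fields are assumptions, only the cpumask/perf helpers are real API:

#include <linux/cpumask.h>
#include <linux/perf_event.h>
#include <linux/topology.h>

/* Illustrative context struct, not taken from the driver. */
struct example_pmu {
        struct pmu pmu;
        int cpu;        /* CPU currently collecting events */
        int nodeid;     /* NUMA node the device is attached to */
};

/* Sketch of a cpuhp offline callback: if the outgoing CPU was the
 * collector, migrate to another CPU of the same node, as above. */
static int example_pmu_cpu_offline(struct example_pmu *ep, unsigned int cpu)
{
        unsigned int target;

        if (cpu != ep->cpu)
                return 0;

        target = cpumask_any_but(cpumask_of_node(ep->nodeid), cpu);
        if (target >= nr_cpu_ids)
                return 0;       /* no other CPU on this node */

        perf_pmu_migrate_context(&ep->pmu, cpu, target);
        ep->cpu = target;
        return 0;
}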
/drivers/irqchip/
irq-bcm6345-l1.c
79 struct cpumask cpumask; member
113 return cpumask_first_and(&intc->cpumask, irq_data_get_affinity_mask(d)); in cpu_for_irq()
187 const struct cpumask *dest, in bcm6345_l1_set_affinity()
198 new_cpu = cpumask_first_and_and(&intc->cpumask, dest, cpu_online_mask); in bcm6345_l1_set_affinity()
309 cpumask_set_cpu(idx, &intc->cpumask); in bcm6345_l1_of_init()
312 if (cpumask_empty(&intc->cpumask)) { in bcm6345_l1_of_init()
329 for_each_cpu(idx, &intc->cpumask) { in bcm6345_l1_of_init()
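Note: bcm6345_l1_set_affinity() above picks one CPU from the intersection of the requested mask and the controller's own mask. A simplified .irq_set_affinity sketch in the same spirit, using only the generic helpers; the per-controller bookkeeping and hardware rerouting are omitted:

#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/irq.h>

static int example_set_affinity(struct irq_data *d,
                                const struct cpumask *dest, bool force)
{
        unsigned int cpu = cpumask_first_and(dest, cpu_online_mask);

        if (cpu >= nr_cpu_ids)
                return -EINVAL;

        /* ... program the interrupt controller to target "cpu" ... */

        irq_data_update_effective_affinity(d, cpumask_of(cpu));
        return IRQ_SET_MASK_OK;
}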
irq-hip04.c
147 const struct cpumask *mask_val, in hip04_irq_set_affinity()
175 static void hip04_ipi_send_mask(struct irq_data *d, const struct cpumask *mask) in hip04_ipi_send_mask()
249 u32 cpumask; in hip04_irq_dist_init() local
258 cpumask = hip04_get_cpumask(intc); in hip04_irq_dist_init()
259 cpumask |= cpumask << 16; in hip04_irq_dist_init()
261 writel_relaxed(cpumask, base + GIC_DIST_TARGET + ((i * 2) & ~3)); in hip04_irq_dist_init()
/drivers/platform/x86/intel/speed_select_if/
isst_tpmi_core.c
1246 static struct isst_perf_level_cpu_mask cpumask; in isst_if_get_perf_level_mask() local
1250 if (copy_from_user(&cpumask, argp, sizeof(cpumask))) in isst_if_get_perf_level_mask()
1253 power_domain_info = get_instance(cpumask.socket_id, cpumask.power_domain_id); in isst_if_get_perf_level_mask()
1261 cpumask.mask = mask; in isst_if_get_perf_level_mask()
1263 if (!cpumask.punit_cpu_map) in isst_if_get_perf_level_mask()
1266 if (copy_to_user(argp, &cpumask, sizeof(cpumask))) in isst_if_get_perf_level_mask()
1331 if (copy_from_user(&cpumask, argp, sizeof(cpumask))) in isst_if_get_base_freq_mask()
1334 power_domain_info = get_instance(cpumask.socket_id, cpumask.power_domain_id); in isst_if_get_base_freq_mask()
1342 cpumask.mask = mask; in isst_if_get_base_freq_mask()
1344 if (!cpumask.punit_cpu_map) in isst_if_get_base_freq_mask()
[all …]
/drivers/sh/intc/
chip.c
69 const struct cpumask *cpumask, in intc_set_affinity() argument
72 if (!cpumask_intersects(cpumask, cpu_online_mask)) in intc_set_affinity()
75 irq_data_update_affinity(data, cpumask); in intc_set_affinity()
/drivers/thermal/intel/
x86_pkg_temp_thermal.c
57 struct cpumask cpumask; member
360 cpumask_set_cpu(cpu, &zonedev->cpumask); in pkg_temp_thermal_device_add()
383 target = cpumask_any_but(&zonedev->cpumask, cpu); in pkg_thermal_cpu_offline()
384 cpumask_clear_cpu(cpu, &zonedev->cpumask); in pkg_thermal_cpu_offline()
472 cpumask_set_cpu(cpu, &zonedev->cpumask); in pkg_thermal_cpu_online()
/drivers/net/ethernet/mellanox/mlx5/core/
mlx5_irq.h
40 struct cpumask *mlx5_irq_get_affinity_mask(struct mlx5_irq *irq);
46 struct cpumask *used_cpus, u16 vecidx);
54 struct cpumask *used_cpus, u16 vecidx) in mlx5_irq_affinity_irq_request_auto()
/drivers/perf/
starfive_starlink_pmu.c
75 cpumask_t cpumask; member
134 return cpumap_print_to_pagebuf(true, buf, &starlink_pmu->cpumask); in cpumask_show()
137 static DEVICE_ATTR_RO(cpumask);
392 event->cpu = cpumask_first(&starlink_pmu->cpumask); in starlink_pmu_event_init()
595 if (cpumask_empty(&starlink_pmu->cpumask)) in starlink_pmu_online_cpu()
596 cpumask_set_cpu(cpu, &starlink_pmu->cpumask); in starlink_pmu_online_cpu()
611 if (!cpumask_test_and_clear_cpu(cpu, &starlink_pmu->cpumask)) in starlink_pmu_offline_cpu()
620 cpumask_set_cpu(target, &starlink_pmu->cpumask); in starlink_pmu_offline_cpu()
qcom_l3_pmu.c
160 cpumask_t cpumask; member
519 event->cpu = cpumask_first(&l3pmu->cpumask); in qcom_l3_cache__event_init()
666 return cpumap_print_to_pagebuf(true, buf, &l3pmu->cpumask); in cpumask_show()
669 static DEVICE_ATTR_RO(cpumask);
699 if (cpumask_empty(&l3pmu->cpumask)) in qcom_l3_cache_pmu_online_cpu()
700 cpumask_set_cpu(cpu, &l3pmu->cpumask); in qcom_l3_cache_pmu_online_cpu()
710 if (!cpumask_test_and_clear_cpu(cpu, &l3pmu->cpumask)) in qcom_l3_cache_pmu_offline_cpu()
716 cpumask_set_cpu(target, &l3pmu->cpumask); in qcom_l3_cache_pmu_offline_cpu()
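Note: both PMU drivers above export the active CPU through a sysfs cpumask attribute backed by cpumap_print_to_pagebuf(), keeping exactly one CPU in the mask via the online/offline callbacks. A sketch of the attribute; example_pmu is illustrative and dev_get_drvdata() is assumed to have been set at probe time:

#include <linux/cpumask.h>
#include <linux/device.h>

struct example_pmu {
        cpumask_t cpumask;      /* the one CPU that services this PMU */
};

static ssize_t cpumask_show(struct device *dev,
                            struct device_attribute *attr, char *buf)
{
        struct example_pmu *ep = dev_get_drvdata(dev);

        /* "true" prints a CPU list ("3"); false would print a hex bitmap. */
        return cpumap_print_to_pagebuf(true, buf, &ep->cpumask);
}
static DEVICE_ATTR_RO(cpumask);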
/drivers/thermal/
thermal_trace.h
116 TP_PROTO(const struct cpumask *cpus, unsigned int freq,
122 __bitmask(cpumask, num_possible_cpus())
129 __assign_bitmask(cpumask, cpumask_bits(cpus),
137 __get_bitmask(cpumask), __entry->freq, __entry->cdev_state,
cpuidle_cooling.c
188 ii_dev = idle_inject_register(drv->cpumask); in __cpuidle_cooling_register()
202 dev = get_cpu_device(cpumask_first(drv->cpumask)); in __cpuidle_cooling_register()
247 for_each_cpu(cpu, drv->cpumask) { in cpuidle_cooling_register()
/drivers/base/
arch_topology.c
30 static struct cpumask scale_freq_counters_mask;
35 static bool supports_scale_freq_counters(const struct cpumask *cpus) in supports_scale_freq_counters()
64 const struct cpumask *cpus) in topology_set_scale_freq_source()
95 const struct cpumask *cpus) in topology_clear_scale_freq_source()
134 void topology_set_freq_scale(const struct cpumask *cpus, unsigned long cur_freq, in topology_set_freq_scale()
173 void topology_update_hw_pressure(const struct cpumask *cpus, in topology_update_hw_pressure()
698 const struct cpumask *cpu_coregroup_mask(int cpu) in cpu_coregroup_mask()
725 const struct cpumask *cpu_clustergroup_mask(int cpu) in cpu_clustergroup_mask()
/drivers/clocksource/
dw_apb_timer.c
108 cpumask_first(evt->cpumask)); in apbt_shutdown()
122 cpumask_first(evt->cpumask)); in apbt_set_oneshot()
155 cpumask_first(evt->cpumask)); in apbt_set_periodic()
179 cpumask_first(evt->cpumask)); in apbt_resume()
242 dw_ced->ced.cpumask = cpu < 0 ? cpu_possible_mask : cpumask_of(cpu); in dw_apb_clockevent_init()
/drivers/hwmon/
coretemp.c
96 struct cpumask cpumask; member
716 if (!cpumask_intersects(&pdata->cpumask, topology_sibling_cpumask(cpu))) in coretemp_cpu_online()
719 cpumask_set_cpu(cpu, &pdata->cpumask); in coretemp_cpu_online()
741 cpumask_clear_cpu(cpu, &pd->cpumask); in coretemp_cpu_offline()
748 target = cpumask_any_and(&pd->cpumask, topology_sibling_cpumask(cpu)); in coretemp_cpu_offline()
761 if (cpumask_empty(&pd->cpumask)) { in coretemp_cpu_offline()
774 target = cpumask_first(&pd->cpumask); in coretemp_cpu_offline()
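Note: coretemp (and x86_pkg_temp_thermal above) keep one reader CPU per package and, when it goes offline, prefer a sibling of the departing CPU before falling back to any remaining package CPU. A condensed sketch of that offline path; example_pkg is an illustrative structure, only the cpumask/topology helpers are real API:

#include <linux/cpumask.h>
#include <linux/topology.h>

struct example_pkg {
        struct cpumask cpumask; /* online CPUs of this package */
        int target_cpu;         /* CPU used to read the sensor */
};

/* Sketch of the offline handler: drop the CPU from the package mask and,
 * if it was the reader, prefer a sibling, then any remaining CPU. */
static void example_pkg_cpu_offline(struct example_pkg *pkg, unsigned int cpu)
{
        unsigned int target;

        cpumask_clear_cpu(cpu, &pkg->cpumask);

        if (cpumask_empty(&pkg->cpumask)) {
                /* last CPU of the package: tear the sensor down here */
                return;
        }

        if (cpu != pkg->target_cpu)
                return;

        target = cpumask_any_and(&pkg->cpumask, topology_sibling_cpumask(cpu));
        if (target >= nr_cpu_ids)
                target = cpumask_first(&pkg->cpumask);
        pkg->target_cpu = target;
}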
/drivers/virtio/
virtio_vdpa.c
269 static struct cpumask *
273 struct cpumask *masks = NULL; in create_affinity_masks()
298 struct cpumask *result = group_cpus_evenly(this_vecs, &nr_masks); in create_affinity_masks()
332 struct cpumask *masks; in virtio_vdpa_find_vqs()
404 const struct cpumask *cpu_mask) in virtio_vdpa_set_vq_affinity()
417 static const struct cpumask *
