
Searched refs:cpu_mask (Results 1 – 25 of 25) sorted by relevance

/drivers/irqchip/
irq-vf610-mscm-ir.c
43 u16 cpu_mask; member
95 WARN_ON(irsprc & ~chip_data->cpu_mask); in vf610_mscm_ir_enable()
97 writew_relaxed(chip_data->cpu_mask, in vf610_mscm_ir_enable()
210 mscm_ir_data->cpu_mask = 0x1 << cpuid; in vf610_mscm_ir_of_init()
irq-hip04.c
272 unsigned int cpu_mask, cpu = smp_processor_id(); in hip04_irq_cpu_init() local
279 cpu_mask = hip04_get_cpumask(intc); in hip04_irq_cpu_init()
280 hip04_cpu_map[cpu] = cpu_mask; in hip04_irq_cpu_init()
288 hip04_cpu_map[i] &= ~cpu_mask; in hip04_irq_cpu_init()
irq-gic.c
491 unsigned int cpu_mask, cpu = smp_processor_id(); in gic_cpu_init() local
507 cpu_mask = gic_get_cpumask(gic); in gic_cpu_init()
508 gic_cpu_map[cpu] = cpu_mask; in gic_cpu_init()
516 gic_cpu_map[i] &= ~cpu_mask; in gic_cpu_init()
irq-gic-v3-its.c
1675 const struct cpumask *cpu_mask) in cpumask_pick_least_loaded() argument
1680 for_each_cpu(tmp, cpu_mask) { in cpumask_pick_least_loaded()
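
The irqchip hits above share one pattern: during bring-up each CPU derives its own hardware routing bit (e.g. 0x1 << cpuid, or gic_get_cpumask() reading a target register), records it in a per-CPU map, and strips that bit from every other CPU's entry. The following is only a minimal sketch of that bookkeeping, not any driver's real code; my_get_cpumask() and my_cpu_map[] are invented placeholders for the controller-specific register read and map.

#include <linux/types.h>
#include <linux/smp.h>

#define MY_MAX_CPUS 8

/* Per-CPU routing masks, mirroring the role of gic_cpu_map[] / hip04_cpu_map[]. */
static u8 my_cpu_map[MY_MAX_CPUS] = {
	[0 ... MY_MAX_CPUS - 1] = 0xff,
};

/* Hypothetical controller-specific read of "which target bit is mine". */
static u8 my_get_cpumask(void)
{
	return 0x1 << smp_processor_id();
}

/* Called on each CPU as it is brought up. */
static void my_irqchip_cpu_init(void)
{
	unsigned int cpu_mask, cpu = smp_processor_id();
	int i;

	/* Record this CPU's routing bit ... */
	cpu_mask = my_get_cpumask();
	my_cpu_map[cpu] = cpu_mask;

	/* ... and make sure no other CPU's entry still claims the same bit. */
	for (i = 0; i < MY_MAX_CPUS; i++)
		if (i != cpu)
			my_cpu_map[i] &= ~cpu_mask;
}
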
/drivers/soc/ti/
knav_qmss_acc.c
200 struct cpumask *cpu_mask; in knav_range_setup_acc_irq() local
207 cpu_mask = range->irqs[0].cpu_mask; in knav_range_setup_acc_irq()
211 cpu_mask = range->irqs[queue].cpu_mask; in knav_range_setup_acc_irq()
234 if (!ret && cpu_mask) { in knav_range_setup_acc_irq()
235 ret = irq_set_affinity_hint(irq, cpu_mask); in knav_range_setup_acc_irq()
knav_qmss_queue.c
126 if (range->irqs[queue].cpu_mask) { in knav_queue_setup_irq()
127 ret = irq_set_affinity_hint(irq, range->irqs[queue].cpu_mask); in knav_queue_setup_irq()
1251 range->irqs[i].cpu_mask = devm_kzalloc(dev, in knav_setup_queue_range()
1253 if (!range->irqs[i].cpu_mask) in knav_setup_queue_range()
1258 cpumask_set_cpu(bit, range->irqs[i].cpu_mask); in knav_setup_queue_range()
knav_qmss.h
325 struct cpumask *cpu_mask; member
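
The knav_qmss hits follow the common "allocate a cpumask per IRQ, populate it, then publish it as an affinity hint" flow. Below is a hedged sketch of that flow only; struct my_irq, my_irq_init_mask() and my_setup_irq() are illustrative names, not the driver's.

#include <linux/cpumask.h>
#include <linux/device.h>
#include <linux/interrupt.h>

struct my_irq {
	int irq;
	struct cpumask *cpu_mask;	/* optional preferred CPUs, may stay NULL */
};

/* Allocate and fill the preferred-CPU mask (here: a single CPU). */
static int my_irq_init_mask(struct device *dev, struct my_irq *mi, int cpu)
{
	mi->cpu_mask = devm_kzalloc(dev, cpumask_size(), GFP_KERNEL);
	if (!mi->cpu_mask)
		return -ENOMEM;

	cpumask_set_cpu(cpu, mi->cpu_mask);
	return 0;
}

/* Request the IRQ and expose the mask as an affinity hint to userspace. */
static int my_setup_irq(struct my_irq *mi, irq_handler_t handler, void *data)
{
	int ret;

	ret = request_irq(mi->irq, handler, 0, "my-irq", data);
	if (!ret && mi->cpu_mask) {
		ret = irq_set_affinity_hint(mi->irq, mi->cpu_mask);
		if (ret)
			free_irq(mi->irq, data);
	}
	return ret;
}
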
/drivers/infiniband/hw/hfi1/
netdev.h
89 struct cpumask *cpu_mask);
netdev_rx.c
157 struct cpumask *cpu_mask) in hfi1_num_netdev_contexts() argument
176 cpumask_and(node_cpu_mask, cpu_mask, cpumask_of_node(dd->node)); in hfi1_num_netdev_contexts()
sdma.h
366 struct cpumask cpu_mask; member
sdma.c
932 if (cpumask_test_cpu(cpu, &sde->cpu_mask)) { in sdma_set_cpu_to_sde_map()
1037 cpumask_copy(&sde->cpu_mask, new_mask); in sdma_set_cpu_to_sde_map()
1049 if (cpumask_empty(&sde->cpu_mask)) in sdma_get_cpu_to_sde_map()
1052 cpumap_print_to_pagebuf(true, buf, &sde->cpu_mask); in sdma_get_cpu_to_sde_map()
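
The hfi1 entries show two recurring idioms: intersecting a caller-supplied mask with the device's NUMA node, and printing a mask into a sysfs page buffer. A small sketch of both follows; the function names, the node parameter and the scratch mask are placeholders, not hfi1's real interfaces.

#include <linux/cpumask.h>
#include <linux/topology.h>
#include <linux/sysfs.h>

/* Count the CPUs from 'cpu_mask' that sit on NUMA node 'node'. */
static int my_count_local_cpus(const struct cpumask *cpu_mask, int node,
			       struct cpumask *scratch)
{
	cpumask_and(scratch, cpu_mask, cpumask_of_node(node));
	return cpumask_weight(scratch);
}

/* sysfs-style show: print a mask as a CPU list ("0-3,8") into 'buf'. */
static ssize_t my_show_cpu_mask(const struct cpumask *cpu_mask, char *buf)
{
	if (cpumask_empty(cpu_mask))
		return sysfs_emit(buf, "\n");

	return cpumap_print_to_pagebuf(true, buf, cpu_mask);
}
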
/drivers/net/ethernet/broadcom/bnge/
bnge_resc.h
65 cpumask_var_t cpu_mask; member
/drivers/virtio/
virtio_pci_common.h
145 int vp_set_vq_affinity(struct virtqueue *vq, const struct cpumask *cpu_mask);
virtio_pci_common.c
560 int vp_set_vq_affinity(struct virtqueue *vq, const struct cpumask *cpu_mask) in vp_set_vq_affinity() argument
574 if (!cpu_mask) in vp_set_vq_affinity()
577 cpumask_copy(mask, cpu_mask); in vp_set_vq_affinity()
virtio_vdpa.c
404 const struct cpumask *cpu_mask) in virtio_vdpa_set_vq_affinity() argument
412 return ops->set_vq_affinity(vdpa, index, cpu_mask); in virtio_vdpa_set_vq_affinity()
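
Both virtio hits implement a set_vq_affinity-style callback: a NULL cpu_mask means "no preference", otherwise the mask is copied into the storage that backs the vector's affinity hint. The sketch below only illustrates that contract; struct my_dev and my_set_vq_affinity() are invented, and irq_update_affinity_hint() is used here where a driver might equally use irq_set_affinity_hint().

#include <linux/cpumask.h>
#include <linux/interrupt.h>

struct my_dev {
	int msix_vector;
	/* Assumed allocated with zalloc_cpumask_var() at probe time. */
	cpumask_var_t msix_affinity_mask;
};

/* NULL cpu_mask clears the preference; otherwise remember and publish it. */
static int my_set_vq_affinity(struct my_dev *d, const struct cpumask *cpu_mask)
{
	if (!cpu_mask) {
		irq_update_affinity_hint(d->msix_vector, NULL);
		cpumask_clear(d->msix_affinity_mask);
	} else {
		cpumask_copy(d->msix_affinity_mask, cpu_mask);
		irq_update_affinity_hint(d->msix_vector, d->msix_affinity_mask);
	}
	return 0;
}
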
/drivers/block/drbd/
drbd_main.c
471 static void drbd_calc_cpu_mask(cpumask_var_t *cpu_mask) in drbd_calc_cpu_mask() argument
483 for_each_cpu(cpu, resource->cpu_mask) in drbd_calc_cpu_mask()
496 cpumask_setall(*cpu_mask); in drbd_calc_cpu_mask()
499 cpumask_set_cpu(min_index, *cpu_mask); in drbd_calc_cpu_mask()
517 set_cpus_allowed_ptr(p, resource->cpu_mask); in drbd_thread_current_set_cpu()
2285 free_cpumask_var(resource->cpu_mask); in drbd_destroy_resource()
2467 if (nr_cpu_ids > 1 && res_opts->cpu_mask[0] != 0) { in set_resource_options()
2477 res_opts->cpu_mask, in set_resource_options()
2478 strlen(res_opts->cpu_mask) > 12 ? "..." : "", in set_resource_options()
2493 if (!cpumask_equal(resource->cpu_mask, new_cpu_mask)) { in set_resource_options()
[all …]
drbd_int.h
607 cpumask_var_t cpu_mask; member
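
The drbd entries keep a cpumask_var_t per resource, fill it from a textual option, pin worker threads to it with set_cpus_allowed_ptr(), and free it on teardown. A compact sketch of that lifecycle follows; the names are illustrative, and cpulist_parse() stands in here for whatever string format the driver actually accepts.

#include <linux/cpumask.h>
#include <linux/sched.h>
#include <linux/gfp.h>

struct my_resource {
	cpumask_var_t cpu_mask;
};

/* Parse a textual CPU list ("3" or "0-2,5") into the resource mask. */
static int my_resource_set_cpu_mask(struct my_resource *res, const char *str)
{
	int err;

	if (!zalloc_cpumask_var(&res->cpu_mask, GFP_KERNEL))
		return -ENOMEM;

	err = cpulist_parse(str, res->cpu_mask);
	if (err) {
		free_cpumask_var(res->cpu_mask);
		return err;
	}

	/* An empty mask means "no restriction": allow all CPUs. */
	if (cpumask_empty(res->cpu_mask))
		cpumask_setall(res->cpu_mask);
	return 0;
}

/* Pin the calling worker thread onto the resource's CPUs. */
static void my_thread_set_cpu(struct my_resource *res)
{
	set_cpus_allowed_ptr(current, res->cpu_mask);
}

static void my_resource_destroy(struct my_resource *res)
{
	free_cpumask_var(res->cpu_mask);
}
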
/drivers/net/dsa/xrs700x/
xrs700x.c
511 unsigned int i, cpu_mask = 0, mask = 0; in xrs700x_bridge_common() local
519 cpu_mask |= BIT(i); in xrs700x_bridge_common()
539 cpu_mask); in xrs700x_bridge_common()
/drivers/powercap/
intel_rapl_common.c
1880 cpumask_var_t cpu_mask; in cpumask_show() local
1884 if (!alloc_cpumask_var(&cpu_mask, GFP_KERNEL)) in cpumask_show()
1889 cpumask_clear(cpu_mask); in cpumask_show()
1895 cpumask_set_cpu(cpu, cpu_mask); in cpumask_show()
1899 ret = cpumap_print_to_pagebuf(true, buf, cpu_mask); in cpumask_show()
1901 free_cpumask_var(cpu_mask); in cpumask_show()
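
The intel_rapl_common hit is the classic "temporary cpumask inside a sysfs show" shape: allocate, clear, set the CPUs of interest, print, free. A minimal sketch of that shape, assuming a hypothetical my_cpu_is_relevant() predicate decides membership:

#include <linux/cpumask.h>
#include <linux/gfp.h>

/* Hypothetical policy hook: does this CPU belong in the printed set? */
static bool my_cpu_is_relevant(unsigned int cpu)
{
	return (cpu % 2) == 0;	/* example policy only */
}

static ssize_t my_cpumask_show(char *buf)
{
	cpumask_var_t cpu_mask;
	unsigned int cpu;
	ssize_t ret;

	if (!alloc_cpumask_var(&cpu_mask, GFP_KERNEL))
		return -ENOMEM;

	cpumask_clear(cpu_mask);
	for_each_online_cpu(cpu)
		if (my_cpu_is_relevant(cpu))
			cpumask_set_cpu(cpu, cpu_mask);

	/* 'true' prints a CPU list ("0,2,4"); 'false' would print a hex mask. */
	ret = cpumap_print_to_pagebuf(true, buf, cpu_mask);

	free_cpumask_var(cpu_mask);
	return ret;
}
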
/drivers/vdpa/vdpa_user/
vduse_dev.c
739 const struct cpumask *cpu_mask) in vduse_vdpa_set_vq_affinity() argument
743 if (cpu_mask) in vduse_vdpa_set_vq_affinity()
744 cpumask_copy(&dev->vqs[idx]->irq_affinity, cpu_mask); in vduse_vdpa_set_vq_affinity()
/drivers/cpufreq/
cpufreq.c
3070 bool cpufreq_ready_for_eas(const struct cpumask *cpu_mask) in cpufreq_ready_for_eas() argument
3075 for_each_cpu(cpu, cpu_mask) { in cpufreq_ready_for_eas()
3078 cpumask_pr_args(cpu_mask)); in cpufreq_ready_for_eas()
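
cpufreq_ready_for_eas() walks the supplied mask and, on failure, prints it with the "%*pbl" / cpumask_pr_args() idiom. A short hedged sketch of that check shape; the per-CPU predicate is invented and not cpufreq's real test.

#include <linux/cpumask.h>
#include <linux/printk.h>

/* Hypothetical per-CPU readiness test. */
static bool my_cpu_is_ready(unsigned int cpu)
{
	return cpu_online(cpu);
}

static bool my_all_cpus_ready(const struct cpumask *cpu_mask)
{
	unsigned int cpu;

	for_each_cpu(cpu, cpu_mask) {
		if (!my_cpu_is_ready(cpu)) {
			pr_warn("CPU %u not ready (mask %*pbl)\n",
				cpu, cpumask_pr_args(cpu_mask));
			return false;
		}
	}
	return true;
}
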
/drivers/accel/habanalabs/common/
device.c
2867 static struct cpumask cpu_mask; in hl_init_cpu_for_irq() local
2872 if (!cpumask_and(&cpu_mask, cpumask_of_node(numa_node), cpu_online_mask)) { in hl_init_cpu_for_irq()
2878 for_each_cpu(i, &cpu_mask) in hl_init_cpu_for_irq()
/drivers/pci/controller/
pci-hyperv.c
225 u64 cpu_mask; member
1768 int_pkt->int_desc.cpu_mask = CPU_AFFINITY_ALL; in hv_compose_msi_req_v1()
/drivers/net/ethernet/broadcom/bnxt/
bnxt.h
1243 cpumask_var_t cpu_mask; member
bnxt.c
11528 cpumask_copy(irq->cpu_mask, mask); in bnxt_irq_affinity_notify()
11534 cpumask_first(irq->cpu_mask), &tag)) in bnxt_irq_affinity_notify()
11612 free_cpumask_var(irq->cpu_mask); in bnxt_free_irq()
11671 if (zalloc_cpumask_var(&irq->cpu_mask, GFP_KERNEL)) { in bnxt_request_irq()
11679 irq->cpu_mask); in bnxt_request_irq()
11680 rc = irq_update_affinity_hint(irq->vector, irq->cpu_mask); in bnxt_request_irq()
11692 cpumask_first(irq->cpu_mask), in bnxt_request_irq()
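
The bnxt entries tie a cpumask_var_t to each IRQ vector: allocate it when the vector is requested, publish it via irq_update_affinity_hint(), mirror later changes from an affinity notifier with cpumask_copy(), and free it when the vector is released. The sketch below only outlines that lifecycle; struct my_vector and the function names are invented.

#include <linux/cpumask.h>
#include <linux/interrupt.h>
#include <linux/gfp.h>

struct my_vector {
	int vector;			/* Linux IRQ number */
	cpumask_var_t cpu_mask;		/* current preferred CPUs */
};

static int my_vector_request(struct my_vector *v, int initial_cpu)
{
	if (!zalloc_cpumask_var(&v->cpu_mask, GFP_KERNEL))
		return 0;	/* the hint is optional: carry on without it */

	cpumask_set_cpu(initial_cpu, v->cpu_mask);
	return irq_update_affinity_hint(v->vector, v->cpu_mask);
}

/* irq_affinity_notify-style callback: remember the mask userspace chose. */
static void my_vector_affinity_notify(struct my_vector *v,
				      const struct cpumask *mask)
{
	cpumask_copy(v->cpu_mask, mask);
	/* e.g. re-steer queue work towards cpumask_first(v->cpu_mask) here */
}

static void my_vector_free(struct my_vector *v)
{
	irq_update_affinity_hint(v->vector, NULL);
	free_cpumask_var(v->cpu_mask);
}
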

Completed in 177 milliseconds