Searched refs: per_cpu_ptr (Results 1 – 25 of 367) sorted by relevance
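Most of the matches below follow one of two shapes: a hot-path update of the current CPU's private slot (the xfs_stats.h macros), or a slow-path walk over every CPU that sums or resets each slot (xfs_stats_format(), the part_stat.h read macro, cpuacct.c). As rough orientation, here is a minimal sketch of the aggregate shape, using the real alloc_percpu()/for_each_possible_cpu() kernel APIs; struct foo_stats and the foo_* helpers are hypothetical names, not code from the tree.

#include <linux/percpu.h>
#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/types.h>

/* One private instance of this struct exists for every possible CPU. */
struct foo_stats {
	u64 bytes;
};

static struct foo_stats __percpu *foo_stats;

static int foo_stats_init(void)
{
	foo_stats = alloc_percpu(struct foo_stats);
	return foo_stats ? 0 : -ENOMEM;
}

/* Reader side, as in xfs_stats_format(): per_cpu_ptr(base, cpu) yields
 * CPU cpu's instance, and the total is the sum over all possible CPUs. */
static u64 foo_sum_bytes(void)
{
	u64 sum = 0;
	int cpu;

	for_each_possible_cpu(cpu)
		sum += per_cpu_ptr(foo_stats, cpu)->bytes;
	return sum;
}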

/linux/fs/xfs/
xfs_stats.h
167 per_cpu_ptr(xfsstats.xs_stats, current_cpu())->s.v++; \
168 per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->s.v++; \
173 per_cpu_ptr(xfsstats.xs_stats, current_cpu())->s.v--; \
174 per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->s.v--; \
179 per_cpu_ptr(xfsstats.xs_stats, current_cpu())->s.v += (inc); \
180 per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->s.v += (inc); \
185 per_cpu_ptr(xfsstats.xs_stats, current_cpu())->a[off]++; \
186 per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->a[off]++; \
191 per_cpu_ptr(xfsstats.xs_stats, current_cpu())->a[off]; \
192 per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->a[off]; \
[all …]
xfs_stats.c
15 val += *(((__u32 *)per_cpu_ptr(stats, cpu) + idx)); in counter_val()
73 xs_xstrat_bytes += per_cpu_ptr(stats, i)->s.xs_xstrat_bytes; in xfs_stats_format()
74 xs_write_bytes += per_cpu_ptr(stats, i)->s.xs_write_bytes; in xfs_stats_format()
75 xs_read_bytes += per_cpu_ptr(stats, i)->s.xs_read_bytes; in xfs_stats_format()
76 defer_relog += per_cpu_ptr(stats, i)->s.defer_relog; in xfs_stats_format()
102 vn_active = per_cpu_ptr(stats, c)->s.vn_active; in xfs_stats_clearall()
103 memset(per_cpu_ptr(stats, c), 0, sizeof(*stats)); in xfs_stats_clearall()
104 per_cpu_ptr(stats, c)->s.vn_active = vn_active; in xfs_stats_clearall()
/linux/include/linux/
context_tracking_state.h
66 struct context_tracking *ct = per_cpu_ptr(&context_tracking, cpu); in ct_rcu_watching_cpu()
73 struct context_tracking *ct = per_cpu_ptr(&context_tracking, cpu); in ct_rcu_watching_cpu_acquire()
85 struct context_tracking *ct = per_cpu_ptr(&context_tracking, cpu); in ct_nesting_cpu()
97 struct context_tracking *ct = per_cpu_ptr(&context_tracking, cpu); in ct_nmi_nesting_cpu()
part_stat.h
29 (per_cpu_ptr((part)->bd_stats, (cpu))->field)
39 res += per_cpu_ptr((part)->bd_stats, _cpu)->field; \
48 memset(per_cpu_ptr(part->bd_stats, i), value, in part_stat_set_all()
/linux/drivers/infiniband/ulp/rtrs/
rtrs-clt-stats.c
27 s = per_cpu_ptr(stats->pcpu_stats, con->cpu); in rtrs_clt_update_wc_stats()
47 s = per_cpu_ptr(stats->pcpu_stats, cpu); in rtrs_clt_stats_migration_from_cnt_to_str()
66 s = per_cpu_ptr(stats->pcpu_stats, cpu); in rtrs_clt_stats_migration_to_cnt_to_str()
90 r = &per_cpu_ptr(stats->pcpu_stats, cpu)->rdma; in rtrs_clt_stats_rdma_to_str()
119 s = per_cpu_ptr(stats->pcpu_stats, cpu); in rtrs_clt_reset_rdma_stats()
135 s = per_cpu_ptr(stats->pcpu_stats, cpu); in rtrs_clt_reset_cpu_migr_stats()
/linux/kernel/
smpboot.c
172 struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu); in __smpboot_create_thread()
197 *per_cpu_ptr(ht->store, cpu) = tsk; in __smpboot_create_thread()
230 struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu); in smpboot_unpark_thread()
249 struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu); in smpboot_park_thread()
272 struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu); in smpboot_destroy_threads()
276 *per_cpu_ptr(ht->store, cpu) = NULL; in smpboot_destroy_threads()
relay.c
204 *per_cpu_ptr(chan->buf, buf->cpu) = NULL; in relay_destroy_buf()
334 if ((buf = *per_cpu_ptr(chan->buf, i))) in relay_reset()
382 return *per_cpu_ptr(chan->buf, 0); in relay_open_buf()
406 *per_cpu_ptr(chan->buf, 0) = buf; in relay_open_buf()
440 if (*per_cpu_ptr(chan->buf, cpu)) in relay_prepare_cpu()
448 *per_cpu_ptr(chan->buf, cpu) = buf; in relay_prepare_cpu()
520 *per_cpu_ptr(chan->buf, i) = buf; in relay_open()
529 if ((buf = *per_cpu_ptr(chan->buf, i))) in relay_open()
594 buf = *per_cpu_ptr(chan->buf, 0); in relay_late_setup_files()
613 buf = *per_cpu_ptr(chan->buf, i); in relay_late_setup_files()
[all …]
cpu.c
173 struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); in cpuhp_invoke_callback()
311 atomic_t *st = per_cpu_ptr(&cpuhp_state.ap_sync_state, cpu); in cpuhp_wait_for_sync_state()
362 atomic_t *st = per_cpu_ptr(&cpuhp_state.ap_sync_state, cpu); in cpuhp_bp_sync_dead()
404 atomic_t *st = per_cpu_ptr(&cpuhp_state.ap_sync_state, cpu); in cpuhp_can_boot_ap()
794 struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); in bringup_wait_for_ap_online()
827 struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); in cpuhp_bringup_ap()
859 struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); in bringup_cpu()
1128 struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); in cpuhp_invoke_ap_callback()
1177 struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); in cpuhp_kick_ap_work()
1208 st = per_cpu_ptr(&cpuhp_state, cpu); in cpuhp_init_state()
[all …]
/linux/kernel/irq/
matrix.c
72 struct cpumap *cm = per_cpu_ptr(m->maps, cpu); in irq_alloc_matrix()
144 cm = per_cpu_ptr(m->maps, cpu); in matrix_find_best_cpu()
165 cm = per_cpu_ptr(m->maps, cpu); in matrix_find_best_cpu_managed()
221 struct cpumap *cm = per_cpu_ptr(m->maps, cpu); in irq_matrix_reserve_managed()
262 struct cpumap *cm = per_cpu_ptr(m->maps, cpu); in irq_matrix_remove_managed()
305 cm = per_cpu_ptr(m->maps, cpu); in irq_matrix_alloc_managed()
400 cm = per_cpu_ptr(m->maps, cpu); in irq_matrix_alloc()
427 struct cpumap *cm = per_cpu_ptr(m->maps, cpu); in irq_matrix_free()
510 struct cpumap *cm = per_cpu_ptr(m->maps, cpu); in irq_matrix_debug_show()
/linux/kernel/sched/
cpuacct.c
97 u64 *cpuusage = per_cpu_ptr(ca->cpuusage, cpu); in cpuacct_cpuusage_read()
98 u64 *cpustat = per_cpu_ptr(ca->cpustat, cpu)->cpustat; in cpuacct_cpuusage_read()
137 u64 *cpuusage = per_cpu_ptr(ca->cpuusage, cpu); in cpuacct_cpuusage_write()
138 u64 *cpustat = per_cpu_ptr(ca->cpustat, cpu)->cpustat; in cpuacct_cpuusage_write()
270 u64 *cpustat = per_cpu_ptr(ca->cpustat, cpu)->cpustat; in cpuacct_stats_show()
278 cputime.sum_exec_runtime += *per_cpu_ptr(ca->cpuusage, cpu); in cpuacct_stats_show()
342 *per_cpu_ptr(ca->cpuusage, cpu) += cputime; in cpuacct_charge()
topology.c
937 sibling = *per_cpu_ptr(sdd->sd, i); in build_balance_mask()
1216 sg = *per_cpu_ptr(sdd->sg, cpu); in get_group()
1540 *per_cpu_ptr(sdd->sd, cpu) = NULL; in claim_allocations()
1546 *per_cpu_ptr(sdd->sg, cpu) = NULL; in claim_allocations()
2257 *per_cpu_ptr(sdd->sd, j) = sd; in __sdt_alloc()
2264 *per_cpu_ptr(sdd->sds, j) = sds; in __sdt_alloc()
2273 *per_cpu_ptr(sdd->sg, j) = sg; in __sdt_alloc()
2284 *per_cpu_ptr(sdd->sgc, j) = sgc; in __sdt_alloc()
2303 sd = *per_cpu_ptr(sdd->sd, j); in __sdt_free()
2428 *per_cpu_ptr(d.sd, i) = sd; in build_sched_domains()
[all …]
/linux/kernel/bpf/
percpu_freelist.c
15 struct pcpu_freelist_head *head = per_cpu_ptr(s->freelist, cpu); in pcpu_freelist_init()
66 head = per_cpu_ptr(s->freelist, cpu); in ___pcpu_freelist_push_nmi()
110 head = per_cpu_ptr(s->freelist, cpu); in pcpu_freelist_populate()
128 head = per_cpu_ptr(s->freelist, cpu); in ___pcpu_freelist_pop()
160 head = per_cpu_ptr(s->freelist, cpu); in ___pcpu_freelist_pop_nmi()
/linux/arch/x86/events/amd/
uncore.c
223 ctx = *per_cpu_ptr(pmu->ctx, event->cpu); in amd_uncore_event_init()
426 ctx = *per_cpu_ptr(pmu->ctx, cpu); in amd_uncore_ctx_free()
438 *per_cpu_ptr(pmu->ctx, cpu) = NULL; in amd_uncore_ctx_free()
456 *per_cpu_ptr(pmu->ctx, cpu) = NULL; in amd_uncore_ctx_init()
468 prev = *per_cpu_ptr(pmu->ctx, j); in amd_uncore_ctx_init()
498 *per_cpu_ptr(pmu->ctx, cpu) = curr; in amd_uncore_ctx_init()
520 curr = *per_cpu_ptr(pmu->ctx, cpu); in amd_uncore_ctx_move()
526 next = *per_cpu_ptr(pmu->ctx, j); in amd_uncore_ctx_move()
653 *per_cpu_ptr(uncore->info, cpu) = info; in amd_uncore_df_ctx_scan()
787 *per_cpu_ptr(uncore->info, cpu) = info; in amd_uncore_l3_ctx_scan()
[all …]
/linux/arch/s390/kernel/
smp.c
411 if (pcpu_running(per_cpu_ptr(&pcpu_devices, cpu))) in arch_vcpu_is_preempted()
453 if (pcpu_stopped(per_cpu_ptr(&pcpu_devices, cpu))) in smp_emergency_stop()
484 pcpu = per_cpu_ptr(&pcpu_devices, cpu); in smp_send_stop()
558 pcpu = per_cpu_ptr(&pcpu_devices, cpu); in smp_store_status()
734 pcpu = per_cpu_ptr(&pcpu_devices, cpu); in smp_add_core()
934 pcpu = per_cpu_ptr(&pcpu_devices, cpu); in __cpu_die()
980 ipl_pcpu = per_cpu_ptr(&pcpu_devices, 0); in smp_prepare_boot_cpu()
1040 pcpu = per_cpu_ptr(&pcpu_devices, cpu); in cpu_configure_store()
1112 struct cpu *c = per_cpu_ptr(&cpu_devices, cpu); in smp_cpu_online()
1119 struct cpu *c = per_cpu_ptr(&cpu_devices, cpu); in smp_cpu_pre_down()
[all …]
wti.c
104 struct wti_state *st = per_cpu_ptr(&wti_state, cpu); in wti_pending()
133 st = per_cpu_ptr(&wti_state, cpu); in wti_show()
144 struct wti_state *st = per_cpu_ptr(&wti_state, cpu); in wti_thread_fn()
179 st = per_cpu_ptr(&wti_state, cpu); in wti_init()
/linux/fs/squashfs/
decompressor_multi_percpu.c
39 stream = per_cpu_ptr(percpu, cpu); in squashfs_decompressor_create()
53 stream = per_cpu_ptr(percpu, cpu); in squashfs_decompressor_create()
70 stream = per_cpu_ptr(percpu, cpu); in squashfs_decompressor_destroy()
/linux/arch/x86/kernel/cpu/
aperfmperf.c
383 per_cpu_ptr(arch_cpu_scale, cpu)->capacity = SCHED_CAPACITY_SCALE; in arch_enable_hybrid_capacity_scale()
384 per_cpu_ptr(arch_cpu_scale, cpu)->freq_ratio = arch_max_freq_ratio; in arch_enable_hybrid_capacity_scale()
414 WRITE_ONCE(per_cpu_ptr(arch_cpu_scale, cpu)->capacity, in arch_set_cpu_capacity()
416 WRITE_ONCE(per_cpu_ptr(arch_cpu_scale, cpu)->freq_ratio, in arch_set_cpu_capacity()
426 return READ_ONCE(per_cpu_ptr(arch_cpu_scale, cpu)->capacity); in arch_scale_cpu_capacity()
503 struct aperfmperf *s = per_cpu_ptr(&cpu_samples, cpu); in arch_freq_get_on_cpu()
/linux/drivers/clocksource/
timer-mp-csky.c
78 struct timer_of *to = per_cpu_ptr(&csky_to, cpu); in csky_mptimer_starting_cpu()
145 to = per_cpu_ptr(&csky_to, cpu); in csky_mptimer_init()
168 to = per_cpu_ptr(&csky_to, cpu_rollback); in csky_mptimer_init()
/linux/drivers/powercap/
idle_inject.c
108 iit = per_cpu_ptr(&idle_inject_thread, cpu); in idle_inject_wakeup()
154 iit = per_cpu_ptr(&idle_inject_thread, cpu); in idle_inject_fn()
280 iit = per_cpu_ptr(&idle_inject_thread, cpu); in idle_inject_stop()
311 per_cpu_ptr(&idle_inject_thread, cpu); in idle_inject_should_run()
/linux/tools/testing/shared/linux/
percpu.h
10 #define per_cpu_ptr(ptr, cpu) ({ (void)(cpu); (ptr); }) macro
11 #define per_cpu(var, cpu) (*per_cpu_ptr(&(var), cpu))
/linux/kernel/trace/
trace_functions_graph.c
179 data = per_cpu_ptr(tr->array_buffer.data, cpu); in trace_graph_entry()
261 data = per_cpu_ptr(tr->array_buffer.data, cpu); in trace_graph_return()
420 last_pid = &(per_cpu_ptr(data->cpu_data, cpu)->last_pid); in verif_pid()
728 cpu_data = per_cpu_ptr(data->cpu_data, cpu); in print_graph_entry_leaf()
781 cpu_data = per_cpu_ptr(data->cpu_data, cpu); in print_graph_entry_nested()
1018 cpu_data = per_cpu_ptr(data->cpu_data, cpu); in print_graph_return()
1089 depth = per_cpu_ptr(data->cpu_data, iter->cpu)->depth; in print_graph_comment()
1155 if (data && per_cpu_ptr(data->cpu_data, cpu)->ignore) { in print_graph_function_flags()
1156 per_cpu_ptr(data->cpu_data, cpu)->ignore = 0; in print_graph_function_flags()
1169 per_cpu_ptr(data->cpu_data, iter->cpu)->ignore = 1; in print_graph_function_flags()
[all …]
/linux/drivers/irqchip/
irq-riscv-imsic-state.c
155 mlocal = per_cpu_ptr(imsic->global.local, mvec->cpu); in __imsic_local_sync()
241 lpriv = per_cpu_ptr(imsic->lpriv, vec->cpu); in imsic_vector_mask()
263 lpriv = per_cpu_ptr(imsic->lpriv, vec->cpu); in imsic_vector_unmask()
311 old_lpriv = per_cpu_ptr(imsic->lpriv, old_vec->cpu); in imsic_vector_move()
315 new_lpriv = per_cpu_ptr(imsic->lpriv, new_vec->cpu); in imsic_vector_move()
336 lpriv = per_cpu_ptr(imsic->lpriv, vec->cpu); in imsic_vector_debug_show()
363 struct imsic_local_priv *lpriv = per_cpu_ptr(imsic->lpriv, cpu); in imsic_vector_from_local_id()
385 lpriv = per_cpu_ptr(imsic->lpriv, cpu); in imsic_vector_alloc()
410 lpriv = per_cpu_ptr(imsic->lpriv, cpu); in imsic_local_cleanup()
433 lpriv = per_cpu_ptr(imsic->lpriv, cpu); in imsic_local_init()
[all …]
/linux/drivers/hwtracing/coresight/
coresight-trace-id.c
45 return atomic_read(per_cpu_ptr(id_map->cpu_map, cpu)); in _coresight_trace_id_read_cpu_id()
127 atomic_set(per_cpu_ptr(id_map->cpu_map, cpu), 0); in coresight_trace_id_release_all()
161 atomic_set(per_cpu_ptr(id_map->cpu_map, cpu), id); in _coresight_trace_id_get_cpu_id()
184 atomic_set(per_cpu_ptr(id_map->cpu_map, cpu), 0); in _coresight_trace_id_put_cpu_id()
/linux/arch/x86/kernel/
irq_64.c
38 char *stack = (char *)per_cpu_ptr(&irq_stack_backing_store, cpu); in map_irq_stack()
64 void *va = per_cpu_ptr(&irq_stack_backing_store, cpu); in map_irq_stack()
/linux/drivers/hv/
hv.c
108 hv_cpu = per_cpu_ptr(hv_context.cpu_context, cpu); in hv_synic_alloc()
120 hv_cpu = per_cpu_ptr(hv_context.cpu_context, cpu); in hv_synic_alloc()
212 per_cpu_ptr(hv_context.cpu_context, cpu); in hv_synic_free()
265 per_cpu_ptr(hv_context.cpu_context, cpu); in hv_synic_enable_regs()
348 per_cpu_ptr(hv_context.cpu_context, cpu); in hv_synic_disable_regs()
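One entry above deserves a note: tools/testing/shared/linux/percpu.h redefines per_cpu_ptr() for userspace test builds, so "per-CPU" data collapses to a single shared instance; the (void)(cpu) exists only to consume the argument without warnings. A small userspace illustration of that stub's semantics, assuming gcc (the statement expression is a GNU extension); the counter example is made up, not code from the tree.

#include <stdio.h>

/* Same definition as the tools/testing stub: ignore cpu, return ptr. */
#define per_cpu_ptr(ptr, cpu) ({ (void)(cpu); (ptr); })

static long counter;

int main(void)
{
	/* Updates aimed at "cpu 0" and "cpu 1" land on the same object. */
	(*per_cpu_ptr(&counter, 0))++;
	(*per_cpu_ptr(&counter, 1))++;
	printf("%ld\n", counter); /* prints 2 */
	return 0;
}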
