Lines Matching refs:cpu

52 int cpu; member
253 int cpu; in cblist_init_generic() local
270 for_each_possible_cpu(cpu) { in cblist_init_generic()
271 struct rcu_tasks_percpu *rtpcp = per_cpu_ptr(rtp->rtpcpu, cpu); in cblist_init_generic()
274 if (cpu) in cblist_init_generic()
279 rtpcp->cpu = cpu; in cblist_init_generic()
289 maxcpu = cpu; in cblist_init_generic()
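
The cblist_init_generic() hits above show the usual per-CPU setup pattern: walk every possible CPU, map to its element with per_cpu_ptr(), and record the owning CPU (the int cpu member from line 52 is what line 279 fills in). A minimal sketch of that shape, using cut-down demo_* stand-ins rather than the real struct rcu_tasks_percpu / struct rcu_tasks layouts; the later sketches below reuse these types.

#include <linux/percpu.h>
#include <linux/spinlock.h>
#include <linux/cpumask.h>

/* Cut-down stand-ins; the real structures carry far more state. */
struct demo_percpu {
        raw_spinlock_t lock;
        int cpu;                                /* owning CPU, as recorded at line 279 */
};

struct demo_flavor {
        struct demo_percpu __percpu *rtpcpu;    /* from alloc_percpu() */
        int percpu_dequeue_lim;                 /* CPUs [0, lim) may hold callbacks */
};

static void demo_init_generic(struct demo_flavor *rtp)
{
        int cpu;
        int maxcpu = -1;

        for_each_possible_cpu(cpu) {
                struct demo_percpu *rtpcp = per_cpu_ptr(rtp->rtpcpu, cpu);

                raw_spin_lock_init(&rtpcp->lock);
                rtpcp->cpu = cpu;       /* each element remembers its CPU */
                maxcpu = cpu;           /* track the highest possible CPU (line 289) */
        }
        pr_info("demo: highest possible CPU is %d\n", maxcpu);
        /* The real initial limit is computed elsewhere; CPU 0 only, for this sketch. */
        smp_store_release(&rtp->percpu_dequeue_lim, 1);
}
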
425 int cpu; in rcu_barrier_tasks_generic() local
440 for_each_possible_cpu(cpu) { in rcu_barrier_tasks_generic()
441 if (cpu >= smp_load_acquire(&rtp->percpu_dequeue_lim)) in rcu_barrier_tasks_generic()
443 rtpcp = per_cpu_ptr(rtp->rtpcpu, cpu); in rcu_barrier_tasks_generic()
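
rcu_barrier_tasks_generic() only needs to visit CPUs whose queues can actually hold callbacks, so the walk at lines 440-443 stops at the acquire-loaded percpu_dequeue_lim. A sketch of that bounded walk, reusing the demo_* stand-ins above; posting the barrier callback itself is elided:

static void demo_barrier(struct demo_flavor *rtp)
{
        int cpu;

        for_each_possible_cpu(cpu) {
                struct demo_percpu *rtpcp;

                if (cpu >= smp_load_acquire(&rtp->percpu_dequeue_lim))
                        break;          /* no queue past the limit can hold callbacks */
                rtpcp = per_cpu_ptr(rtp->rtpcpu, cpu);
                raw_spin_lock(&rtpcp->lock);
                /* ...post a barrier callback on rtpcp's list here... */
                raw_spin_unlock(&rtpcp->lock);
        }
}
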
461 int cpu; in rcu_tasks_need_gpcb() local
471 for (cpu = 0; cpu < dequeue_limit; cpu++) { in rcu_tasks_need_gpcb()
472 if (!cpu_possible(cpu)) in rcu_tasks_need_gpcb()
474 struct rcu_tasks_percpu *rtpcp = per_cpu_ptr(rtp->rtpcpu, cpu); in rcu_tasks_need_gpcb()
484 if (cpu > 0) in rcu_tasks_need_gpcb()
526 for (cpu = rtp->percpu_dequeue_lim; cpu < rcu_task_cpu_ids; cpu++) { in rcu_tasks_need_gpcb()
527 if (!cpu_possible(cpu)) in rcu_tasks_need_gpcb()
529 struct rcu_tasks_percpu *rtpcp = per_cpu_ptr(rtp->rtpcpu, cpu); in rcu_tasks_need_gpcb()
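
rcu_tasks_need_gpcb() instead scans by CPU index, first up to the dequeue limit and later (lines 526-527) from the limit up to rcu_task_cpu_ids; either way, holes in the possible-CPU numbering are skipped with cpu_possible(). A sketch of that scan shape, again on the demo_* stand-ins:

static bool demo_scan_for_callbacks(struct demo_flavor *rtp, int dequeue_limit)
{
        int cpu;
        bool needgpcb = false;

        for (cpu = 0; cpu < dequeue_limit; cpu++) {
                struct demo_percpu *rtpcp;

                if (!cpu_possible(cpu))
                        continue;       /* possible CPU numbers need not be contiguous */
                rtpcp = per_cpu_ptr(rtp->rtpcpu, cpu);
                raw_spin_lock(&rtpcp->lock);
                /* ...advance/inspect this CPU's callback list, set needgpcb... */
                raw_spin_unlock(&rtpcp->lock);
        }
        return needgpcb;
}
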
554 if (rtpcp_next->cpu < smp_load_acquire(&rtp->percpu_dequeue_lim)) { in rcu_tasks_invoke_cbs()
555 cpuwq = rcu_cpu_beenfullyonline(rtpcp_next->cpu) ? rtpcp_next->cpu : WORK_CPU_UNBOUND; in rcu_tasks_invoke_cbs()
560 if (rtpcp_next->cpu < smp_load_acquire(&rtp->percpu_dequeue_lim)) { in rcu_tasks_invoke_cbs()
561 cpuwq = rcu_cpu_beenfullyonline(rtpcp_next->cpu) ? rtpcp_next->cpu : WORK_CPU_UNBOUND; in rcu_tasks_invoke_cbs()
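
At lines 554-561, rcu_tasks_invoke_cbs() decides where the callback-invocation work should run: on the queue's own CPU if that CPU has been fully brought online (rcu_cpu_beenfullyonline()), otherwise unbound. A sketch of that choice; queuing on system_wq and the separate work_struct argument are simplifications for this sketch:

#include <linux/workqueue.h>

static void demo_queue_invoke_work(struct demo_flavor *rtp,
                                   struct demo_percpu *rtpcp_next,
                                   struct work_struct *work)
{
        int cpuwq;

        if (rtpcp_next->cpu < smp_load_acquire(&rtp->percpu_dequeue_lim)) {
                cpuwq = rcu_cpu_beenfullyonline(rtpcp_next->cpu) ? rtpcp_next->cpu
                                                                 : WORK_CPU_UNBOUND;
                queue_work_on(cpuwq, system_wq, work);
        }
}
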
635 int cpu; in rcu_tasks_kthread() local
638 for_each_possible_cpu(cpu) { in rcu_tasks_kthread()
639 struct rcu_tasks_percpu *rtpcp = per_cpu_ptr(rtp->rtpcpu, cpu); in rcu_tasks_kthread()
725 int cpu; in show_rcu_tasks_generic_gp_kthread() local
730 for_each_possible_cpu(cpu) { in show_rcu_tasks_generic_gp_kthread()
731 struct rcu_tasks_percpu *rtpcp = per_cpu_ptr(rtp->rtpcpu, cpu); in show_rcu_tasks_generic_gp_kthread()
761 int cpu; in rcu_tasks_torture_stats_print_generic() local
776 for_each_possible_cpu(cpu) { in rcu_tasks_torture_stats_print_generic()
778 struct rcu_tasks_percpu *rtpcp = per_cpu_ptr(rtp->rtpcpu, cpu); in rcu_tasks_torture_stats_print_generic()
781 cpumask_set_cpu(cpu, cm); in rcu_tasks_torture_stats_print_generic()
785 pr_cont(" %d:%ld", cpu, n); in rcu_tasks_torture_stats_print_generic()
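
The torture-stats hits above (lines 776-785) sample each possible CPU's callback count, remember busy CPUs in a cpumask, and print the non-zero counts inline. A sketch of that reporting loop; demo_percpu_count() is a hypothetical helper standing in for the real segmented-list length read:

static long demo_percpu_count(struct demo_percpu *rtpcp)
{
        return 0;       /* hypothetical: would return this CPU's callback count */
}

static void demo_stats_print(struct demo_flavor *rtp)
{
        int cpu;
        cpumask_var_t cm;

        if (!zalloc_cpumask_var(&cm, GFP_KERNEL))
                return;
        for_each_possible_cpu(cpu) {
                struct demo_percpu *rtpcp = per_cpu_ptr(rtp->rtpcpu, cpu);
                long n = demo_percpu_count(rtpcp);

                if (!n)
                        continue;
                cpumask_set_cpu(cpu, cm);       /* remember CPUs with callbacks */
                pr_cont(" %d:%ld", cpu, n);     /* "cpu:count", as at line 785 */
        }
        free_cpumask_var(cm);
}
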
983 int cpu; in rcu_tasks_is_holdout() local
1006 cpu = task_cpu(t); in rcu_tasks_is_holdout()
1009 if (t == idle_task(cpu) && !rcu_cpu_online(cpu)) in rcu_tasks_is_holdout()
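
The rcu_tasks_is_holdout() hits (lines 1006-1009) exclude one special case: the idle task of a CPU that RCU currently considers offline. A sketch of just that test; rcu_cpu_online() is RCU-internal (kernel/rcu/rcu.h), so this only builds in that context:

#include <linux/sched.h>

/* True if t is the idle task of a CPU that RCU considers offline. */
static bool demo_is_offline_idle(struct task_struct *t)
{
        int cpu = task_cpu(t);

        return t == idle_task(cpu) && !rcu_cpu_online(cpu);
}
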
1032 int cpu; in rcu_tasks_postscan() local
1058 for_each_possible_cpu(cpu) { in rcu_tasks_postscan()
1060 struct rcu_tasks_percpu *rtpcp = per_cpu_ptr(rcu_tasks.rtpcpu, cpu); in rcu_tasks_postscan()
1097 int cpu; in check_holdout_task() local
1116 cpu = task_cpu(t); in check_holdout_task()
1119 "N."[cpu < 0 || !tick_nohz_full_cpu(cpu)], in check_holdout_task()
1121 data_race(t->rcu_tasks_idle_cpu), cpu); in check_holdout_task()
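
The "N."[...] expression at line 1119 (and its ".N"[...] twin at line 1914) is a small C idiom rather than anything RCU-specific: a string literal is an array, so indexing it with a 0-or-1 condition picks a single status character for the stall report. A standalone userspace illustration:

#include <stdbool.h>
#include <stdio.h>

int main(void)
{
        bool nohz_full = true;

        /* Index 0 selects 'N', index 1 selects '.', so 'N' means nohz_full here. */
        printf("flag: %c\n", "N."[!nohz_full]);
        return 0;
}

Line 1119 inverts its condition the same way, which is why the literal is "N." there but ".N" at line 1914.
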
1669 int cpu = task_cpu(t); in trc_inspect_reader() local
1671 bool ofl = cpu_is_offline(cpu); in trc_inspect_reader()
1683 if (!rcu_watching_zero_in_eqs(cpu, &t->trc_reader_nesting)) in trc_inspect_reader()
1717 int cpu; in trc_wait_for_one_reader() local
1749 cpu = task_cpu(t); in trc_wait_for_one_reader()
1752 if (per_cpu(trc_ipi_to_cpu, cpu) || t->trc_ipi_to_cpu >= 0) in trc_wait_for_one_reader()
1755 per_cpu(trc_ipi_to_cpu, cpu) = true; in trc_wait_for_one_reader()
1756 t->trc_ipi_to_cpu = cpu; in trc_wait_for_one_reader()
1758 if (smp_call_function_single(cpu, trc_read_check_handler, t, 0)) { in trc_wait_for_one_reader()
1762 __func__, cpu); in trc_wait_for_one_reader()
1764 per_cpu(trc_ipi_to_cpu, cpu) = false; in trc_wait_for_one_reader()
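
The trc_wait_for_one_reader() hits (lines 1749-1764) show the IPI side of RCU Tasks Trace: at most one probe IPI per CPU is kept in flight, tracked by a per-CPU flag, sent asynchronously, and rolled back if the send fails. A sketch of that dispatch, with demo_ipi_to_cpu and demo_handler() standing in for trc_ipi_to_cpu and trc_read_check_handler():

#include <linux/smp.h>

static DEFINE_PER_CPU(bool, demo_ipi_to_cpu);

static void demo_handler(void *info)
{
        /* Runs on the target CPU; would inspect the task passed via info. */
        __this_cpu_write(demo_ipi_to_cpu, false);
}

static void demo_send_probe(struct task_struct *t)
{
        int cpu = task_cpu(t);

        if (per_cpu(demo_ipi_to_cpu, cpu))
                return;                         /* a probe IPI is already in flight */
        per_cpu(demo_ipi_to_cpu, cpu) = true;
        if (smp_call_function_single(cpu, demo_handler, t, 0)) {       /* 0: don't wait */
                pr_warn("%s(): smp_call_function_single() failed for CPU %d\n",
                        __func__, cpu);
                per_cpu(demo_ipi_to_cpu, cpu) = false;  /* roll back on failure */
        }
}
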
1799 int cpu; in rcu_tasks_trace_pregp_step() local
1805 for_each_possible_cpu(cpu) in rcu_tasks_trace_pregp_step()
1806 WARN_ON_ONCE(per_cpu(trc_ipi_to_cpu, cpu)); in rcu_tasks_trace_pregp_step()
1815 for_each_online_cpu(cpu) { in rcu_tasks_trace_pregp_step()
1827 t = cpu_curr_snapshot(cpu); in rcu_tasks_trace_pregp_step()
1837 for_each_possible_cpu(cpu) { in rcu_tasks_trace_pregp_step()
1838 rtpcp = per_cpu_ptr(rcu_tasks_trace.rtpcpu, cpu); in rcu_tasks_trace_pregp_step()
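
Before a Tasks Trace grace period, rcu_tasks_trace_pregp_step() samples the task running on each online CPU (line 1827) so those tasks can be checked as well. A sketch of that sampling loop; demo_queue_for_inspection() is hypothetical, and the real function's locking and bookkeeping are more involved:

static void demo_queue_for_inspection(struct task_struct *t)
{
        /* Hypothetical: the real code adds qualifying tasks to a holdout list. */
}

static void demo_scan_running_tasks(void)
{
        int cpu;

        cpus_read_lock();                       /* keep the online mask stable */
        for_each_online_cpu(cpu) {
                struct task_struct *t = cpu_curr_snapshot(cpu);

                demo_queue_for_inspection(t);
        }
        cpus_read_unlock();
}
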
1895 int cpu; in show_stalled_task_trace() local
1903 cpu = task_cpu(t); in show_stalled_task_trace()
1914 ".N"[cpu >= 0 && tick_nohz_full_cpu(cpu)], in show_stalled_task_trace()
1919 cpu, cpu_online(cpu) ? "" : "(offline)"); in show_stalled_task_trace()
1926 int cpu; in show_stalled_ipi_trace() local
1928 for_each_possible_cpu(cpu) in show_stalled_ipi_trace()
1929 if (per_cpu(trc_ipi_to_cpu, cpu)) in show_stalled_ipi_trace()
1930 pr_alert("\tIPI outstanding to CPU %d\n", cpu); in show_stalled_ipi_trace()
1974 int cpu; in rcu_tasks_trace_postgp() local
1981 for_each_online_cpu(cpu) in rcu_tasks_trace_postgp()
1982 if (WARN_ON_ONCE(smp_load_acquire(per_cpu_ptr(&trc_ipi_to_cpu, cpu)))) in rcu_tasks_trace_postgp()
1983 smp_call_function_single(cpu, rcu_tasks_trace_empty_fn, NULL, 1); in rcu_tasks_trace_postgp()
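
Finally, rcu_tasks_trace_postgp() (lines 1981-1983) insists that no probe IPI is still pending once the grace period is over: any CPU whose flag is still set triggers a warning and receives a synchronous empty IPI, whose completion implies the earlier handler has finished. A sketch, reusing demo_ipi_to_cpu from the sketch above:

static void demo_empty_fn(void *unused)
{
}

static void demo_flush_stale_ipis(void)
{
        int cpu;

        for_each_online_cpu(cpu)
                if (WARN_ON_ONCE(smp_load_acquire(per_cpu_ptr(&demo_ipi_to_cpu, cpu))))
                        smp_call_function_single(cpu, demo_empty_fn, NULL, 1);  /* 1: wait */
}
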