Lines Matching refs:per_cpu_ptr (all hits are in kernel/sched/topology.c)

910 sibling = *per_cpu_ptr(sdd->sd, i); in build_balance_mask()
971 sg->sgc = *per_cpu_ptr(sdd->sgc, cpu); in init_overlap_sched_group()
1031 sibling = *per_cpu_ptr(sdd->sd, i); in build_overlap_sched_groups()
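
The three hits above are one and the same pattern: each field of struct sd_data is a per-CPU array of pointers, so *per_cpu_ptr(sdd->sd, i) is a double indirection, first into CPU i's slot, then through the pointer stored there. Below is a minimal userspace sketch of that pattern, assuming a toy per_cpu_ptr macro over a plain array; the struct and field names follow the kernel, everything else is illustrative.

    #include <stdio.h>
    #include <stdlib.h>

    #define NR_CPUS 4

    /* Toy stand-in: a per-CPU variable becomes a plain array indexed by cpu. */
    #define per_cpu_ptr(arr, cpu) (&(arr)[cpu])

    struct sched_domain { int span_weight; };

    /* Mirrors the shape of the kernel's sd_data: per-CPU arrays of pointers. */
    struct sd_data { struct sched_domain *sd[NR_CPUS]; };

    int main(void)
    {
        struct sd_data sdd = { { 0 } };

        /* Install one object per CPU (what __sdt_alloc does for real). */
        for (int i = 0; i < NR_CPUS; i++) {
            struct sched_domain *sd = calloc(1, sizeof(*sd));
            sd->span_weight = i + 1;
            *per_cpu_ptr(sdd.sd, i) = sd;
        }

        /* build_balance_mask-style read: CPU i's installed pointer. */
        for (int i = 0; i < NR_CPUS; i++) {
            struct sched_domain *sibling = *per_cpu_ptr(sdd.sd, i);
            printf("cpu%d: span_weight=%d\n", i, sibling->span_weight);
        }

        for (int i = 0; i < NR_CPUS; i++)
            free(*per_cpu_ptr(sdd.sd, i));
        return 0;
    }
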
1181 struct sched_domain *sd = *per_cpu_ptr(sdd->sd, cpu); in get_group()
1189 sg = *per_cpu_ptr(sdd->sg, cpu); in get_group()
1190 sg->sgc = *per_cpu_ptr(sdd->sgc, cpu); in get_group()
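
The get_group() hits fetch a CPU's per-CPU objects in one place and wire the group to its capacity structure before handing it back. A reduced sketch of just that wiring step (the real get_group() also consults sd to pick a representative CPU and takes reference counts, all elided here):

    #include <assert.h>
    #include <stdlib.h>

    #define NR_CPUS 4
    #define per_cpu_ptr(arr, cpu) (&(arr)[cpu])

    struct sched_group_capacity { int capacity; };
    struct sched_group { struct sched_group_capacity *sgc; };

    struct sd_data {
        struct sched_group *sg[NR_CPUS];
        struct sched_group_capacity *sgc[NR_CPUS];
    };

    /* get_group-style: fetch cpu's group and attach its capacity object. */
    static struct sched_group *get_group(int cpu, struct sd_data *sdd)
    {
        struct sched_group *sg = *per_cpu_ptr(sdd->sg, cpu);

        sg->sgc = *per_cpu_ptr(sdd->sgc, cpu);
        return sg;
    }

    int main(void)
    {
        struct sd_data sdd;

        for (int i = 0; i < NR_CPUS; i++) {
            *per_cpu_ptr(sdd.sg, i) = calloc(1, sizeof(struct sched_group));
            *per_cpu_ptr(sdd.sgc, i) = calloc(1, sizeof(struct sched_group_capacity));
        }

        struct sched_group *sg = get_group(2, &sdd);
        assert(sg->sgc == *per_cpu_ptr(sdd.sgc, 2));

        for (int i = 0; i < NR_CPUS; i++) {
            free(*per_cpu_ptr(sdd.sg, i));
            free(*per_cpu_ptr(sdd.sgc, i));
        }
        return 0;
    }
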
1495 WARN_ON_ONCE(*per_cpu_ptr(sdd->sd, cpu) != sd); in claim_allocations()
1496 *per_cpu_ptr(sdd->sd, cpu) = NULL; in claim_allocations()
1498 if (atomic_read(&(*per_cpu_ptr(sdd->sds, cpu))->ref)) in claim_allocations()
1499 *per_cpu_ptr(sdd->sds, cpu) = NULL; in claim_allocations()
1501 if (atomic_read(&(*per_cpu_ptr(sdd->sg, cpu))->ref)) in claim_allocations()
1502 *per_cpu_ptr(sdd->sg, cpu) = NULL; in claim_allocations()
1504 if (atomic_read(&(*per_cpu_ptr(sdd->sgc, cpu))->ref)) in claim_allocations()
1505 *per_cpu_ptr(sdd->sgc, cpu) = NULL; in claim_allocations()
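
claim_allocations() is the ownership hand-off: for each object the finished topology actually took a reference on, the per-CPU slot is set to NULL, so the later __sdt_free() sweep leaves it alone. A sketch of that contract, with a plain int standing in for the kernel's atomic_t ref:

    #include <stdio.h>
    #include <stdlib.h>

    #define NR_CPUS 4
    #define per_cpu_ptr(arr, cpu) (&(arr)[cpu])

    struct sched_group { int ref; };    /* plain int stands in for atomic_t */
    struct sd_data { struct sched_group *sg[NR_CPUS]; };

    /* claim_allocations-style: a raised ref means the topology kept the
     * object, so NULL the slot and let the later sweep skip it. */
    static void claim_allocations(int cpu, struct sd_data *sdd)
    {
        if ((*per_cpu_ptr(sdd->sg, cpu))->ref)
            *per_cpu_ptr(sdd->sg, cpu) = NULL;
    }

    int main(void)
    {
        struct sd_data sdd;
        struct sched_group *owned[NR_CPUS];

        for (int i = 0; i < NR_CPUS; i++) {
            *per_cpu_ptr(sdd.sg, i) = calloc(1, sizeof(struct sched_group));
            owned[i] = *per_cpu_ptr(sdd.sg, i);
        }

        /* Pretend the built topology referenced CPUs 0 and 1's groups. */
        owned[0]->ref = owned[1]->ref = 1;

        for (int i = 0; i < NR_CPUS; i++)
            claim_allocations(i, &sdd);

        /* __sdt_free-style sweep: free(NULL) is a no-op, like kfree(NULL). */
        for (int i = 0; i < NR_CPUS; i++) {
            printf("cpu%d: %s\n", i,
                   *per_cpu_ptr(sdd.sg, i) ? "freed by sweep" : "claimed");
            free(*per_cpu_ptr(sdd.sg, i));
        }

        /* In the kernel the claimed objects die with the topology; here we
         * free them directly so the sketch is leak-free. */
        for (int i = 0; i < NR_CPUS; i++)
            if (!*per_cpu_ptr(sdd.sg, i))
                free(owned[i]);
        return 0;
    }
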
1547 struct sched_domain *sd = *per_cpu_ptr(sdd->sd, cpu); in sd_init()
1643 sd->shared = *per_cpu_ptr(sdd->sds, sd_id); in sd_init()
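
In sd_init(), sd_id is the first CPU of the domain's span (cpumask_first() in the kernel), so every domain covering the same span resolves to the single sched_domain_shared stored in that first CPU's slot. A sketch of that convention, with the span reduced to an explicit first-CPU argument:

    #include <assert.h>
    #include <stdlib.h>

    #define NR_CPUS 4
    #define per_cpu_ptr(arr, cpu) (&(arr)[cpu])

    struct sched_domain_shared { int ref; };
    struct sched_domain { struct sched_domain_shared *shared; };
    struct sd_data { struct sched_domain_shared *sds[NR_CPUS]; };

    /* sd_init-style: domains with the same span share the sds object
     * stored in the slot of the span's first CPU. */
    static void attach_shared(struct sched_domain *sd, struct sd_data *sdd,
                              int first_cpu_of_span)
    {
        sd->shared = *per_cpu_ptr(sdd->sds, first_cpu_of_span);
        sd->shared->ref++;    /* kernel: atomic_inc(&sd->shared->ref) */
    }

    int main(void)
    {
        struct sd_data sdd;
        struct sched_domain sd0, sd1;

        for (int i = 0; i < NR_CPUS; i++)
            *per_cpu_ptr(sdd.sds, i) = calloc(1, sizeof(struct sched_domain_shared));

        /* Two domains spanning CPUs 0-3 resolve to the same shared object. */
        attach_shared(&sd0, &sdd, 0);
        attach_shared(&sd1, &sdd, 0);
        assert(sd0.shared == sd1.shared && sd0.shared->ref == 2);

        for (int i = 0; i < NR_CPUS; i++)
            free(*per_cpu_ptr(sdd.sds, i));
        return 0;
    }
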
2202 *per_cpu_ptr(sdd->sd, j) = sd; in __sdt_alloc()
2209 *per_cpu_ptr(sdd->sds, j) = sds; in __sdt_alloc()
2218 *per_cpu_ptr(sdd->sg, j) = sg; in __sdt_alloc()
2229 *per_cpu_ptr(sdd->sgc, j) = sgc; in __sdt_alloc()
2248 sd = *per_cpu_ptr(sdd->sd, j); in __sdt_free()
2251 kfree(*per_cpu_ptr(sdd->sd, j)); in __sdt_free()
2255 kfree(*per_cpu_ptr(sdd->sds, j)); in __sdt_free()
2257 kfree(*per_cpu_ptr(sdd->sg, j)); in __sdt_free()
2259 kfree(*per_cpu_ptr(sdd->sgc, j)); in __sdt_free()
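
__sdt_alloc() and __sdt_free() bracket the lifecycle: the allocator installs a fresh object into every CPU's slot, and the teardown reads each slot back, checking for NULL before dereferencing (the slot may have been claimed above) and relying on kfree(NULL) being a no-op. A sketch with free() in place of kfree() and SD_OVERLAP as the only flag:

    #include <stdio.h>
    #include <stdlib.h>

    #define NR_CPUS 4
    #define per_cpu_ptr(arr, cpu) (&(arr)[cpu])

    #define SD_OVERLAP 0x1

    struct sched_domain { int flags; };
    struct sd_data { struct sched_domain *sd[NR_CPUS]; };

    /* __sdt_alloc-style: populate every CPU's slot. */
    static int sdt_alloc(struct sd_data *sdd)
    {
        for (int j = 0; j < NR_CPUS; j++) {
            struct sched_domain *sd = calloc(1, sizeof(*sd));
            if (!sd)
                return -1;
            *per_cpu_ptr(sdd->sd, j) = sd;
        }
        return 0;
    }

    /* __sdt_free-style: the slot may already be NULL (claimed), so check
     * before dereferencing; free(NULL) is a harmless no-op. */
    static void sdt_free(struct sd_data *sdd)
    {
        for (int j = 0; j < NR_CPUS; j++) {
            struct sched_domain *sd = *per_cpu_ptr(sdd->sd, j);

            if (sd && (sd->flags & SD_OVERLAP))
                puts("would also free this domain's group list");
            free(*per_cpu_ptr(sdd->sd, j));
            *per_cpu_ptr(sdd->sd, j) = NULL;
        }
    }

    int main(void)
    {
        struct sd_data sdd = { { 0 } };

        if (sdt_alloc(&sdd) != 0) {
            sdt_free(&sdd);
            return 1;
        }
        (*per_cpu_ptr(sdd.sd, 0))->flags = SD_OVERLAP;  /* exercise the check */
        sdt_free(&sdd);
        return 0;
    }
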
2374 *per_cpu_ptr(d.sd, i) = sd; in build_sched_domains()
2384 for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { in build_sched_domains()
2404 for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { in build_sched_domains()
2459 for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { in build_sched_domains()
2469 sd = *per_cpu_ptr(d.sd, i); in build_sched_domains()
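
The build_sched_domains() hits all share one iteration shape: start from the base domain installed in d.sd for a CPU and walk up the parent links, roughly one pass per concern (group building, capacity init, attach). A sketch of that walk, with the per-level work reduced to a printf and the hierarchy to two levels:

    #include <stdio.h>
    #include <stdlib.h>

    #define NR_CPUS 2
    #define per_cpu_ptr(arr, cpu) (&(arr)[cpu])

    struct sched_domain {
        struct sched_domain *parent;
        int level;
    };

    struct s_data { struct sched_domain *sd[NR_CPUS]; };

    int main(void)
    {
        struct s_data d = { { 0 } };

        /* Build a two-level hierarchy per CPU: base -> parent. */
        for (int i = 0; i < NR_CPUS; i++) {
            struct sched_domain *base = calloc(1, sizeof(*base));
            struct sched_domain *top = calloc(1, sizeof(*top));

            top->level = 1;
            base->parent = top;
            *per_cpu_ptr(d.sd, i) = base;
        }

        /* build_sched_domains-style walk: base domain up through parents. */
        for (int i = 0; i < NR_CPUS; i++) {
            for (struct sched_domain *sd = *per_cpu_ptr(d.sd, i); sd;
                 sd = sd->parent)
                printf("cpu%d level%d\n", i, sd->level);
        }

        for (int i = 0; i < NR_CPUS; i++) {
            free((*per_cpu_ptr(d.sd, i))->parent);
            free(*per_cpu_ptr(d.sd, i));
        }
        return 0;
    }
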