/linux/arch/alpha/kernel/

perf_event.c
     422  cpuc->idx_mask |= (1<<cpuc->current_idx[j]);   in maybe_change_configuration()
     424  cpuc->config = cpuc->event[0]->hw.config_base;   in maybe_change_configuration()
     462  if (!alpha_check_constraints(cpuc->event, cpuc->evtype, n0+1)) {   in alpha_pmu_add()
     464  cpuc->n_added++;   in alpha_pmu_add()
     503  cpuc->event[j - 1] = cpuc->event[j];   in alpha_pmu_del()
     504  cpuc->evtype[j - 1] = cpuc->evtype[j];   in alpha_pmu_del()
     547  if (cpuc->enabled)   in alpha_pmu_stop()
     568  if (cpuc->enabled)   in alpha_pmu_start()
     724  if (cpuc->enabled)   in alpha_pmu_enable()
     727  cpuc->enabled = 1;   in alpha_pmu_enable()
     [all …]
|
/linux/arch/x86/events/intel/

lbr.c
     204  if (cpuc->lbr_sel)   in __intel_pmu_lbr_enable()
     267  cpuc->last_log_id = 0;   in intel_pmu_lbr_reset()
     453  if (cpuc->lbr_select)   in intel_pmu_lbr_restore()
     539  if (cpuc->lbr_select)   in intel_pmu_lbr_save()
     615  if (!cpuc->lbr_users)   in intel_pmu_lbr_sched_task()
     655  cpuc->lbr_select = 1;   in intel_pmu_lbr_add()
     744  cpuc->lbr_users--;   in intel_pmu_lbr_del()
     825  if (cpuc->lbr_sel) {   in intel_pmu_lbr_read_64()
    1004  cpuc->lbr_users == cpuc->lbr_pebs_users)   in intel_pmu_lbr_read()
    1451  cpuc->lbr_entries[j-1] = cpuc->lbr_entries[j];   in intel_pmu_lbr_filter()
     [all …]
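
The intel_pmu_lbr_add()/intel_pmu_lbr_del() matches above hint at a per-CPU user count (cpuc->lbr_users) that gates the feature. The following is a minimal userspace sketch of that counting idea, not the kernel code; the names lbr_users, hw_enable() and hw_disable() are invented for the example, and when the real driver programs the hardware is not shown here.

```c
#include <stdio.h>

static int lbr_users;	/* per-CPU in the real driver; a plain global here */

static void hw_enable(void)  { printf("LBR on\n"); }
static void hw_disable(void) { printf("LBR off\n"); }

static void lbr_add(void)
{
	if (!lbr_users++)
		hw_enable();	/* only the first user switches it on */
}

static void lbr_del(void)
{
	if (!--lbr_users)
		hw_disable();	/* only the last user switches it off */
}

int main(void)
{
	lbr_add();
	lbr_add();
	lbr_del();
	lbr_del();	/* "LBR off" is printed only here */
	return 0;
}
```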
|
ds.c
     634  if (!cpuc->ds)   in intel_pmu_disable_bts()
    1002  if (cpuc->n_pebs == cpuc->n_pebs_via_pt)   in pebs_needs_sched_cb()
    1005  return cpuc->n_pebs && (cpuc->n_pebs == cpuc->n_large_pebs);   in pebs_needs_sched_cb()
    1032  if (cpuc->n_pebs == cpuc->n_large_pebs) {   in pebs_update_threshold()
    1166  cpuc->n_pebs++;   in intel_pmu_pebs_add()
    1229  if (cpuc->pebs_data_cfg != cpuc->active_pebs_data_cfg) {   in intel_pmu_pebs_enable()
    1231  cpuc->active_pebs_data_cfg = cpuc->pebs_data_cfg;   in intel_pmu_pebs_enable()
    1258  cpuc->n_pebs--;   in intel_pmu_pebs_del()
    1272  if (cpuc->n_pebs == cpuc->n_large_pebs &&   in intel_pmu_pebs_disable()
    1273  cpuc->n_pebs != cpuc->n_pebs_via_pt)   in intel_pmu_pebs_disable()
     [all …]
|
core.c
    2980  cpuc->enabled = 0;   in intel_pmu_handle_irq()
    3433  if (!cpuc->n_excl++)   in intel_get_excl_constraints()
    3503  if (cpuc->excl_cntrs)   in intel_get_event_constraints()
    3520  if (cpuc->is_fake)   in intel_put_excl_constraints()
    3528  if (!--cpuc->n_excl)   in intel_put_excl_constraints()
    3579  if (cpuc->excl_cntrs)   in intel_put_event_constraints()
    4370  cpuc->pmu = NULL;   in init_hybrid_pmu()
    4415  cpuc->lbr_sel = NULL;   in intel_pmu_cpu_starting()
    4454  cpuc->kfree_on_online[0] = cpuc->shared_regs;   in intel_pmu_cpu_starting()
    4464  cpuc->lbr_sel = &cpuc->shared_regs->regs[EXTRA_REG_LBR];   in intel_pmu_cpu_starting()
     [all …]
|
bts.c
     262  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);   in bts_event_start() local
     273  bts->ds_back.bts_buffer_base = cpuc->ds->bts_buffer_base;   in bts_event_start()
     274  bts->ds_back.bts_absolute_maximum = cpuc->ds->bts_absolute_maximum;   in bts_event_start()
     275  bts->ds_back.bts_interrupt_threshold = cpuc->ds->bts_interrupt_threshold;   in bts_event_start()
     307  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);   in bts_event_stop() local
     332  cpuc->ds->bts_index = bts->ds_back.bts_buffer_base;   in bts_event_stop()
     333  cpuc->ds->bts_buffer_base = bts->ds_back.bts_buffer_base;   in bts_event_stop()
     334  cpuc->ds->bts_absolute_maximum = bts->ds_back.bts_absolute_maximum;   in bts_event_stop()
     335  cpuc->ds->bts_interrupt_threshold = bts->ds_back.bts_interrupt_threshold;   in bts_event_stop()
     522  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);   in bts_event_add() local
     [all …]
|
knc.c
     216  struct cpu_hw_events *cpuc;   in knc_pmu_handle_irq() local
     221  cpuc = this_cpu_ptr(&cpu_hw_events);   in knc_pmu_handle_irq()
     243  struct perf_event *event = cpuc->events[bit];   in knc_pmu_handle_irq()
     247  if (!test_bit(bit, cpuc->active_mask))   in knc_pmu_handle_irq()
     268  if (cpuc->enabled)   in knc_pmu_handle_irq()
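
The knc_pmu_handle_irq() matches show the usual overflow-handling shape: for each set bit in a status word, look up the per-CPU event slot and skip counters that are not in active_mask. Below is an illustrative userspace sketch of that loop only; fake_cpu_state, handle_overflow() and the bit manipulation are stand-ins, not the kernel's helpers.

```c
#include <stdint.h>
#include <stdio.h>

#define NUM_COUNTERS 4

struct fake_event { const char *name; };

struct fake_cpu_state {
	struct fake_event *events[NUM_COUNTERS]; /* counter index -> event */
	uint64_t active_mask;                    /* which counters are in use */
};

static void handle_overflow(struct fake_event *ev, int bit)
{
	printf("overflow on counter %d (%s)\n", bit, ev->name);
}

static void handle_irq(struct fake_cpu_state *cpuc, uint64_t status)
{
	while (status) {
		int bit = __builtin_ctzll(status);   /* lowest set bit */
		status &= status - 1;                /* clear it */

		if (!(cpuc->active_mask & (1ULL << bit)))
			continue;                    /* not one of ours */

		handle_overflow(cpuc->events[bit], bit);
	}
}

int main(void)
{
	struct fake_event cycles = { "cycles" }, insns = { "instructions" };
	struct fake_cpu_state cpuc = {
		.events = { [0] = &cycles, [2] = &insns },
		.active_mask = (1ULL << 0) | (1ULL << 2),
	};

	handle_irq(&cpuc, 0x5);	/* counters 0 and 2 overflowed */
	return 0;
}
```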
|
p4.c
     919  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);   in p4_pmu_disable_all() local
     923  struct perf_event *event = cpuc->events[idx];   in p4_pmu_disable_all()
     924  if (!test_bit(idx, cpuc->active_mask))   in p4_pmu_disable_all()
     998  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);   in p4_pmu_enable_all() local
    1002  struct perf_event *event = cpuc->events[idx];   in p4_pmu_enable_all()
    1003  if (!test_bit(idx, cpuc->active_mask))   in p4_pmu_enable_all()
    1012  struct cpu_hw_events *cpuc;   in p4_pmu_handle_irq() local
    1018  cpuc = this_cpu_ptr(&cpu_hw_events);   in p4_pmu_handle_irq()
    1023  if (!test_bit(idx, cpuc->active_mask)) {   in p4_pmu_handle_irq()
    1030  event = cpuc->events[idx];   in p4_pmu_handle_irq()
     [all …]
|
/linux/arch/x86/events/

core.c
     773  return cpuc->pmu;   in x86_get_pmu()
    1145  cpuc->n_pair++;   in collect_event()
    1295  int n_running = cpuc->n_events - cpuc->n_added;   in x86_pmu_enable()
    1647  if (i >= cpuc->n_events - cpuc->n_added)   in x86_pmu_del()
    1654  cpuc->event_list[i-1] = cpuc->event_list[i];   in x86_pmu_del()
    1655  cpuc->event_constraint[i-1] = cpuc->event_constraint[i];   in x86_pmu_del()
    2326  kfree(cpuc);   in free_fake_cpuc()
    2334  cpuc = kzalloc(sizeof(*cpuc), GFP_KERNEL);   in allocate_fake_cpuc()
    2335  if (!cpuc)   in allocate_fake_cpuc()
    2353  return cpuc;   in allocate_fake_cpuc()
     [all …]
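
The x86_pmu_del() lines above (and the matching ones in the alpha and sparc drivers) remove an event from the per-CPU list by shifting the later entries down one slot. A minimal userspace sketch of that bookkeeping, with invented toy_cpu_events/event_list names, looks like this:

```c
#include <stdio.h>

#define MAX_EVENTS 8

struct toy_event { int id; };

struct toy_cpu_events {
	struct toy_event *event_list[MAX_EVENTS];
	int n_events;
};

static void toy_del(struct toy_cpu_events *cpuc, struct toy_event *ev)
{
	int i;

	for (i = 0; i < cpuc->n_events; i++) {
		if (cpuc->event_list[i] != ev)
			continue;
		/* Close the gap, preserving the order of the remaining events. */
		for (; i < cpuc->n_events - 1; i++)
			cpuc->event_list[i] = cpuc->event_list[i + 1];
		cpuc->n_events--;
		return;
	}
}

int main(void)
{
	struct toy_event a = {1}, b = {2}, c = {3};
	struct toy_cpu_events cpuc = { .event_list = {&a, &b, &c}, .n_events = 3 };

	toy_del(&cpuc, &b);
	for (int i = 0; i < cpuc.n_events; i++)
		printf("slot %d -> event %d\n", i, cpuc.event_list[i]->id);
	return 0;
}
```

Keeping the list compact and ordered is what lets the scheduling code treat the first n_events entries as the current working set.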
|
perf_event.h
     754  (*get_event_constraints)(struct cpu_hw_events *cpuc,
     758  void (*put_event_constraints)(struct cpu_hw_events *cpuc,
     761  void (*start_scheduling)(struct cpu_hw_events *cpuc);
     765  void (*stop_scheduling)(struct cpu_hw_events *cpuc);
     857  void (*lbr_read)(struct cpu_hw_events *cpuc);
    1267  x86_get_event_constraints(struct cpu_hw_events *cpuc, int idx,
    1270  extern int intel_cpuc_prepare(struct cpu_hw_events *cpuc, int cpu);
    1271  extern void intel_cpuc_finish(struct cpu_hw_events *cpuc);
    1371  void intel_pmu_lbr_read_32(struct cpu_hw_events *cpuc);
    1373  void intel_pmu_lbr_read_64(struct cpu_hw_events *cpuc);
     [all …]
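
These declarations show the vtable style used throughout the x86 events code: the vendor-specific callbacks all take the per-CPU cpu_hw_events state as an argument. A cut-down sketch of the same shape, with made-up toy_* names rather than the real struct x86_pmu layout:

```c
#include <stdio.h>

struct toy_cpu_hw_events { int n_events; };

struct toy_pmu_ops {
	void (*start_scheduling)(struct toy_cpu_hw_events *cpuc);
	void (*stop_scheduling)(struct toy_cpu_hw_events *cpuc);
};

static void toy_start(struct toy_cpu_hw_events *cpuc)
{
	printf("start scheduling, %d events\n", cpuc->n_events);
}

static void toy_stop(struct toy_cpu_hw_events *cpuc)
{
	printf("stop scheduling\n");
}

int main(void)
{
	struct toy_pmu_ops ops = { .start_scheduling = toy_start,
				   .stop_scheduling  = toy_stop };
	struct toy_cpu_hw_events cpuc = { .n_events = 2 };

	ops.start_scheduling(&cpuc);
	ops.stop_scheduling(&cpuc);
	return 0;
}
```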
|
/linux/arch/sparc/kernel/

perf_event.c
     977  cpuc->pcr[0] |= cpuc->event[0]->hw.config_base;   in calculate_single_pcr()
    1019  if (cpuc->n_added)   in update_pcrs_for_enable()
    1034  if (cpuc->enabled)   in sparc_pmu_enable()
    1037  cpuc->enabled = 1;   in sparc_pmu_enable()
    1055  cpuc->enabled = 0;   in sparc_pmu_disable()
    1133  cpuc->event[i - 1] = cpuc->event[i];   in sparc_pmu_del()
    1134  cpuc->events[i - 1] = cpuc->events[i];   in sparc_pmu_del()
    1402  if (sparc_check_constraints(cpuc->event, cpuc->events, n0 + 1))   in sparc_pmu_add()
    1406  cpuc->n_events++;   in sparc_pmu_add()
    1407  cpuc->n_added++;   in sparc_pmu_add()
     [all …]
|
/linux/arch/x86/events/amd/

core.c
     356  struct amd_nb *nb = cpuc->amd_nb;   in amd_has_nb()
     385  struct amd_nb *nb = cpuc->amd_nb;   in __amd_put_nb_event_constraints()
     450  if (cpuc->is_fake)   in __amd_get_nb_event_constraints()
     517  WARN_ON_ONCE(cpuc->amd_nb);   in amd_pmu_cpu_prepare()
     523  if (!cpuc->amd_nb)   in amd_pmu_cpu_prepare()
     550  *onln = cpuc->amd_nb;   in amd_pmu_cpu_starting()
     551  cpuc->amd_nb = nb;   in amd_pmu_cpu_starting()
     556  cpuc->amd_nb->nb_id = nb_id;   in amd_pmu_cpu_starting()
     557  cpuc->amd_nb->refcnt++;   in amd_pmu_cpu_starting()
     897  --cpuc->n_pair;   in amd_put_event_constraints_f17h()
     [all …]
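
The amd_pmu_cpu_starting() matches suggest the sharing pattern used for the northbridge state: CPUs on the same node point at one structure and keep it alive with a reference count. The sketch below only models that idea in userspace; the node lookup, toy_nb contents and attach_nb() are invented, and the real code's allocate-then-adopt dance at CPU hotplug time is not reproduced.

```c
#include <stdio.h>
#include <stdlib.h>

#define NR_CPUS 4

struct toy_nb {
	int nb_id;
	int refcnt;
};

static struct toy_nb *cpu_nb[NR_CPUS];

/* Attach @cpu to the shared structure for @nb_id, creating it if needed. */
static struct toy_nb *attach_nb(int cpu, int nb_id)
{
	for (int i = 0; i < NR_CPUS; i++) {
		if (cpu_nb[i] && cpu_nb[i]->nb_id == nb_id) {
			cpu_nb[i]->refcnt++;		/* share the existing one */
			return cpu_nb[cpu] = cpu_nb[i];
		}
	}
	cpu_nb[cpu] = calloc(1, sizeof(*cpu_nb[cpu]));
	cpu_nb[cpu]->nb_id = nb_id;
	cpu_nb[cpu]->refcnt = 1;
	return cpu_nb[cpu];
}

int main(void)
{
	attach_nb(0, 0);
	attach_nb(1, 0);	/* same node: reuses the structure */
	attach_nb(2, 1);
	printf("node 0 refcnt = %d\n", cpu_nb[0]->refcnt);	/* prints 2 */
	return 0;
}
```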
|
/linux/arch/sh/kernel/

perf_event.c
     201  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);   in sh_pmu_stop() local
     207  cpuc->events[idx] = NULL;   in sh_pmu_stop()
     219  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);   in sh_pmu_start() local
     229  cpuc->events[idx] = event;   in sh_pmu_start()
     236  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);   in sh_pmu_del() local
     239  __clear_bit(event->hw.idx, cpuc->used_mask);   in sh_pmu_del()
     246  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);   in sh_pmu_add() local
     253  if (__test_and_set_bit(idx, cpuc->used_mask)) {   in sh_pmu_add()
     254  idx = find_first_zero_bit(cpuc->used_mask, sh_pmu->num_events);   in sh_pmu_add()
     258  __set_bit(idx, cpuc->used_mask);   in sh_pmu_add()
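
The sh_pmu_add() matches show the common used_mask allocation scheme: try the event's preferred counter index, and if that bit is already taken fall back to the first free bit. A userspace sketch of the same logic, with plain word operations standing in for the kernel's __test_and_set_bit()/find_first_zero_bit() helpers:

```c
#include <stdio.h>

#define NUM_COUNTERS 6

static unsigned long used_mask;	/* one bit per hardware counter */

static int alloc_counter(int preferred)
{
	int idx = preferred;

	if (used_mask & (1UL << idx)) {
		/* Preferred slot busy: scan for the first free one. */
		for (idx = 0; idx < NUM_COUNTERS; idx++)
			if (!(used_mask & (1UL << idx)))
				break;
		if (idx == NUM_COUNTERS)
			return -1;	/* all counters in use */
	}
	used_mask |= 1UL << idx;
	return idx;
}

static void free_counter(int idx)
{
	used_mask &= ~(1UL << idx);
}

int main(void)
{
	int a = alloc_counter(0);
	int b = alloc_counter(0);	/* collides, falls back to the next slot */

	printf("a=%d b=%d\n", a, b);
	free_counter(a);
	free_counter(b);
	return 0;
}
```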
|
/linux/arch/arm/kernel/

perf_event_xscale.c
     149  struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);   in xscale1pmu_handle_irq() local
     174  struct perf_event *event = cpuc->events[idx];   in xscale1pmu_handle_irq()
     275  xscale1pmu_get_event_idx(struct pmu_hw_events *cpuc,   in xscale1pmu_get_event_idx() argument
     280  if (test_and_set_bit(XSCALE_CYCLE_COUNTER, cpuc->used_mask))   in xscale1pmu_get_event_idx()
     285  if (!test_and_set_bit(XSCALE_COUNTER1, cpuc->used_mask))   in xscale1pmu_get_event_idx()
     288  if (!test_and_set_bit(XSCALE_COUNTER0, cpuc->used_mask))   in xscale1pmu_get_event_idx()
     298  clear_bit(event->hw.idx, cpuc->used_mask);   in xscalepmu_clear_event_idx()
     520  struct perf_event *event = cpuc->events[idx];   in xscale2pmu_handle_irq()
     649  xscale2pmu_get_event_idx(struct pmu_hw_events *cpuc,   in xscale2pmu_get_event_idx() argument
     652  int idx = xscale1pmu_get_event_idx(cpuc, event);   in xscale2pmu_get_event_idx()
     [all …]
|
perf_event_v6.c
     310  struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);   in armv6pmu_handle_irq() local
     327  struct perf_event *event = cpuc->events[idx];   in armv6pmu_handle_irq()
     388  armv6pmu_get_event_idx(struct pmu_hw_events *cpuc,   in armv6pmu_get_event_idx() argument
     394  if (test_and_set_bit(ARMV6_CYCLE_COUNTER, cpuc->used_mask))   in armv6pmu_get_event_idx()
     403  if (!test_and_set_bit(ARMV6_COUNTER1, cpuc->used_mask))   in armv6pmu_get_event_idx()
     406  if (!test_and_set_bit(ARMV6_COUNTER0, cpuc->used_mask))   in armv6pmu_get_event_idx()
     414  static void armv6pmu_clear_event_idx(struct pmu_hw_events *cpuc,   in armv6pmu_clear_event_idx() argument
     417  clear_bit(event->hw.idx, cpuc->used_mask);   in armv6pmu_clear_event_idx()
|
perf_event_v7.c
    1064  clear_bit(event->hw.idx, cpuc->used_mask);   in armv7pmu_clear_event_idx()
    1626  if (test_and_set_bit(bit, cpuc->used_mask))   in krait_pmu_get_event_idx()
    1630  idx = armv7pmu_get_event_idx(cpuc, event);   in krait_pmu_get_event_idx()
    1632  clear_bit(bit, cpuc->used_mask);   in krait_pmu_get_event_idx()
    1647  armv7pmu_clear_event_idx(cpuc, event);   in krait_pmu_clear_event_idx()
    1650  clear_bit(bit, cpuc->used_mask);   in krait_pmu_clear_event_idx()
    1956  if (test_and_set_bit(bit, cpuc->used_mask))   in scorpion_pmu_get_event_idx()
    1960  idx = armv7pmu_get_event_idx(cpuc, event);   in scorpion_pmu_get_event_idx()
    1962  clear_bit(bit, cpuc->used_mask);   in scorpion_pmu_get_event_idx()
    1977  armv7pmu_clear_event_idx(cpuc, event);   in scorpion_pmu_clear_event_idx()
     [all …]
|
/linux/arch/mips/kernel/

perf_event_mipsxx.c
     341  !test_and_set_bit(i, cpuc->used_mask))   in mipsxx_pmu_alloc_counter()
     369  cpuc->saved_ctrl[idx] |=   in mipsxx_pmu_enable_event()
     374  cpuc->saved_ctrl[idx] |= M_TC_EN_ALL;   in mipsxx_pmu_enable_event()
     387  cpuc->saved_ctrl[idx] |= ctrl;   in mipsxx_pmu_enable_event()
     509  idx = mipsxx_pmu_alloc_counter(cpuc, hwc);   in mipspmu_add()
     521  cpuc->events[idx] = event;   in mipspmu_add()
     544  cpuc->events[idx] = NULL;   in mipspmu_del()
     545  clear_bit(idx, cpuc->used_mask);   in mipspmu_del()
     786  struct perf_event *event = cpuc->events[idx];   in handle_associated_event()
    1601  if (!test_bit(n, cpuc->used_mask))   in mipsxx_pmu_handle_shared_irq()
     [all …]
|
/linux/arch/riscv/kernel/

perf_event.c
     299  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);   in riscv_pmu_add() local
     302  if (cpuc->n_events == riscv_pmu->num_counters)   in riscv_pmu_add()
     314  cpuc->events[hwc->idx] = event;   in riscv_pmu_add()
     315  cpuc->n_events++;   in riscv_pmu_add()
     330  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);   in riscv_pmu_del() local
     333  cpuc->events[hwc->idx] = NULL;   in riscv_pmu_del()
     334  cpuc->n_events--;   in riscv_pmu_del()
|
/linux/arch/arm64/kernel/

perf_event.c
     804  struct perf_event *event = cpuc->events[idx];   in armv8pmu_handle_irq()
     843  if (!test_and_set_bit(idx, cpuc->used_mask))   in armv8pmu_get_single_idx()
     849  static int armv8pmu_get_chain_idx(struct pmu_hw_events *cpuc,   in armv8pmu_get_chain_idx() argument
     859  if (!test_and_set_bit(idx, cpuc->used_mask)) {   in armv8pmu_get_chain_idx()
     861  if (!test_and_set_bit(idx - 1, cpuc->used_mask))   in armv8pmu_get_chain_idx()
     864  clear_bit(idx, cpuc->used_mask);   in armv8pmu_get_chain_idx()
     870  static int armv8pmu_get_event_idx(struct pmu_hw_events *cpuc,   in armv8pmu_get_event_idx() argument
     888  return armv8pmu_get_chain_idx(cpuc, cpu_pmu);   in armv8pmu_get_event_idx()
     890  return armv8pmu_get_single_idx(cpuc, cpu_pmu);   in armv8pmu_get_event_idx()
     898  clear_bit(idx, cpuc->used_mask);   in armv8pmu_clear_event_idx()
     [all …]
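
armv8pmu_get_chain_idx() above claims two adjacent counters for a chained event: take one slot, then its lower neighbour, and release the first slot again if the neighbour is busy. The sketch below models only that claim-and-rollback idea in userspace; the simplified test_and_set()/clear() helpers and the counter indexing stand in for the kernel's atomic bitops and the real ARMv8 index layout.

```c
#include <stdio.h>

#define NUM_COUNTERS 8

static unsigned long used_mask;

static int test_and_set(int bit)
{
	int old = (used_mask >> bit) & 1;
	used_mask |= 1UL << bit;
	return old;
}

static void clear(int bit)
{
	used_mask &= ~(1UL << bit);
}

/* Claim an adjacent (idx, idx - 1) pair; return the high index or -1. */
static int get_chain_idx(void)
{
	for (int idx = 1; idx < NUM_COUNTERS; idx += 2) {
		if (test_and_set(idx))
			continue;		/* high half already taken */
		if (!test_and_set(idx - 1))
			return idx;		/* got both halves of the pair */
		clear(idx);			/* neighbour busy: roll back */
	}
	return -1;
}

int main(void)
{
	used_mask = 1UL << 0;	/* counter 0 already taken */
	printf("chain idx = %d\n", get_chain_idx());	/* pair (3, 2) */
	return 0;
}
```

Rolling back the first bit before moving on keeps used_mask consistent even when only half of a pair could be claimed.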
|
/linux/arch/nds32/kernel/

perf_event_cpu.c
     230  struct pmu_hw_events *cpuc = cpu_pmu->get_hw_events();   in nds32_pmu_handle_irq() local
     251  struct perf_event *event = cpuc->events[idx];   in nds32_pmu_handle_irq()
     551  static int nds32_pmu_get_event_idx(struct pmu_hw_events *cpuc,   in nds32_pmu_get_event_idx() argument
     574  if (!test_and_set_bit(idx, cpuc->used_mask))   in nds32_pmu_get_event_idx()
     576  if (!test_and_set_bit(NDS32_IDX_COUNTER0, cpuc->used_mask))   in nds32_pmu_get_event_idx()
     578  if (!test_and_set_bit(NDS32_IDX_COUNTER1, cpuc->used_mask))   in nds32_pmu_get_event_idx()
     581  if (!test_and_set_bit(idx, cpuc->used_mask))   in nds32_pmu_get_event_idx()
     583  else if (!test_and_set_bit(NDS32_IDX_COUNTER1, cpuc->used_mask))   in nds32_pmu_get_event_idx()
     586  (NDS32_IDX_CYCLE_COUNTER, cpuc->used_mask))   in nds32_pmu_get_event_idx()
     589  if (!test_and_set_bit(idx, cpuc->used_mask))   in nds32_pmu_get_event_idx()
|
/linux/arch/x86/events/zhaoxin/

core.c
     357  struct cpu_hw_events *cpuc;   in zhaoxin_pmu_handle_irq() local
     362  cpuc = this_cpu_ptr(&cpu_hw_events);   in zhaoxin_pmu_handle_irq()
     387  struct perf_event *event = cpuc->events[bit];   in zhaoxin_pmu_handle_irq()
     391  if (!test_bit(bit, cpuc->active_mask))   in zhaoxin_pmu_handle_irq()
     422  zhaoxin_get_event_constraints(struct cpu_hw_events *cpuc, int idx,   in zhaoxin_get_event_constraints() argument
|
/linux/kernel/rcu/

srcutree.c
     250  struct srcu_data *cpuc = per_cpu_ptr(ssp->sda, cpu);   in srcu_readers_lock_idx() local
     252  sum += READ_ONCE(cpuc->srcu_lock_count[idx]);   in srcu_readers_lock_idx()
     267  struct srcu_data *cpuc = per_cpu_ptr(ssp->sda, cpu);   in srcu_readers_unlock_idx() local
     269  sum += READ_ONCE(cpuc->srcu_unlock_count[idx]);   in srcu_readers_unlock_idx()
     336  struct srcu_data *cpuc = per_cpu_ptr(ssp->sda, cpu);   in srcu_readers_active() local
     338  sum += READ_ONCE(cpuc->srcu_lock_count[0]);   in srcu_readers_active()
     339  sum += READ_ONCE(cpuc->srcu_lock_count[1]);   in srcu_readers_active()
     340  sum -= READ_ONCE(cpuc->srcu_unlock_count[0]);   in srcu_readers_active()
     341  sum -= READ_ONCE(cpuc->srcu_unlock_count[1]);   in srcu_readers_active()
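
The srcu_readers_active() matches show the accounting idea: each CPU keeps independent lock and unlock counts, and readers are known to be active only if the sums over all CPUs differ. The userspace model below keeps just that arithmetic; plain arrays replace per_cpu_ptr()/READ_ONCE(), so the memory-ordering issues the real code handles are deliberately ignored, and the toy_srcu_data fields are simplified stand-ins.

```c
#include <stdio.h>

#define NR_CPUS 4

struct toy_srcu_data {
	unsigned long lock_count[2];	/* one slot per grace-period phase */
	unsigned long unlock_count[2];
};

static struct toy_srcu_data sda[NR_CPUS];

static int readers_active(void)
{
	unsigned long sum = 0;

	for (int cpu = 0; cpu < NR_CPUS; cpu++) {
		sum += sda[cpu].lock_count[0] + sda[cpu].lock_count[1];
		sum -= sda[cpu].unlock_count[0] + sda[cpu].unlock_count[1];
	}
	return sum != 0;	/* locks and unlocks balance out when idle */
}

int main(void)
{
	sda[1].lock_count[0] = 3;	/* CPU 1 entered three read sections */
	sda[2].unlock_count[0] = 2;	/* two of them finished on CPU 2 */
	printf("readers active: %d\n", readers_active());
	return 0;
}
```

Splitting the counters per CPU (and per phase) is what lets readers avoid any shared atomic counter on the fast path.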
|
/linux/arch/csky/kernel/

perf_event.c
    1106  struct pmu_hw_events *cpuc = this_cpu_ptr(csky_pmu.hw_events);   in csky_pmu_handle_irq() local
    1124  struct perf_event *event = cpuc->events[idx];   in csky_pmu_handle_irq()
|