/arch/x86/events/amd/uncore.c
      50  int num_counters;                                  member
     212  for (i = 0; i < pmu->num_counters; i++) {          in amd_uncore_add()
     221  for (i = 0; i < pmu->num_counters; i++) {          in amd_uncore_add()
     257  for (i = 0; i < pmu->num_counters; i++) {          in amd_uncore_del()
     544  pmu->num_counters,                                 in amd_uncore_ctx_init()
     721  int num_counters;                                  in amd_uncore_df_ctx_init() (local)
     728  if (!num_counters)                                 in amd_uncore_df_ctx_init()
     744  pmu->num_counters = num_counters;                  in amd_uncore_df_ctx_init()
     855  int num_counters;                                  in amd_uncore_l3_ctx_init() (local)
     862  if (!num_counters)                                 in amd_uncore_l3_ctx_init()
    [all …]
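
The AMD uncore driver keeps a per-PMU `num_counters` and walks indices `0..num_counters-1` to claim or release a hardware counter slot, as in the loops above. A minimal userspace sketch of that scan, with hypothetical `toy_*` names standing in for the kernel's structures:

```c
#include <stdio.h>

#define MAX_COUNTERS 16

struct toy_event { int idx; };

struct toy_pmu {
	int num_counters;
	struct toy_event *events[MAX_COUNTERS]; /* slot -> owning event */
};

/* Scan for a free counter slot, mirroring the loops at lines 212/221. */
static int toy_pmu_add(struct toy_pmu *pmu, struct toy_event *ev)
{
	int i;

	for (i = 0; i < pmu->num_counters; i++) {
		if (!pmu->events[i]) {
			pmu->events[i] = ev;
			ev->idx = i;
			return 0;
		}
	}
	return -1; /* all counters busy */
}

int main(void)
{
	struct toy_pmu pmu = { .num_counters = 4 };
	struct toy_event ev;

	if (!toy_pmu_add(&pmu, &ev))
		printf("event placed on counter %d of %d\n",
		       ev.idx, pmu.num_counters);
	return 0;
}
```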
|
/arch/x86/events/intel/uncore_snb.c
     330  .num_counters = 2,
     348  .num_counters = 2,
     408  .num_counters = 4,
     446  .num_counters = 2,
     473  .num_counters = 1,
     487  .num_counters = 1,
     591  .num_counters = 2,
     603  .num_counters = 2,
     617  .num_counters = 1,
     644  .num_counters = 2,
    [all …]
|
/arch/x86/events/intel/uncore_snbep.c
    1048  .num_counters = 4,
    1158  .num_counters = 4,
    1271  .num_counters = 4,
    1808  .num_counters = 4,
    1831  .num_counters = 4,
    1921  .num_counters = 4,
    1944  .num_counters = 4,
    2113  .num_counters = 2,
    4831  .num_counters = 4,
    5014  .num_counters = 4,
    [all …]
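
Both uncore_snb.c and uncore_snbep.c (and uncore_nhmex.c below) consist largely of static per-unit descriptor tables where `.num_counters` is fixed at build time with C99 designated initializers, as the matches above show. A sketch of that table style; `struct toy_uncore_type` is a stand-in for the kernel's `struct intel_uncore_type`, and the field values are illustrative:

```c
#include <stdio.h>

struct toy_uncore_type {
	const char *name;
	int num_counters;   /* general-purpose counters per box */
	int num_boxes;      /* instances of this unit per package */
	int perf_ctr_bits;  /* counter width in bits */
};

/* One descriptor per uncore unit, filled with designated initializers. */
static const struct toy_uncore_type snb_uncore_cbox = {
	.name          = "cbox",
	.num_counters  = 2,
	.num_boxes     = 4,
	.perf_ctr_bits = 44,
};

int main(void)
{
	printf("%s: %d counters x %d boxes, %d bits each\n",
	       snb_uncore_cbox.name, snb_uncore_cbox.num_counters,
	       snb_uncore_cbox.num_boxes, snb_uncore_cbox.perf_ctr_bits);
	return 0;
}
```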
|
/arch/x86/events/intel/uncore.h
      52  int num_counters;                                         member
     182  unsigned int num_counters;                                 member
     478  return box->pmu->type->freerunning[type].num_counters;     in uncore_num_freerunning()
     499  return box->pmu->type->num_counters;                       in uncore_num_counters()
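
The header keeps two distinct `num_counters` members, one per uncore type and one per free-running counter group, and exposes both through inline accessors that chase `box->pmu->type`. A compilable sketch of that double lookup, with simplified stand-in structs:

```c
#include <stdio.h>

struct toy_freerunning { unsigned int num_counters; };

struct toy_type {
	int num_counters;                     /* generic counters, cf. line 52 */
	struct toy_freerunning freerunning[4]; /* per-group, cf. line 182 */
};

struct toy_pmu { struct toy_type *type; };
struct toy_box { struct toy_pmu *pmu; };

static inline int toy_num_counters(const struct toy_box *box)
{
	return box->pmu->type->num_counters;
}

static inline unsigned int
toy_num_freerunning(const struct toy_box *box, int type)
{
	return box->pmu->type->freerunning[type].num_counters;
}

int main(void)
{
	struct toy_type type = { .num_counters = 4, .freerunning = { { 2 } } };
	struct toy_pmu pmu = { .type = &type };
	struct toy_box box = { .pmu = &pmu };

	printf("%d generic, %u freerunning\n",
	       toy_num_counters(&box), toy_num_freerunning(&box, 0));
	return 0;
}
```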
|
/arch/x86/events/intel/uncore_nhmex.c
     285  .num_counters = 1,
     317  .num_counters = 6,
     337  .num_counters = 4,
     428  .num_counters = 4,
     505  .num_counters = 4,
     931  .num_counters = 6,
    1194  .num_counters = 8,
|
/arch/x86/events/intel/uncore_discovery.h
     132  u8 num_counters;    member
|
/arch/x86/events/intel/uncore_discovery.c
     254  type->num_counters = unit->num_regs;           in uncore_insert_box_info()
     717  uncore->num_counters = type->num_counters;     in uncore_update_uncore_type()
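
On discovery-based platforms the counter count is not hard-coded: it is parsed from the hardware discovery table as `num_regs`, stored in the discovered type, and later copied into the generic uncore type, as the two matches above show. A sketch of that two-step hand-off, with illustrative stand-in types:

```c
#include <stdint.h>
#include <stdio.h>

struct toy_discovery_unit { uint8_t num_regs; };    /* parsed from the table */
struct toy_discovery_type { uint8_t num_counters; };
struct toy_uncore_type    { int num_counters; };

static void toy_insert_box_info(const struct toy_discovery_unit *unit,
				struct toy_discovery_type *type)
{
	type->num_counters = unit->num_regs;        /* cf. line 254 */
}

static void toy_update_uncore_type(struct toy_uncore_type *uncore,
				   const struct toy_discovery_type *type)
{
	uncore->num_counters = type->num_counters;  /* cf. line 717 */
}

int main(void)
{
	struct toy_discovery_unit unit = { .num_regs = 4 };
	struct toy_discovery_type dtype;
	struct toy_uncore_type uncore;

	toy_insert_box_info(&unit, &dtype);
	toy_update_uncore_type(&uncore, &dtype);
	printf("discovered %d counters\n", uncore.num_counters);
	return 0;
}
```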
|
/arch/x86/events/intel/uncore.c
     391  max_count = box->pmu->type->num_counters;                 in uncore_collect_events()
    1009  __EVENT_CONSTRAINT(0, (1ULL << type->num_counters) - 1,  in uncore_type_init()
    1010                     0, type->num_counters, 0, 0);         in uncore_type_init()
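
The constraint built in `uncore_type_init()` is simply a bitmask with one bit per usable counter, `(1ULL << num_counters) - 1` (fine for the small counts involved; the shift would overflow at 64). A quick demonstration of the arithmetic:

```c
#include <stdio.h>

int main(void)
{
	int num_counters = 4;
	unsigned long long idxmsk = (1ULL << num_counters) - 1;

	/* 4 counters -> mask 0xf: counters 0..3 are valid placements */
	printf("%d counters -> constraint mask 0x%llx\n",
	       num_counters, idxmsk);
	return 0;
}
```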
|
/arch/x86/events/intel/core.c
    6894  x86_pmu.cntr_mask64 = GENMASK_ULL(eax.split.num_counters - 1, 0);    in intel_pmu_init()
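
Here the general-purpose counter mask is derived from the `num_counters` field that CPUID leaf 0xA reports (the Zhaoxin driver below does the same). A userspace sketch; `GENMASK_ULL` is redefined locally as a simplified equivalent of the kernel macro in include/linux/bits.h:

```c
#include <stdio.h>

#define BITS_PER_LONG_LONG 64
/* Simplified equivalent of the kernel's GENMASK_ULL(h, l). */
#define GENMASK_ULL(h, l) \
	(((~0ULL) << (l)) & (~0ULL >> (BITS_PER_LONG_LONG - 1 - (h))))

int main(void)
{
	unsigned int num_counters = 8; /* as reported in CPUID.0xA:EAX[15:8] */
	unsigned long long cntr_mask64 = GENMASK_ULL(num_counters - 1, 0);

	printf("cntr_mask64 = 0x%llx\n", cntr_mask64); /* 0xff */
	return 0;
}
```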
|
/arch/loongarch/kernel/perf_event.c
     135  unsigned int num_counters;                                    member
     257  for (i = 0; i < loongarch_pmu.num_counters; i++) {            in loongarch_pmu_alloc_counter()
     271  WARN_ON(idx < 0 || idx >= loongarch_pmu.num_counters);        in loongarch_pmu_enable_event()
     290  WARN_ON(idx < 0 || idx >= loongarch_pmu.num_counters);        in loongarch_pmu_disable_event()
     426  WARN_ON(idx < 0 || idx >= loongarch_pmu.num_counters);        in loongarch_pmu_del()
     507  for (n = 0; n < loongarch_pmu.num_counters; n++) {            in pmu_handle_irq()
     653  int counters = loongarch_pmu.num_counters;                    in reset_counters()
     815  int ctr = loongarch_pmu.num_counters;                         in pause_local_counters()
     830  int ctr = loongarch_pmu.num_counters;                         in resume_local_counters()
     856  loongarch_pmu.num_counters = counters;                        in init_hw_perf_events()
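
The LoongArch PMU (like the MIPS one below) bounds-checks every counter index against `num_counters` with `WARN_ON` before touching counter control state, as the matches above show. A sketch of that defensive pattern, with `assert()` standing in for the kernel's `WARN_ON`:

```c
#include <assert.h>
#include <stdio.h>

struct toy_pmu { unsigned int num_counters; };

static void toy_enable_event(const struct toy_pmu *pmu, int idx)
{
	/* kernel: WARN_ON(idx < 0 || idx >= loongarch_pmu.num_counters); */
	assert(idx >= 0 && (unsigned int)idx < pmu->num_counters);

	printf("enable counter %d\n", idx); /* would program a ctrl register */
}

int main(void)
{
	struct toy_pmu pmu = { .num_counters = 4 };

	toy_enable_event(&pmu, 3); /* in range: ok */
	return 0;
}
```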
|
/arch/x86/kvm/svm/pmu.c
      30  unsigned int num_counters = pmu->nr_arch_gp_counters;                  in amd_pmu_get_pmc() (local)
      32  if (pmc_idx >= num_counters)                                           in amd_pmu_get_pmc()
      35  return &pmu->gp_counters[array_index_nospec(pmc_idx, num_counters)];   in amd_pmu_get_pmc()
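
KVM's AMD PMU treats `pmc_idx` as guest-controlled: after the architectural bounds check it clamps the index with `array_index_nospec()` so the array access stays in range even under speculative execution (Spectre v1). A sketch of the shape of that code; `toy_index_nospec()` is a simplified, branch-based stand-in for the kernel helper in include/linux/nospec.h, which computes its mask without a conditional branch:

```c
#include <stddef.h>
#include <stdio.h>

/* Simplified stand-in: returns idx if idx < size, else 0. */
static size_t toy_index_nospec(size_t idx, size_t size)
{
	size_t mask = (idx < size) ? ~(size_t)0 : 0;
	return idx & mask;
}

struct toy_pmc { unsigned long long value; };

static struct toy_pmc *toy_get_pmc(struct toy_pmc *counters,
				   unsigned int num_counters,
				   unsigned int pmc_idx)
{
	if (pmc_idx >= num_counters)    /* architectural bounds check */
		return NULL;

	/* speculative clamp, cf. line 35 */
	return &counters[toy_index_nospec(pmc_idx, num_counters)];
}

int main(void)
{
	struct toy_pmc gp[6] = { { 0 } };

	printf("%p\n", (void *)toy_get_pmc(gp, 6, 2)); /* valid slot */
	printf("%p\n", (void *)toy_get_pmc(gp, 6, 9)); /* NULL: out of range */
	return 0;
}
```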
|
/arch/x86/kvm/vmx/pmu_intel.c
      84  unsigned int num_counters;                              in intel_rdpmc_ecx_to_pmc() (local)
     111  num_counters = pmu->nr_arch_fixed_counters;             in intel_rdpmc_ecx_to_pmc()
     116  num_counters = pmu->nr_arch_gp_counters;                in intel_rdpmc_ecx_to_pmc()
     124  if (idx >= num_counters)                                in intel_rdpmc_ecx_to_pmc()
     128  return &counters[array_index_nospec(idx, num_counters)];  in intel_rdpmc_ecx_to_pmc()
     535  pmu->nr_arch_gp_counters = min_t(int, eax.split.num_counters,  in intel_pmu_refresh()
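
The Intel side adds a dispatch step: the RDPMC index selects either the fixed or the general-purpose counter array, and the local `num_counters` is set to whichever array's length applies before the shared bounds check and `array_index_nospec()` clamp. A sketch with a simplified, illustrative bit layout:

```c
#include <stdio.h>

#define TOY_RDPMC_FIXED (1u << 30) /* illustrative fixed-counter type bit */

struct toy_pmc { unsigned long long value; };

struct toy_pmu {
	struct toy_pmc gp_counters[8];
	struct toy_pmc fixed_counters[4];
	unsigned int nr_arch_gp_counters;
	unsigned int nr_arch_fixed_counters;
};

static struct toy_pmc *toy_rdpmc_to_pmc(struct toy_pmu *pmu, unsigned int ecx)
{
	struct toy_pmc *counters;
	unsigned int num_counters;
	unsigned int idx = ecx & ~TOY_RDPMC_FIXED;

	if (ecx & TOY_RDPMC_FIXED) {
		counters = pmu->fixed_counters;
		num_counters = pmu->nr_arch_fixed_counters;
	} else {
		counters = pmu->gp_counters;
		num_counters = pmu->nr_arch_gp_counters;
	}

	if (idx >= num_counters)
		return NULL;

	return &counters[idx]; /* kernel also applies array_index_nospec() */
}

int main(void)
{
	struct toy_pmu pmu = {
		.nr_arch_gp_counters = 8,
		.nr_arch_fixed_counters = 3,
	};

	printf("%p\n", (void *)toy_rdpmc_to_pmc(&pmu, TOY_RDPMC_FIXED | 1));
	return 0;
}
```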
|
/arch/mips/kernel/perf_event_mipsxx.c
      90  unsigned int num_counters;                                member
     329  for (i = mipspmu.num_counters - 1; i >= 0; i--) {         in mipsxx_pmu_alloc_counter()
     354  WARN_ON(idx < 0 || idx >= mipspmu.num_counters);          in mipsxx_pmu_enable_event()
     400  WARN_ON(idx < 0 || idx >= mipspmu.num_counters);          in mipsxx_pmu_disable_event()
     541  WARN_ON(idx < 0 || idx >= mipspmu.num_counters);          in mipspmu_del()
     646  (void *)(long)mipspmu.num_counters, 1);                   in hw_perf_event_destroy()
    1548  int ctr = mipspmu.num_counters;                           in pause_local_counters()
    1564  int ctr = mipspmu.num_counters;                           in resume_local_counters()
    1576  unsigned int counters = mipspmu.num_counters;             in mipsxx_pmu_handle_shared_irq()
    2019  mipspmu.num_counters = counters;                          in init_hw_perf_events()
|
/arch/powerpc/perf/core-fsl-emb.c
     280  int num_counters = ppmu->n_counter;          in fsl_emb_pmu_add() (local)
     288  num_counters = ppmu->n_restricted;           in fsl_emb_pmu_add()
     294  for (i = num_counters - 1; i >= 0; i--) {    in fsl_emb_pmu_add()
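
Here `num_counters` is narrowed to `n_restricted` for restricted events, and the free-slot scan runs from the highest index down (the MIPS allocator above scans the same way), so the low-numbered counters that restricted events require are consumed last. A sketch under those assumptions, with illustrative names:

```c
#include <stdbool.h>
#include <stdio.h>

#define N_COUNTER    6  /* total counters */
#define N_RESTRICTED 2  /* restricted events may only use counters 0..1 */

static bool used[N_COUNTER];

static int toy_alloc_counter(bool restricted)
{
	int num_counters = N_COUNTER;
	int i;

	if (restricted)
		num_counters = N_RESTRICTED; /* cf. line 288 */

	/* Scan top-down so low, restricted-capable slots stay free longest. */
	for (i = num_counters - 1; i >= 0; i--) {
		if (!used[i]) {
			used[i] = true;
			return i;
		}
	}
	return -1;
}

int main(void)
{
	printf("unrestricted -> counter %d\n", toy_alloc_counter(false)); /* 5 */
	printf("restricted   -> counter %d\n", toy_alloc_counter(true));  /* 1 */
	return 0;
}
```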
|
/arch/x86/events/rapl.c
     658  int num_counters = hweight32(rapl_pmus_pkg->cntr_mask);    in rapl_advertise() (local)
     661  num_counters += hweight32(rapl_pmus_core->cntr_mask);      in rapl_advertise()
     664  num_counters, rapl_timer_ms);                              in rapl_advertise()
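
rapl.c derives its advertised counter count by popcounting the counter mask(s) rather than storing a separate field. A sketch with `__builtin_popcount()` (a GCC/Clang builtin) standing in for the kernel's `hweight32()`:

```c
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t pkg_mask  = 0x1f; /* e.g. 5 package-scope RAPL events */
	uint32_t core_mask = 0x01; /* optional core-scope event */
	int num_counters = __builtin_popcount(pkg_mask);

	if (core_mask) /* kernel adds the core PMU's mask when present */
		num_counters += __builtin_popcount(core_mask);

	printf("%d counters available\n", num_counters); /* 6 */
	return 0;
}
```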
|
/arch/x86/include/asm/perf_event.h
     161  unsigned int num_counters:8;    member
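
This `num_counters` is an 8-bit bitfield inside the union that x86 overlays on CPUID leaf 0xA's EAX register: version in bits 7:0, general-purpose counter count in bits 15:8. A sketch of that overlay; the union name is a stand-in, the field layout follows the Intel SDM, and bitfield order is formally compiler-defined (x86 Linux relies on the usual LSB-first layout):

```c
#include <stdint.h>
#include <stdio.h>

union toy_cpuid10_eax {
	struct {
		unsigned int version_id:8;   /* bits  7:0 */
		unsigned int num_counters:8; /* bits 15:8 */
		unsigned int bit_width:8;    /* bits 23:16 */
		unsigned int mask_length:8;  /* bits 31:24 */
	} split;
	uint32_t full;
};

int main(void)
{
	union toy_cpuid10_eax eax = { .full = 0x08300805 }; /* sample value */

	printf("version %u, %u GP counters, %u bits wide\n",
	       eax.split.version_id, eax.split.num_counters,
	       eax.split.bit_width); /* version 5, 8 GP counters, 48 bits */
	return 0;
}
```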
|
/arch/x86/events/zhaoxin/core.c
     533  x86_pmu.cntr_mask64 = GENMASK_ULL(eax.split.num_counters - 1, 0);    in zhaoxin_pmu_init()
|
/arch/x86/kvm/cpuid.c
    1439  eax.split.num_counters = kvm_pmu_cap.num_counters_gp;    in __do_cpuid_func()
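
cpuid.c is the producer side of the same leaf: KVM writes its supported general-purpose counter count into the `num_counters` bitfield before returning CPUID.0xA to the guest, which the guest-side consumers above then read back. A sketch re-declaring the union shape from the previous example so it compiles standalone; `toy_pmu_cap` is a hypothetical stand-in for KVM's `kvm_pmu_cap`:

```c
#include <stdint.h>
#include <stdio.h>

union toy_cpuid10_eax {
	struct {
		unsigned int version_id:8;
		unsigned int num_counters:8;
		unsigned int bit_width:8;
		unsigned int mask_length:8;
	} split;
	uint32_t full;
};

struct toy_pmu_cap { unsigned int num_counters_gp; };

int main(void)
{
	struct toy_pmu_cap caps = { .num_counters_gp = 6 };
	union toy_cpuid10_eax eax = { .full = 0 };

	eax.split.num_counters = caps.num_counters_gp; /* cf. line 1439 */

	printf("guest CPUID.0xA EAX = 0x%08x\n", eax.full); /* 0x00000600 */
	return 0;
}
```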
|