/arch/powerpc/perf/

core-fsl-emb.c
    309: if (event->hw.sample_period) {   in fsl_emb_pmu_add()
    380: if (event->hw.idx < 0 || !event->hw.sample_period)   in fsl_emb_pmu_start()
    408: if (event->hw.idx < 0 || !event->hw.sample_period)   in fsl_emb_pmu_stop()
    546: event->hw.last_period = event->hw.sample_period;   in fsl_emb_pmu_event_init()
    592: u64 period = event->hw.sample_period;   in record_and_restart()
    619: event->hw.last_period = event->hw.sample_period;   in record_and_restart()
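
The record_and_restart()/set_period hits here and below all orbit the same idiom, repeated by nearly every sampling PMU on this page: period_left tracks how far the counter is from its next overflow, and the set_period helper tops it back up after each sample. A condensed, driver-neutral sketch of that idiom, modeled on the shape these implementations share (names are illustrative; max_period stands in for each PMU's counter width, and the hardware register write is omitted):

#include <linux/perf_event.h>

static int event_set_period(struct hw_perf_event *hwc, u64 max_period)
{
	s64 left = local64_read(&hwc->period_left);
	s64 period = hwc->sample_period;
	int overflow = 0;

	/* The counter ran a whole period past zero while the event was
	 * descheduled: account a full period and start over. */
	if (unlikely(left <= -period)) {
		left = period;
		local64_set(&hwc->period_left, left);
		hwc->last_period = period;
		overflow = 1;
	}

	/* Normal overflow: add one period to what is left. */
	if (unlikely(left <= 0)) {
		left += period;
		local64_set(&hwc->period_left, left);
		hwc->last_period = period;
		overflow = 1;
	}

	/* Never ask the hardware for more than it can count. */
	if (left > (s64)max_period)
		left = max_period;

	/* The driver would now program the counter with (-left) masked
	 * to the counter width; that write is hardware-specific. */
	return overflow;
}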
|
core-book3s.c
    604: event->attr.sample_period ||   in ebb_event_check()
   1548: if (event->hw.sample_period) {   in power_pmu_enable()
   1761: if (!event->hw.idx || !event->hw.sample_period)   in power_pmu_start()
   1791: if (!event->hw.idx || !event->hw.sample_period)   in power_pmu_stop()
   1904: || event->attr.sample_period)   in can_go_on_limited_pmc()
   2164: event->hw.last_period = event->hw.sample_period;   in power_pmu_event_init()
   2241: u64 period = event->hw.sample_period;   in record_and_restart()
   2280: event->hw.last_period = event->hw.sample_period;   in record_and_restart()
|
imc-pmu.c
    524: if (event->hw.sample_period)   in nest_imc_event_init()
    826: if (event->hw.sample_period)   in core_imc_event_init()
    993: if (event->hw.sample_period)   in thread_imc_event_init()
   1446: if (event->attr.sample_period == 0)   in trace_imc_event_init()
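
imc-pmu.c shows both directions of the same sanity check: the nest/core/thread counters cannot raise an overflow interrupt, so their event_init refuses any sampling request, while the trace flavor only makes sense as a sampling event and refuses a zero period. The reject-sampling guard reappears below in power.c, msr.c, cstate.c, uncore_snb.c and mmdc.c. A minimal sketch of the two guards (function names are illustrative, not the drivers' own):

#include <linux/perf_event.h>

/* Counting-only PMU: no overflow interrupt, so a sample period
 * is meaningless. */
static int counting_only_event_init(struct perf_event *event)
{
	if (event->attr.sample_period)	/* no sampling */
		return -EINVAL;
	return 0;
}

/* Sampling-only PMU: the inverse guard, as in trace_imc_event_init(). */
static int sampling_only_event_init(struct perf_event *event)
{
	if (event->attr.sample_period == 0)
		return -EINVAL;
	return 0;
}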
|
/arch/x86/events/amd/

ibs.c
    105: s64 period = hwc->sample_period;   in perf_event_set_period()
    316: if (hwc->sample_period) {   in perf_ibs_init()
    322: hwc->sample_period = perf_ibs->min_period;   in perf_ibs_init()
    325: hwc->sample_period &= ~0x0FULL;   in perf_ibs_init()
    326: if (hwc->sample_period < perf_ibs->min_period)   in perf_ibs_init()
    344: event->attr.sample_period = period;   in perf_ibs_init()
    345: hwc->sample_period = period;   in perf_ibs_init()
    347: if (hwc->sample_period < perf_ibs->min_period)   in perf_ibs_init()
    366: hwc->last_period = hwc->sample_period;   in perf_ibs_init()
    481: hwc->sample_period = perf_ibs->min_period;   in perf_ibs_start()
    [all …]
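
Putting the perf_ibs_init() fragments back together: IBS periods are quantized (the hardware ignores the low four bits) and bounded below by a per-PMU minimum. A hedged reconstruction of that validation; min_period stands in for perf_ibs->min_period, the branch ordering and the reject behavior are inferred from the checks above rather than copied from the driver:

#include <linux/perf_event.h>

static int ibs_check_period(struct hw_perf_event *hwc, u64 min_period)
{
	if (!hwc->sample_period)
		/* No period requested: fall back to the hardware minimum. */
		hwc->sample_period = min_period;

	/* The low 4 bits of the period are not programmable. */
	hwc->sample_period &= ~0x0FULL;

	/* Inferred: a period below the minimum cannot be programmed. */
	if (hwc->sample_period < min_period)
		return -EINVAL;

	hwc->last_period = hwc->sample_period;
	return 0;
}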
|
power.c
    133: if (event->attr.sample_period)   in pmu_event_init()
|
brs.c
    148: if (event->attr.sample_period <= x86_pmu.lbr_nr)   in amd_brs_hw_config()
|
/arch/s390/kernel/

perf_pai_ext.c
    285: if (a->sample_period && a->config != PAI_NNPA_BASE)   in paiext_event_init()
    291: if (a->sample_period) {   in paiext_event_init()
    307: if (a->sample_period) {   in paiext_event_init()
    308: a->sample_period = 1;   in paiext_event_init()
    368: if (!event->attr.sample_period) { /* Counting */   in paiext_start()
    408: if (!event->attr.sample_period) { /* Counting */   in paiext_stop()
|
perf_pai_crypto.c
    293: if (a->sample_period && a->config != PAI_CRYPTO_BASE)   in paicrypt_event_init()
    296: if (a->sample_period) {   in paicrypt_event_init()
    317: if (a->sample_period) {   in paicrypt_event_init()
    318: a->sample_period = 1;   in paicrypt_event_init()
    351: if (!event->attr.sample_period) { /* Counting */   in paicrypt_start()
    389: if (!event->attr.sample_period) { /* Counting */   in paicrypt_stop()
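
The two PAI drivers apply the same policy: sampling is only permitted on the base event (PAI_NNPA_BASE and PAI_CRYPTO_BASE respectively), and a sampling event's period is forced to 1, so every increment produces a sample. A minimal sketch of that event_init policy; the helper name is mine, and the rationale comment is an assumption, not the driver's stated reason:

#include <linux/perf_event.h>

static int pai_sampling_check(struct perf_event_attr *a, u64 base_config)
{
	/* Only the base event may be used as a sampling event. */
	if (a->sample_period && a->config != base_config)
		return -EINVAL;

	/* Assumed rationale: a PAI sample carries the whole counter
	 * set, so the period is pinned to 1 whatever was requested. */
	if (a->sample_period)
		a->sample_period = 1;

	return 0;
}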
|
perf_cpum_sf.c
    590: hwc->sample_period = period;   in hw_init_period()
    591: hwc->last_period = hwc->sample_period;   in hw_init_period()
    592: local64_set(&hwc->period_left, hwc->sample_period);   in hw_init_period()
    724: rate = getrate(attr->freq, attr->sample_period, si);   in __hw_perf_event_init_rate()
    728: attr->sample_period = rate;   in __hw_perf_event_init_rate()
   1716: event->attr.sample_period = rate;   in cpumsf_pmu_check_period()
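
Lines 590-592 reconstruct to a complete helper (only the signature is inferred), and it is worth spelling out because the same three-field update recurs in cfdiag_event_init2() below and in nearly every event_init on this page: the current period, the period attributed to the last sample, and the downward count to the next overflow are all seeded from one value:

#include <linux/perf_event.h>

static void hw_init_period(struct hw_perf_event *hwc, u64 period)
{
	hwc->sample_period = period;
	hwc->last_period = hwc->sample_period;
	local64_set(&hwc->period_left, hwc->sample_period);
}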
|
perf_cpum_cf.c
   1766: event->hw.sample_period = attr->sample_period;   in cfdiag_event_init2()
   1767: local64_set(&event->hw.period_left, event->hw.sample_period);   in cfdiag_event_init2()
   1769: event->hw.last_period = event->hw.sample_period;   in cfdiag_event_init2()
|
/arch/riscv/kvm/

vcpu_pmu.c
     40: u64 sample_period;   in kvm_pmu_get_sample_period() (local)
     43: sample_period = counter_val_mask;   in kvm_pmu_get_sample_period()
     45: sample_period = (-pmc->counter_val) & counter_val_mask;   in kvm_pmu_get_sample_period()
     47: return sample_period;   in kvm_pmu_get_sample_period()
    300: perf_event->attr.sample_period = period;   in kvm_riscv_pmu_overflow()
    301: perf_event->hw.sample_period = period;   in kvm_riscv_pmu_overflow()
    326: attr->sample_period = kvm_pmu_get_sample_period(pmc);   in kvm_pmu_create_perf_event()
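
All three KVM PMU emulations on this page (riscv here, x86 pmu.c and arm64 pmu-emul.c below) derive the host sample period the same way: the backing perf event must overflow exactly when the guest's virtual counter would wrap, so the period is the two's-complement distance to the wrap point, masked to the counter width. A sketch assembled from the kvm_pmu_get_sample_period() hits above; the zero-counter branch condition is inferred (compare x86's get_sample_period() at lines 169-173 below, which special-cases zero the same way):

#include <linux/types.h>

static u64 sample_period_for_counter(u64 counter_val, u64 counter_val_mask)
{
	u64 sample_period;

	if (!counter_val)
		/* A fresh counter has the full range left to run. */
		sample_period = counter_val_mask;
	else
		/* Distance from the current value to the wrap point. */
		sample_period = (-counter_val) & counter_val_mask;

	return sample_period;
}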
|
/arch/alpha/kernel/

perf_event.c
    256: long period = hwc->sample_period;   in alpha_perf_event_set_period()
    678: if (!hwc->sample_period) {   in __hw_perf_event_init()
    679: hwc->sample_period = alpha_pmu->pmc_max_period[0];   in __hw_perf_event_init()
    680: hwc->last_period = hwc->sample_period;   in __hw_perf_event_init()
    681: local64_set(&hwc->period_left, hwc->sample_period);   in __hw_perf_event_init()
|
/arch/loongarch/kernel/

perf_event.c
    305: u64 period = hwc->sample_period;   in loongarch_pmu_event_set_period()
    794: if (!hwc->sample_period) {   in __hw_perf_event_init()
    795: hwc->sample_period = loongarch_pmu.max_period;   in __hw_perf_event_init()
    796: hwc->last_period = hwc->sample_period;   in __hw_perf_event_init()
    797: local64_set(&hwc->period_left, hwc->sample_period);   in __hw_perf_event_init()
|
/arch/x86/kvm/

pmu.c
    169: u64 sample_period = (-counter_value) & pmc_bitmask(pmc);   in get_sample_period() (local)
    171: if (!sample_period)   in get_sample_period()
    172: sample_period = pmc_bitmask(pmc) + 1;   in get_sample_period()
    173: return sample_period;   in get_sample_period()
    194: attr.sample_period = get_sample_period(pmc, pmc->counter);   in pmc_reprogram_counter()
    203: attr.sample_period = 0;   in pmc_reprogram_counter()
|
/arch/arc/kernel/

perf_event.c
    335: hwc->sample_period = arc_pmu->max_period;   in arc_pmu_event_init()
    336: hwc->last_period = hwc->sample_period;   in arc_pmu_event_init()
    337: local64_set(&hwc->period_left, hwc->sample_period);   in arc_pmu_event_init()
    409: s64 period = hwc->sample_period;   in arc_pmu_event_set_period()
|
/arch/mips/kernel/

perf_event_mipsxx.c
    414: u64 period = hwc->sample_period;   in mipspmu_event_set_period()
   1527: if (!hwc->sample_period) {   in __hw_perf_event_init()
   1528: hwc->sample_period = mipspmu.max_period;   in __hw_perf_event_init()
   1529: hwc->last_period = hwc->sample_period;   in __hw_perf_event_init()
   1530: local64_set(&hwc->period_left, hwc->sample_period);   in __hw_perf_event_init()
|
/arch/sparc/kernel/

perf_event.c
    891: s64 period = hwc->sample_period;   in sparc_perf_event_set_period()
   1498: if (!hwc->sample_period) {   in sparc_pmu_event_init()
   1499: hwc->sample_period = MAX_PERIOD;   in sparc_pmu_event_init()
   1500: hwc->last_period = hwc->sample_period;   in sparc_pmu_event_init()
   1501: local64_set(&hwc->period_left, hwc->sample_period);   in sparc_pmu_event_init()
|
/arch/x86/events/

msr.c
    213: if (event->attr.sample_period) /* no sampling */   in msr_event_init()
|
core.c
    495: hwc->sample_period = x86_pmu.max_period;   in x86_setup_perfctr()
    496: hwc->last_period = hwc->sample_period;   in x86_setup_perfctr()
    497: local64_set(&hwc->period_left, hwc->sample_period);   in x86_setup_perfctr()
    639: s64 left = event->attr.sample_period;   in x86_pmu_hw_config()
    641: if (left > event->attr.sample_period)   in x86_pmu_hw_config()
   1390: s64 period = hwc->sample_period;   in x86_perf_event_set_period()
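
Lines 495-497 are the reference copy of a pattern this page shows verbatim in the alpha, arc, loongarch, mips and sparc drivers too: an event created without a period (a plain counting event) still flows through the same overflow machinery, so it is seeded with the widest period the counter supports. A sketch, with max_period standing in for x86_pmu.max_period and the helper name mine:

#include <linux/perf_event.h>

static void seed_default_period(struct hw_perf_event *hwc, u64 max_period)
{
	if (!hwc->sample_period) {
		/* Counting event: arm the period bookkeeping with the
		 * largest value the hardware counter can hold. */
		hwc->sample_period = max_period;
		hwc->last_period = hwc->sample_period;
		local64_set(&hwc->period_left, hwc->sample_period);
	}
}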
|
/arch/arm64/kvm/

pmu-emul.c
    556: perf_event->attr.sample_period = period;   in kvm_pmu_perf_overflow()
    557: perf_event->hw.sample_period = period;   in kvm_pmu_perf_overflow()
    751: attr.sample_period = compute_period(pmc, kvm_pmu_get_pmc_value(pmc));   in kvm_pmu_create_perf_event()
|
/arch/xtensa/kernel/

perf_event.c
    172: s64 period = hwc->sample_period;   in xtensa_perf_event_set_period()
|
/arch/arm/mach-imx/

mmdc.c
    294: if (event->attr.sample_period)   in mmdc_pmu_event_init()
|
/arch/x86/kernel/

kgdb.c
    667: pevent[0]->hw.sample_period = 1;   in kgdb_arch_late()
|
/arch/x86/events/intel/

cstate.c
    284: if (event->attr.sample_period) /* no sampling */   in cstate_pmu_event_init()
|
uncore_snb.c
    918: if (hwc->sample_period)   in snb_uncore_imc_event_init()
    922: if (event->attr.sample_period) /* no sampling */   in snb_uncore_imc_event_init()
|