
Searched refs:sample_period (Results 1 – 25 of 31) sorted by relevance

/arch/powerpc/perf/
core-fsl-emb.c
  309   if (event->hw.sample_period) {  in fsl_emb_pmu_add()
  380   if (event->hw.idx < 0 || !event->hw.sample_period)  in fsl_emb_pmu_start()
  408   if (event->hw.idx < 0 || !event->hw.sample_period)  in fsl_emb_pmu_stop()
  546   event->hw.last_period = event->hw.sample_period;  in fsl_emb_pmu_event_init()
  592   u64 period = event->hw.sample_period;  in record_and_restart()
  619   event->hw.last_period = event->hw.sample_period;  in record_and_restart()
core-book3s.c
  604   event->attr.sample_period ||  in ebb_event_check()
  1548  if (event->hw.sample_period) {  in power_pmu_enable()
  1761  if (!event->hw.idx || !event->hw.sample_period)  in power_pmu_start()
  1791  if (!event->hw.idx || !event->hw.sample_period)  in power_pmu_stop()
  1904  || event->attr.sample_period)  in can_go_on_limited_pmc()
  2164  event->hw.last_period = event->hw.sample_period;  in power_pmu_event_init()
  2241  u64 period = event->hw.sample_period;  in record_and_restart()
  2280  event->hw.last_period = event->hw.sample_period;  in record_and_restart()
imc-pmu.c
  524   if (event->hw.sample_period)  in nest_imc_event_init()
  826   if (event->hw.sample_period)  in core_imc_event_init()
  993   if (event->hw.sample_period)  in thread_imc_event_init()
  1446  if (event->attr.sample_period == 0)  in trace_imc_event_init()
/arch/x86/events/amd/
ibs.c
  105   s64 period = hwc->sample_period;  in perf_event_set_period()
  316   if (hwc->sample_period) {  in perf_ibs_init()
  322   hwc->sample_period = perf_ibs->min_period;  in perf_ibs_init()
  325   hwc->sample_period &= ~0x0FULL;  in perf_ibs_init()
  326   if (hwc->sample_period < perf_ibs->min_period)  in perf_ibs_init()
  344   event->attr.sample_period = period;  in perf_ibs_init()
  345   hwc->sample_period = period;  in perf_ibs_init()
  347   if (hwc->sample_period < perf_ibs->min_period)  in perf_ibs_init()
  366   hwc->last_period = hwc->sample_period;  in perf_ibs_init()
  481   hwc->sample_period = perf_ibs->min_period;  in perf_ibs_start()
  [all …]
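
The perf_ibs_init() hits above show a clamp-and-align pattern: the requested period is forced onto a 16-count boundary (the & ~0x0FULL mask) and is never allowed to drop below the driver's minimum. Below is a minimal standalone model of that arithmetic; only the mask and the minimum-period comparison come from the hits, while the helper name, the minimum value, and the main() harness are invented for illustration.

#include <stdint.h>
#include <stdio.h>

/* Illustrative model of the clamp/align step visible in perf_ibs_init():
 * align the period down to a multiple of 16, then enforce a hardware
 * minimum. The function name and values are hypothetical. */
static uint64_t clamp_ibs_period(uint64_t requested, uint64_t min_period)
{
	uint64_t period = requested & ~0x0FULL;	/* 16-count alignment */

	if (period < min_period)		/* never program below the minimum */
		period = min_period;
	return period;
}

int main(void)
{
	/* 100007 is not 16-aligned; 0x90 stands in for a driver minimum. */
	printf("%llu\n", (unsigned long long)clamp_ibs_period(100007, 0x90));
	return 0;
}
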
power.c
  133   if (event->attr.sample_period)  in pmu_event_init()
brs.c
  148   if (event->attr.sample_period <= x86_pmu.lbr_nr)  in amd_brs_hw_config()
/arch/s390/kernel/
perf_pai_ext.c
  285   if (a->sample_period && a->config != PAI_NNPA_BASE)  in paiext_event_init()
  291   if (a->sample_period) {  in paiext_event_init()
  307   if (a->sample_period) {  in paiext_event_init()
  308   a->sample_period = 1;  in paiext_event_init()
  368   if (!event->attr.sample_period) { /* Counting */  in paiext_start()
  408   if (!event->attr.sample_period) { /* Counting */  in paiext_stop()
perf_pai_crypto.c
  293   if (a->sample_period && a->config != PAI_CRYPTO_BASE)  in paicrypt_event_init()
  296   if (a->sample_period) {  in paicrypt_event_init()
  317   if (a->sample_period) {  in paicrypt_event_init()
  318   a->sample_period = 1;  in paicrypt_event_init()
  351   if (!event->attr.sample_period) { /* Counting */  in paicrypt_start()
  389   if (!event->attr.sample_period) { /* Counting */  in paicrypt_stop()
perf_cpum_sf.c
  590   hwc->sample_period = period;  in hw_init_period()
  591   hwc->last_period = hwc->sample_period;  in hw_init_period()
  592   local64_set(&hwc->period_left, hwc->sample_period);  in hw_init_period()
  724   rate = getrate(attr->freq, attr->sample_period, si);  in __hw_perf_event_init_rate()
  728   attr->sample_period = rate;  in __hw_perf_event_init_rate()
  1716  event->attr.sample_period = rate;  in cpumsf_pmu_check_period()
perf_cpum_cf.c
  1766  event->hw.sample_period = attr->sample_period;  in cfdiag_event_init2()
  1767  local64_set(&event->hw.period_left, event->hw.sample_period);  in cfdiag_event_init2()
  1769  event->hw.last_period = event->hw.sample_period;  in cfdiag_event_init2()
/arch/riscv/kvm/
vcpu_pmu.c
  40    u64 sample_period;  in kvm_pmu_get_sample_period() (local)
  43    sample_period = counter_val_mask;  in kvm_pmu_get_sample_period()
  45    sample_period = (-pmc->counter_val) & counter_val_mask;  in kvm_pmu_get_sample_period()
  47    return sample_period;  in kvm_pmu_get_sample_period()
  300   perf_event->attr.sample_period = period;  in kvm_riscv_pmu_overflow()
  301   perf_event->hw.sample_period = period;  in kvm_riscv_pmu_overflow()
  326   attr->sample_period = kvm_pmu_get_sample_period(pmc);  in kvm_pmu_create_perf_event()
/arch/alpha/kernel/
perf_event.c
  256   long period = hwc->sample_period;  in alpha_perf_event_set_period()
  678   if (!hwc->sample_period) {  in __hw_perf_event_init()
  679   hwc->sample_period = alpha_pmu->pmc_max_period[0];  in __hw_perf_event_init()
  680   hwc->last_period = hwc->sample_period;  in __hw_perf_event_init()
  681   local64_set(&hwc->period_left, hwc->sample_period);  in __hw_perf_event_init()
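
The __hw_perf_event_init() hits here, and the loongarch, mips, sparc, and x86 core.c blocks further down, repeat the same fallback: if the caller did not request a sample_period, the driver counts a full hardware period before the first overflow and seeds last_period and period_left from it. A simplified standalone model of that idiom follows; the kernel's local64_t is replaced by a plain int64_t, and every name other than the three period fields is invented for illustration.

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the period fields of struct hw_perf_event; period_left is a
 * plain int64_t here rather than the kernel's local64_t. */
struct hw_event_model {
	uint64_t sample_period;
	uint64_t last_period;
	int64_t  period_left;
};

/* Mirrors the fallback seen in the hits: with no requested period, count a
 * full max_period before the first overflow. */
static void init_default_period(struct hw_event_model *hwc, uint64_t max_period)
{
	if (!hwc->sample_period) {
		hwc->sample_period = max_period;
		hwc->last_period = hwc->sample_period;
		hwc->period_left = hwc->sample_period;
	}
}

int main(void)
{
	struct hw_event_model hwc = { 0 };

	init_default_period(&hwc, (1ULL << 47) - 1);	/* hypothetical 47-bit counter */
	printf("period=%llu left=%lld\n",
	       (unsigned long long)hwc.sample_period, (long long)hwc.period_left);
	return 0;
}
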
/arch/loongarch/kernel/
perf_event.c
  305   u64 period = hwc->sample_period;  in loongarch_pmu_event_set_period()
  794   if (!hwc->sample_period) {  in __hw_perf_event_init()
  795   hwc->sample_period = loongarch_pmu.max_period;  in __hw_perf_event_init()
  796   hwc->last_period = hwc->sample_period;  in __hw_perf_event_init()
  797   local64_set(&hwc->period_left, hwc->sample_period);  in __hw_perf_event_init()
/arch/x86/kvm/
pmu.c
  169   u64 sample_period = (-counter_value) & pmc_bitmask(pmc);  in get_sample_period() (local)
  171   if (!sample_period)  in get_sample_period()
  172   sample_period = pmc_bitmask(pmc) + 1;  in get_sample_period()
  173   return sample_period;  in get_sample_period()
  194   attr.sample_period = get_sample_period(pmc, pmc->counter);  in pmc_reprogram_counter()
  203   attr.sample_period = 0;  in pmc_reprogram_counter()
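
The riscv kvm_pmu_get_sample_period() hits above and this x86 get_sample_period() rely on the same two's-complement trick: a counter preloaded to some value overflows after (-value) & mask further increments, and a result of zero (counter already at the wrap point) falls back to a full period. A self-contained sketch of that arithmetic, with the 48-bit mask and the demo values chosen arbitrarily:

#include <stdint.h>
#include <stdio.h>

/* Model of the period computation above: how many increments remain until a
 * counter of width 'mask' wraps, with a full period when it is already at
 * the wrap point. */
static uint64_t sample_period_from_counter(uint64_t counter, uint64_t mask)
{
	uint64_t sample_period = (-counter) & mask;

	if (!sample_period)
		sample_period = mask + 1;
	return sample_period;
}

int main(void)
{
	uint64_t mask = (1ULL << 48) - 1;	/* hypothetical 48-bit PMC */

	/* Counter preloaded to mask - 99: 100 increments until overflow. */
	printf("%llu\n", (unsigned long long)sample_period_from_counter(mask - 99, mask));
	/* Counter at 0: a full 2^48 period. */
	printf("%llu\n", (unsigned long long)sample_period_from_counter(0, mask));
	return 0;
}
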
/arch/arc/kernel/
perf_event.c
  335   hwc->sample_period = arc_pmu->max_period;  in arc_pmu_event_init()
  336   hwc->last_period = hwc->sample_period;  in arc_pmu_event_init()
  337   local64_set(&hwc->period_left, hwc->sample_period);  in arc_pmu_event_init()
  409   s64 period = hwc->sample_period;  in arc_pmu_event_set_period()
/arch/mips/kernel/
perf_event_mipsxx.c
  414   u64 period = hwc->sample_period;  in mipspmu_event_set_period()
  1527  if (!hwc->sample_period) {  in __hw_perf_event_init()
  1528  hwc->sample_period = mipspmu.max_period;  in __hw_perf_event_init()
  1529  hwc->last_period = hwc->sample_period;  in __hw_perf_event_init()
  1530  local64_set(&hwc->period_left, hwc->sample_period);  in __hw_perf_event_init()
/arch/sparc/kernel/
perf_event.c
  891   s64 period = hwc->sample_period;  in sparc_perf_event_set_period()
  1498  if (!hwc->sample_period) {  in sparc_pmu_event_init()
  1499  hwc->sample_period = MAX_PERIOD;  in sparc_pmu_event_init()
  1500  hwc->last_period = hwc->sample_period;  in sparc_pmu_event_init()
  1501  local64_set(&hwc->period_left, hwc->sample_period);  in sparc_pmu_event_init()
/arch/x86/events/
msr.c
  213   if (event->attr.sample_period) /* no sampling */  in msr_event_init()
core.c
  495   hwc->sample_period = x86_pmu.max_period;  in x86_setup_perfctr()
  496   hwc->last_period = hwc->sample_period;  in x86_setup_perfctr()
  497   local64_set(&hwc->period_left, hwc->sample_period);  in x86_setup_perfctr()
  639   s64 left = event->attr.sample_period;  in x86_pmu_hw_config()
  641   if (left > event->attr.sample_period)  in x86_pmu_hw_config()
  1390  s64 period = hwc->sample_period;  in x86_perf_event_set_period()
/arch/arm64/kvm/
pmu-emul.c
  556   perf_event->attr.sample_period = period;  in kvm_pmu_perf_overflow()
  557   perf_event->hw.sample_period = period;  in kvm_pmu_perf_overflow()
  751   attr.sample_period = compute_period(pmc, kvm_pmu_get_pmc_value(pmc));  in kvm_pmu_create_perf_event()
/arch/xtensa/kernel/
perf_event.c
  172   s64 period = hwc->sample_period;  in xtensa_perf_event_set_period()
/arch/arm/mach-imx/
mmdc.c
  294   if (event->attr.sample_period)  in mmdc_pmu_event_init()
/arch/x86/kernel/
kgdb.c
  667   pevent[0]->hw.sample_period = 1;  in kgdb_arch_late()
/arch/x86/events/intel/
cstate.c
  284   if (event->attr.sample_period) /* no sampling */  in cstate_pmu_event_init()
uncore_snb.c
  918   if (hwc->sample_period)  in snb_uncore_imc_event_init()
  922   if (event->attr.sample_period) /* no sampling */  in snb_uncore_imc_event_init()
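
Several hits in this listing (msr.c, cstate.c, uncore_snb.c, mmdc.c, and the powerpc imc-pmu.c init routines) are the same guard: counting-only PMUs refuse any event whose attr.sample_period is non-zero. From user space that field lives in struct perf_event_attr and is handed to perf_event_open(2). The sketch below is a plain counting-mode example against a software event rather than any of the PMUs listed here, so the requested period is left at zero; everything in it is standard perf_event_open usage, not code from the files above.

#include <linux/perf_event.h>
#include <sys/ioctl.h>
#include <sys/syscall.h>
#include <unistd.h>
#include <string.h>
#include <stdio.h>
#include <stdint.h>

int main(void)
{
	struct perf_event_attr attr;
	uint64_t count;
	int fd;

	memset(&attr, 0, sizeof(attr));
	attr.size = sizeof(attr);
	attr.type = PERF_TYPE_SOFTWARE;
	attr.config = PERF_COUNT_SW_TASK_CLOCK;
	attr.disabled = 1;
	attr.exclude_kernel = 1;	/* keep it usable under perf_event_paranoid */
	attr.exclude_hv = 1;
	/* sample_period = 0 means pure counting; a counting-only PMU such as
	 * those in the hits above rejects any non-zero value at event init. */
	attr.sample_period = 0;

	/* No glibc wrapper exists for perf_event_open, so call it directly. */
	fd = syscall(__NR_perf_event_open, &attr, 0 /* this task */,
		     -1 /* any CPU */, -1 /* no group */, 0);
	if (fd < 0) {
		perror("perf_event_open");
		return 1;
	}

	ioctl(fd, PERF_EVENT_IOC_ENABLE, 0);
	for (volatile int i = 0; i < 1000000; i++)
		;	/* burn some task clock */
	ioctl(fd, PERF_EVENT_IOC_DISABLE, 0);

	if (read(fd, &count, sizeof(count)) == sizeof(count))
		printf("task clock: %llu ns\n", (unsigned long long)count);
	close(fd);
	return 0;
}

Requesting a non-zero sample_period on one of the counting-only PMUs above would make its event_init callback fail, so perf_event_open() returns an error before any file descriptor is created.
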
