Uses of the identifier prev_count, grouped by directory and file. The leading numbers are source line numbers within each file; "local" marks a local declaration hit.

/drivers/perf/
marvell_pem_pmu.c
    238  u64 prev_count, new_count;  in pem_perf_event_update() local
    241  prev_count = local64_read(&hwc->prev_count);  in pem_perf_event_update()
    243  } while (local64_xchg(&hwc->prev_count, new_count) != prev_count);  in pem_perf_event_update()
    245  local64_add((new_count - prev_count), &event->count);  in pem_perf_event_update()
    258  local64_set(&hwc->prev_count,  in pem_perf_event_start()
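The entry above is the canonical lock-free delta update that most drivers in this directory repeat: snapshot prev_count, re-read the hardware, publish the new snapshot, and accumulate only the difference. A minimal sketch of that shape, where read_hw_counter() is a hypothetical stand-in for the driver's MMIO read:

    #include <linux/perf_event.h>

    static u64 read_hw_counter(struct perf_event *event);  /* hypothetical */

    static void pem_style_event_update(struct perf_event *event)
    {
            struct hw_perf_event *hwc = &event->hw;
            u64 prev_count, new_count;

            do {
                    /* Snapshot the last value already accounted for... */
                    prev_count = local64_read(&hwc->prev_count);
                    /* ...then read the hardware again. */
                    new_count = read_hw_counter(event);
                    /*
                     * Publish new_count; if a concurrent updater raced in
                     * between, the xchg returns their value and we retry.
                     */
            } while (local64_xchg(&hwc->prev_count, new_count) != prev_count);

            /* Only the delta since the last snapshot is added. */
            local64_add(new_count - prev_count, &event->count);
    }

The same xchg-based shape appears below in marvell_cn10k_ddr_pmu.c and arm-ccn.c, there with the subtraction additionally masked to the hardware counter's width.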
|
marvell_cn10k_ddr_pmu.c
    595  u64 prev_count, new_count, mask;  in cn10k_ddr_perf_event_update() local
    598  prev_count = local64_read(&hwc->prev_count);  in cn10k_ddr_perf_event_update()
    600  } while (local64_xchg(&hwc->prev_count, new_count) != prev_count);  in cn10k_ddr_perf_event_update()
    604  local64_add((new_count - prev_count) & mask, &event->count);  in cn10k_ddr_perf_event_update()
    613  local64_set(&hwc->prev_count, 0);  in cn10k_ddr_perf_event_start()
    733  local64_set(&hwc->prev_count, 0);  in cn10k_ddr_perf_event_update_all()
    856  u64 prev_count, new_count;  in cn10k_ddr_pmu_overflow_handler() local
    863  prev_count = local64_read(&hwc->prev_count);  in cn10k_ddr_pmu_overflow_handler()
    869  if (new_count < prev_count)  in cn10k_ddr_pmu_overflow_handler()
    876  prev_count = local64_read(&hwc->prev_count);  in cn10k_ddr_pmu_overflow_handler()
    [all …]
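The overflow-handler hits at lines 856-876 use the snapshot for a second purpose: a fresh hardware read that comes back below prev_count means the counter wrapped since the last update, so the pending interrupt belongs to this counter. A hedged sketch of that check, reusing the hypothetical helpers from the previous sketch:

    static void overflow_check_sketch(struct perf_event *event)
    {
            struct hw_perf_event *hwc = &event->hw;
            u64 prev_count, new_count;

            prev_count = local64_read(&hwc->prev_count);
            new_count = read_hw_counter(event);     /* hypothetical */

            /*
             * A fresh read below the published snapshot means the counter
             * wrapped since the last update: the interrupt is ours.
             */
            if (new_count < prev_count)
                    pem_style_event_update(event);  /* fold in the delta */
    }

The masked subtraction in the update path (line 604) then yields a correct delta even across the wrap.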
|
arm_dmc620_pmu.c
    327  u64 delta, prev_count, new_count;  in dmc620_pmu_event_update() local
    331  prev_count = local64_read(&hwc->prev_count);  in dmc620_pmu_event_update()
    333  } while (local64_cmpxchg(&hwc->prev_count,  in dmc620_pmu_event_update()
    334  prev_count, new_count) != prev_count);  in dmc620_pmu_event_update()
    335  delta = (new_count - prev_count) & DMC620_CNT_MAX_PERIOD;  in dmc620_pmu_event_update()
    343  local64_set(&event->hw.prev_count, DMC620_CNT_INIT);  in dmc620_pmu_event_set_period()
|
arm_dsu_pmu.c
    334  u64 delta, prev_count, new_count;  in dsu_pmu_event_update() local
    338  prev_count = local64_read(&hwc->prev_count);  in dsu_pmu_event_update()
    340  } while (local64_cmpxchg(&hwc->prev_count, prev_count, new_count) !=  in dsu_pmu_event_update()
    341  prev_count);  in dsu_pmu_event_update()
    342  delta = (new_count - prev_count) & DSU_PMU_COUNTER_MASK(hwc->idx);  in dsu_pmu_event_update()
    368  local64_set(&event->hw.prev_count, val);  in dsu_pmu_set_event_period()
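arm_dmc620_pmu.c and arm_dsu_pmu.c above use local64_cmpxchg rather than xchg: the snapshot is replaced only if it is still the one this updater read, and the delta is masked to the counter's width before being accumulated. A sketch, with a 32-bit width as an assumption standing in for DMC620_CNT_MAX_PERIOD / DSU_PMU_COUNTER_MASK():

    #include <linux/bits.h>

    static void dsu_style_event_update(struct perf_event *event)
    {
            struct hw_perf_event *hwc = &event->hw;
            u64 delta, prev_count, new_count;

            do {
                    prev_count = local64_read(&hwc->prev_count);
                    new_count = read_hw_counter(event);     /* hypothetical */
                    /* Replace the snapshot only if it is still ours. */
            } while (local64_cmpxchg(&hwc->prev_count, prev_count,
                                     new_count) != prev_count);

            /* Mask to the assumed 32-bit width so a wrap is absorbed. */
            delta = (new_count - prev_count) & GENMASK_ULL(31, 0);
            local64_add(delta, &event->count);
    }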
|
qcom_l3_pmu.c
    206  local64_set(&event->hw.prev_count, 0);  in qcom_l3_cache__64bit_counter_start()
    247  prev = local64_read(&event->hw.prev_count);  in qcom_l3_cache__64bit_counter_update()
    253  } while (local64_cmpxchg(&event->hw.prev_count, prev, new) != prev);  in qcom_l3_cache__64bit_counter_update()
    285  local64_set(&event->hw.prev_count, 0);  in qcom_l3_cache__32bit_counter_start()
    323  prev = local64_read(&event->hw.prev_count);  in qcom_l3_cache__32bit_counter_update()
    325  } while (local64_cmpxchg(&event->hw.prev_count, prev, new) != prev);  in qcom_l3_cache__32bit_counter_update()
|
riscv_pmu.c
    174  prev_raw_count = local64_read(&hwc->prev_count);  in riscv_pmu_event_update()
    176  oldval = local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in riscv_pmu_event_update()
    233  local64_set(&hwc->prev_count, (u64)-left);  in riscv_pmu_event_set_period()
    252  init_val = local64_read(&hwc->prev_count) & max_period;  in riscv_pmu_start()
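Line 233 of riscv_pmu.c (and, coincidentally, line 233 of arm_pmu.c below) shows the standard sampling trick: program the counter to -left so the hardware overflows, and raises its interrupt, after exactly 'left' more events, and store the same value in prev_count so the next update still computes a correct delta. In outline, with write_hw_counter() as a hypothetical MMIO helper:

    static void set_period_sketch(struct perf_event *event, s64 left)
    {
            struct hw_perf_event *hwc = &event->hw;

            /*
             * Starting at -left, the counter reaches overflow after
             * exactly 'left' events; record the same value so the next
             * delta computation is consistent.
             */
            local64_set(&hwc->prev_count, (u64)-left);
            write_hw_counter(event, (u64)-left);    /* hypothetical */
    }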
|
alibaba_uncore_drw_pmu.c
    303  prev = local64_read(&hwc->prev_count);  in ali_drw_pmu_event_update()
    305  } while (local64_cmpxchg(&hwc->prev_count, prev, now) != prev);  in ali_drw_pmu_event_update()
    328  local64_set(&event->hw.prev_count, pre_val);  in ali_drw_pmu_event_set_period()
    587  local64_read(&event->hw.prev_count);  in ali_drw_pmu_start()
|
thunderx2_pmu.c
    379  local64_set(&hwc->prev_count, 0);  in uncore_start_event_l3c()
    409  local64_set(&hwc->prev_count, 0);  in uncore_start_event_dmc()
    452  local64_set(&event->hw.prev_count, 0ULL);  in uncore_start_event_ccpi2()
    484  prev = local64_xchg(&hwc->prev_count, new);  in tx2_uncore_event_update()
    488  prev = local64_xchg(&hwc->prev_count, new);  in tx2_uncore_event_update()
|
qcom_l2_pmu.c
    304  prev = local64_read(&hwc->prev_count);  in l2_cache_event_update()
    306  } while (local64_cmpxchg(&hwc->prev_count, prev, now) != prev);  in l2_cache_event_update()
    335  local64_set(&hwc->prev_count, new);  in l2_cache_cluster_set_period()
    604  local64_set(&hwc->prev_count, 0);  in l2_cache_event_add()
|
starfive_starlink_pmu.c
    167  local64_set(&hwc->prev_count, val);  in starlink_pmu_set_event_period()
    242  prev_raw_count = local64_read(&hwc->prev_count);  in starlink_pmu_update()
    250  oldval = local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in starlink_pmu_update()
|
arm-ni.c
    359  prev = local64_read(&hw->prev_count);  in arm_ni_event_read()
    364  } while (local64_cmpxchg(&hw->prev_count, prev, count) != prev);  in arm_ni_event_read()
    390  local64_set(&cd->ccnt->hw.prev_count, S64_MIN);  in arm_ni_init_ccnt()
    396  local64_set(&cd->evcnt[idx]->hw.prev_count, S32_MIN);  in arm_ni_init_evcnt()
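The arm-ni.c initialisation at lines 390 and 396 looks like the non-sampling version of the same biasing idea: starting both the counter and prev_count at S64_MIN or S32_MIN, the midpoint of the counter's range, keeps overflow interrupts as far apart as possible. Roughly, under that reading:

    #include <linux/limits.h>

    static void init_ccnt_sketch(struct perf_event *event)
    {
            /* Mid-range start: 2^63 events until the counter overflows. */
            local64_set(&event->hw.prev_count, S64_MIN);
            write_hw_counter(event, S64_MIN);       /* hypothetical */
    }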
|
arm-ccn.c
    863  u64 prev_count, new_count, mask;  in arm_ccn_pmu_event_update() local
    866  prev_count = local64_read(&hw->prev_count);  in arm_ccn_pmu_event_update()
    868  } while (local64_xchg(&hw->prev_count, new_count) != prev_count);  in arm_ccn_pmu_event_update()
    872  local64_add((new_count - prev_count) & mask, &event->count);  in arm_ccn_pmu_event_update()
    913  local64_set(&event->hw.prev_count,  in arm_ccn_pmu_event_start()
|
arm_smmuv3_pmu.c
    262  prev = local64_read(&hwc->prev_count);  in smmu_pmu_event_update()
    264  } while (local64_cmpxchg(&hwc->prev_count, prev, now) != prev);  in smmu_pmu_event_update()
    299  local64_set(&hwc->prev_count, new);  in smmu_pmu_set_period()
    500  local64_set(&hwc->prev_count, 0);  in smmu_pmu_event_add()
|
arm_pmu.c
    233  local64_set(&hwc->prev_count, (u64)-left);  in armpmu_event_set_period()
    250  prev_raw_count = local64_read(&hwc->prev_count);  in armpmu_event_update()
    253  if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in armpmu_event_update()
|
dwc_pcie_pmu.c
    335  prev = local64_read(&hwc->prev_count);  in dwc_pcie_pmu_event_update()
    342  } while (local64_cmpxchg(&hwc->prev_count, prev, now) != prev);  in dwc_pcie_pmu_event_update()
    400  local64_set(&hwc->prev_count, 0);  in dwc_pcie_pmu_event_start()
|
marvell_cn10k_tad_pmu.c
    60  prev = local64_read(&hwc->prev_count);  in tad_pmu_event_counter_read()
    64  } while (local64_cmpxchg(&hwc->prev_count, prev, new) != prev);  in tad_pmu_event_counter_read()
|
arm-cci.c
    893  pmu_write_counter(cci_pmu, local64_read(&event->hw.prev_count), i);  in __pmu_write_counters()
    958  pmu_write_counter(cci_pmu, local64_read(&event->hw.prev_count), i);  in cci5xx_pmu_write_counters()
    976  prev_raw_count = local64_read(&hwc->prev_count);  in pmu_event_update()
    978  } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in pmu_event_update()
    1003  local64_set(&hwc->prev_count, val);  in pmu_event_set_period()
|
riscv_pmu_legacy.c
    77  local64_set(&hwc->prev_count, initial_val);  in pmu_legacy_ctr_start()
|
/drivers/pps/generators/
pps_gen_tio.c
    47  u32 prev_count;  member
    86  tio->prev_count = 0;  in pps_tio_disable()
    144  if (tio->prev_count && tio->prev_count == event_count)  in hrtimer_callback()
    146  tio->prev_count = event_count;  in hrtimer_callback()
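pps_gen_tio.c is an odd one out in this list: prev_count is a plain u32 member rather than a local64_t, and it guards against acting twice on the same hardware event instead of computing a delta. A sketch of that guard, with the struct and helper as hypothetical stand-ins for the driver's device state:

    struct pps_tio_sketch {
            u32 prev_count;
            /* ... */
    };

    /* Returns true if the hardware produced no new event since the
     * last callback; otherwise records the new count. */
    static bool tio_event_is_stale(struct pps_tio_sketch *tio,
                                   u32 event_count)
    {
            if (tio->prev_count && tio->prev_count == event_count)
                    return true;
            tio->prev_count = event_count;
            return false;
    }

pps_tio_disable() resets prev_count to 0 (line 86) so the guard re-arms cleanly on the next enable.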
|
/drivers/iommu/intel/
perfmon.c
    305  u64 prev_count, new_count, delta;  in iommu_pmu_event_update() local
    309  prev_count = local64_read(&hwc->prev_count);  in iommu_pmu_event_update()
    311  if (local64_xchg(&hwc->prev_count, new_count) != prev_count)  in iommu_pmu_event_update()
    318  delta = (new_count << shift) - (prev_count << shift);  in iommu_pmu_event_update()
    344  local64_set((&hwc->prev_count), count);  in iommu_pmu_start()
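The Intel IOMMU update above handles counters narrower than 64 bits with shifts instead of a mask: both snapshots are shifted up so the hardware counter's top bit lands in bit 63, the subtraction then wraps at the right width, and the result is shifted back down. As a sketch, with cntr_width standing for the hardware counter width:

    /* After the pattern at perfmon.c:318. */
    static u64 delta_for_width(u64 prev_count, u64 new_count,
                               int cntr_width)
    {
            int shift = 64 - cntr_width;

            /*
             * Shifting both values to the top of the register makes the
             * subtraction wrap at the counter's width; shifting back
             * down recovers the true delta.
             */
            return ((new_count << shift) - (prev_count << shift)) >> shift;
    }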
|
/drivers/perf/hisilicon/
hisi_uncore_pmu.c
    308  local64_set(&hwc->prev_count, val);  in hisi_uncore_pmu_set_event_period()
    323  prev_raw_count = local64_read(&hwc->prev_count);  in hisi_uncore_pmu_event_update()
    324  } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in hisi_uncore_pmu_event_update()
    348  u64 prev_raw_count = local64_read(&hwc->prev_count);  in hisi_uncore_pmu_start()
|
hisi_pcie_pmu.c
    452  prev_cnt = local64_read(&hwc->prev_count);  in hisi_pcie_pmu_event_update()
    454  } while (local64_cmpxchg(&hwc->prev_count, prev_cnt,  in hisi_pcie_pmu_event_update()
    475  local64_set(&hwc->prev_count, HISI_PCIE_INIT_VAL);  in hisi_pcie_pmu_set_period()
    488  local64_set(&hwc->prev_count, cnt);  in hisi_pcie_pmu_set_period()
    550  prev_cnt = local64_read(&hwc->prev_count);  in hisi_pcie_pmu_start()
|
/drivers/misc/
ntsync.c
    411  __u32 prev_count;  in ntsync_sem_release() local
    424  prev_count = sem->u.sem.count;  in ntsync_sem_release()
    434  if (!ret && put_user(prev_count, user_args))  in ntsync_sem_release()
    461  __u32 prev_count;  in ntsync_mutex_unlock() local
    475  prev_count = mutex->u.mutex.count;  in ntsync_mutex_unlock()
    485  if (!ret && put_user(prev_count, &user_args->count))  in ntsync_mutex_unlock()
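ntsync.c is the other non-PMU user: in both the semaphore release and mutex unlock paths, prev_count records the object's count before the kernel modifies it, and that previous value is copied back to userspace, consistent with NT semantics where ReleaseSemaphore reports the pre-release count. In outline, with the object layout and helper simplified to hypothetical stand-ins:

    #include <linux/uaccess.h>

    struct ntsync_sem_sketch {
            __u32 count;    /* current semaphore count */
            /* ... */
    };

    static int sem_release_sketch(struct ntsync_sem_sketch *sem, __u32 args,
                                  __u32 __user *user_args)
    {
            __u32 prev_count;
            int ret;

            prev_count = sem->count;                /* value before the post */
            ret = post_sem_sketch(sem, args);       /* hypothetical */

            /* Report the pre-release count back to userspace. */
            if (!ret && put_user(prev_count, user_args))
                    ret = -EFAULT;

            return ret;
    }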
|
/drivers/gpu/drm/xe/
xe_pmu.c
    321  prev = local64_read(&hwc->prev_count);  in xe_pmu_event_update()
    324  } while (!local64_try_cmpxchg(&hwc->prev_count, &prev, new));  in xe_pmu_event_update()
    356  local64_set(&event->hw.prev_count, __xe_pmu_event_read(event));  in xe_pmu_enable()
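xe_pmu.c (and the idxd perfmon below) use the newer local64_try_cmpxchg form of the same loop: on failure it writes the value it found back into 'prev', so the re-read at the top of the loop that the older drivers need disappears. A sketch, again with read_hw_counter() hypothetical:

    static void xe_style_event_update(struct perf_event *event)
    {
            struct hw_perf_event *hwc = &event->hw;
            u64 prev, new;

            prev = local64_read(&hwc->prev_count);
            do {
                    new = read_hw_counter(event);   /* hypothetical */
                    /*
                     * On failure, try_cmpxchg updates 'prev' in place,
                     * so no explicit re-read is needed.
                     */
            } while (!local64_try_cmpxchg(&hwc->prev_count, &prev, new));

            local64_add(new - prev, &event->count);
    }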
|
/drivers/dma/idxd/
perfmon.c
    217  prev_raw_count = local64_read(&hwc->prev_count);  in perfmon_pmu_event_update()
    220  } while (!local64_try_cmpxchg(&hwc->prev_count,  in perfmon_pmu_event_update()
    330  local64_set(&event->hw.prev_count, cntrdata);  in perfmon_pmu_event_start()
|