
Search results for refs:counter (1 – 25 of 156), sorted by relevance


/arch/arm/include/asm/
atomic.h
134 prefetchw(&v->counter); in arch_atomic_fetch_add_unless()
180 val = v->counter; \
193 val = v->counter; \
216 ret = v->counter; in arch_atomic_cmpxchg()
218 v->counter = new; in arch_atomic_cmpxchg()
254 s64 counter; member
267 : "r" (&v->counter), "Qo" (v->counter) in arch_atomic64_read()
277 : "=Qo" (v->counter) in arch_atomic64_set()
289 : "r" (&v->counter), "Qo" (v->counter) in arch_atomic64_read()
299 prefetchw(&v->counter); in arch_atomic64_set()
[all …]
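
Note: the hits at lines 216-218 are the pre-ARMv6 uniprocessor fallback, where cmpxchg is a plain read-compare-store performed with interrupts masked, while the ARMv6+ paths above use LL/SC with a prefetchw() warm-up. A minimal userspace sketch of that fallback shape, with a mutex standing in for IRQ masking (all names below are illustrative, not the kernel's):

    #include <pthread.h>

    typedef struct { int counter; } my_atomic_t;                  /* models atomic_t */
    static pthread_mutex_t irq_guard = PTHREAD_MUTEX_INITIALIZER; /* models IRQ masking */

    /* Shape of the fallback at atomic.h lines 216-218:
     * return the old value, store the new one only on a match. */
    static int my_atomic_cmpxchg(my_atomic_t *v, int old, int new_val)
    {
        int ret;

        pthread_mutex_lock(&irq_guard);   /* kernel: raw_local_irq_save() */
        ret = v->counter;
        if (ret == old)
            v->counter = new_val;
        pthread_mutex_unlock(&irq_guard); /* kernel: raw_local_irq_restore() */

        return ret;
    }
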
/arch/x86/kernel/cpu/
perfctr-watchdog.c
110 unsigned int counter; in reserve_perfctr_nmi() local
112 counter = nmi_perfctr_msr_to_bit(msr); in reserve_perfctr_nmi()
114 if (counter > NMI_MAX_COUNTER_BITS) in reserve_perfctr_nmi()
125 unsigned int counter; in release_perfctr_nmi() local
127 counter = nmi_perfctr_msr_to_bit(msr); in release_perfctr_nmi()
129 if (counter > NMI_MAX_COUNTER_BITS) in release_perfctr_nmi()
132 clear_bit(counter, perfctr_nmi_owner); in release_perfctr_nmi()
138 unsigned int counter; in reserve_evntsel_nmi() local
142 if (counter > NMI_MAX_COUNTER_BITS) in reserve_evntsel_nmi()
153 unsigned int counter; in release_evntsel_nmi() local
[all …]
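
Note: reserve_perfctr_nmi()/release_perfctr_nmi() treat perfctr_nmi_owner as a bitmap of claimed hardware counters: map the MSR to a bit index, bounds-check it against NMI_MAX_COUNTER_BITS, then atomically set or clear that bit. A hedged C11 model of the same bookkeeping (the MSR-to-bit mapping and the bound below are stand-ins, not the kernel's):

    #include <stdatomic.h>
    #include <stdbool.h>

    #define MAX_COUNTER_BITS 63              /* illustrative bound, not the kernel's */
    static atomic_ulong perfctr_owner;       /* models the perfctr_nmi_owner bitmap */

    /* Hypothetical stand-in for nmi_perfctr_msr_to_bit(). */
    static unsigned int msr_to_bit(unsigned int msr) { return msr & 0x3f; }

    static bool reserve_counter(unsigned int msr)
    {
        unsigned int bit = msr_to_bit(msr);
        unsigned long mask;

        if (bit > MAX_COUNTER_BITS)
            return false;
        mask = 1UL << bit;
        /* test_and_set_bit() analogue: the claim succeeds only if the bit was clear */
        return !(atomic_fetch_or(&perfctr_owner, mask) & mask);
    }

    static void release_counter(unsigned int msr)
    {
        unsigned int bit = msr_to_bit(msr);

        if (bit > MAX_COUNTER_BITS)
            return;
        atomic_fetch_and(&perfctr_owner, ~(1UL << bit));  /* clear_bit() analogue */
    }
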
/arch/arm64/include/asm/
arm_dsu_pmu.h
53 static inline void __dsu_pmu_select_counter(int counter) in __dsu_pmu_select_counter() argument
55 write_sysreg_s(counter, CLUSTERPMSELR_EL1); in __dsu_pmu_select_counter()
59 static inline u64 __dsu_pmu_read_counter(int counter) in __dsu_pmu_read_counter() argument
61 __dsu_pmu_select_counter(counter); in __dsu_pmu_read_counter()
67 __dsu_pmu_select_counter(counter); in __dsu_pmu_write_counter()
74 __dsu_pmu_select_counter(counter); in __dsu_pmu_set_event()
92 write_sysreg_s(BIT(counter), CLUSTERPMCNTENCLR_EL1); in __dsu_pmu_disable_counter()
96 static inline void __dsu_pmu_enable_counter(int counter) in __dsu_pmu_enable_counter() argument
98 write_sysreg_s(BIT(counter), CLUSTERPMCNTENSET_EL1); in __dsu_pmu_enable_counter()
104 write_sysreg_s(BIT(counter), CLUSTERPMINTENSET_EL1); in __dsu_pmu_counter_interrupt_enable()
[all …]
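
Note: every accessor here first calls __dsu_pmu_select_counter(): the counter index goes into CLUSTERPMSELR_EL1, and the subsequent read, write, or event programming hits whichever counter is currently selected. Because select-then-access is a two-step sequence, callers must keep the pair together. A toy model of that indirection (plain variables stand in for the system registers, and a mutex for the serialization the driver provides):

    #include <pthread.h>
    #include <stdint.h>

    /* Toy model of the banked-register indirection: one select register,
     * one data register whose meaning depends on the current selection. */
    static int      pmselr;            /* models CLUSTERPMSELR_EL1 */
    static uint64_t pmxevcntr[16];     /* models the selected counter's storage */
    static pthread_mutex_t sel_lock = PTHREAD_MUTEX_INITIALIZER;

    static void select_counter(int counter) { pmselr = counter; }

    /* Select and read must stay paired, or a concurrent select would
     * redirect the read to the wrong counter. */
    static uint64_t read_counter(int counter)
    {
        uint64_t val;

        pthread_mutex_lock(&sel_lock);
        select_counter(counter);
        val = pmxevcntr[pmselr];
        pthread_mutex_unlock(&sel_lock);
        return val;
    }
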
/arch/s390/include/asm/
atomic.h
20 return __atomic_read(&v->counter); in arch_atomic_read()
26 __atomic_set(&v->counter, i); in arch_atomic_set()
38 return __atomic_add_barrier(i, &v->counter); in arch_atomic_fetch_add()
44 __atomic_add(i, &v->counter); in arch_atomic_add()
50 __atomic_add_const(1, &v->counter); in arch_atomic_inc()
56 __atomic_add_const(-1, &v->counter); in arch_atomic_dec()
85 __atomic_##op(i, &v->counter); \
107 return arch_xchg(&v->counter, new); in ATOMIC_OPS()
133 __atomic64_set((long *)&v->counter, i); in arch_atomic64_set()
151 __atomic64_add(i, (long *)&v->counter); in arch_atomic64_add()
[all …]
/arch/x86/include/asm/
atomic64_64.h
15 return __READ_ONCE((v)->counter); in arch_atomic64_read()
20 __WRITE_ONCE(v->counter, i); in arch_atomic64_set()
26 : "=m" (v->counter) in arch_atomic64_add()
33 : "=m" (v->counter) in arch_atomic64_sub()
46 : "=m" (v->counter) in arch_atomic64_inc()
54 : "=m" (v->counter) in arch_atomic64_dec()
79 return i + xadd(&v->counter, i); in arch_atomic64_add_return()
87 return xadd(&v->counter, i); in arch_atomic64_fetch_add()
114 : "+m" (v->counter) in arch_atomic64_and()
132 : "+m" (v->counter) in arch_atomic64_or()
[all …]
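
Note: lines 79 and 87 show the core x86 trick: xadd atomically adds and returns the counter's old value, so arch_atomic64_fetch_add() is xadd directly and arch_atomic64_add_return() simply re-adds i to the result. The same derivation in portable C11 (the x86 atomic.h hits below use it identically for 32-bit):

    #include <stdatomic.h>

    /* fetch_add: returns the value *before* the addition, like x86 xadd. */
    static long my_fetch_add(atomic_long *v, long i)
    {
        return atomic_fetch_add(v, i);
    }

    /* add_return: returns the value *after* the addition, derived exactly
     * as in the listing: `return i + xadd(&v->counter, i);` */
    static long my_add_return(atomic_long *v, long i)
    {
        return i + atomic_fetch_add(v, i);
    }
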
atomic.h
23 return __READ_ONCE((v)->counter); in arch_atomic_read()
28 __WRITE_ONCE(v->counter, i); in arch_atomic_set()
34 : "+m" (v->counter) in arch_atomic_add()
41 : "+m" (v->counter) in arch_atomic_sub()
54 : "+m" (v->counter) :: "memory"); in arch_atomic_inc()
85 return i + xadd(&v->counter, i); in arch_atomic_add_return()
93 return xadd(&v->counter, i); in arch_atomic_fetch_add()
113 return arch_xchg(&v->counter, new); in arch_atomic_xchg()
120 : "+m" (v->counter) in arch_atomic_and()
138 : "+m" (v->counter) in arch_atomic_or()
[all …]
local.h
22 : "+m" (l->a.counter)); in local_inc()
28 : "+m" (l->a.counter)); in local_dec()
34 : "+m" (l->a.counter) in local_add()
41 : "+m" (l->a.counter) in local_sub()
56 return GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, e, "er", i); in local_sub_and_test()
69 return GEN_UNARY_RMWcc(_ASM_DEC, l->a.counter, e); in local_dec_and_test()
82 return GEN_UNARY_RMWcc(_ASM_INC, l->a.counter, e); in local_inc_and_test()
110 : "+r" (i), "+m" (l->a.counter) in local_add_return()
125 return cmpxchg_local(&l->a.counter, old, new); in local_cmpxchg()
130 return try_cmpxchg_local(&l->a.counter, in local_try_cmpxchg()
[all …]
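
Note: local.h reuses the same counter field but drops the lock prefix: incq/xadd and the GEN_*_RMWcc forms are single instructions, atomic against interrupts on the owning CPU but not against other CPUs. That is the local_t contract: only the CPU that owns the variable may touch it. A rough userspace analogue is a per-thread counter (an ownership analogy only; __thread arithmetic is not signal-safe):

    /* Per-thread counter: one owner, no lock, like local_t per CPU. */
    static __thread long local_counter;

    static void my_local_inc(void)            /* x86: single incq, no lock prefix */
    {
        local_counter++;
    }

    static long my_local_add_return(long i)   /* x86: single xadd, no lock prefix */
    {
        local_counter += i;
        return local_counter;
    }
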
/arch/powerpc/include/asm/
atomic.h
44 __asm__ __volatile__("stw %1,0(%2)" : "=m"(v->counter) : "r"(i), "b"(&v->counter)); in arch_atomic_set()
182 : "r" (&v->counter) in arch_atomic_dec_if_positive()
210 __asm__ __volatile__("std %1,0(%2)" : "=m"(v->counter) : "r"(i), "b"(&v->counter)); in arch_atomic64_set()
307 : "=&r" (t), "+m" (v->counter) in ATOMIC64_OPS()
308 : "r" (&v->counter) in ATOMIC64_OPS()
322 : "=&r" (t), "+m" (v->counter) in arch_atomic64_inc_return_relaxed()
323 : "r" (&v->counter) in arch_atomic64_inc_return_relaxed()
339 : "r" (&v->counter) in arch_atomic64_dec()
354 : "r" (&v->counter) in arch_atomic64_dec_return_relaxed()
381 : "r" (&v->counter) in arch_atomic64_dec_if_positive()
[all …]
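
Note: arch_atomic64_dec_if_positive() (line 381) performs the decrement only when the result stays non-negative, retrying its LL/SC sequence on contention. The same semantics as a portable compare-exchange loop (a sketch of the generic shape, not powerpc's asm):

    #include <stdatomic.h>

    /* Decrement *v only if the result would be >= 0; return the would-be
     * result either way (a negative return means nothing was written). */
    static long dec_if_positive(atomic_long *v)
    {
        long old = atomic_load(v);
        long dec;

        do {
            dec = old - 1;
            if (dec < 0)
                break;                /* would go negative: leave *v alone */
        } while (!atomic_compare_exchange_weak(v, &old, dec));

        return dec;
    }
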
/arch/mips/include/asm/
local.h
48 : "Ir" (i), "m" (l->a.counter) in local_add_return()
54 result = l->a.counter; in local_add_return()
56 l->a.counter = result; in local_add_return()
82 : "Ir" (i), "m" (l->a.counter) in local_sub_return()
88 result = l->a.counter; in local_sub_return()
90 l->a.counter = result; in local_sub_return()
99 return cmpxchg_local(&l->a.counter, old, new); in local_cmpxchg()
104 return try_cmpxchg_local(&l->a.counter, in local_try_cmpxchg()
105 (typeof(l->a.counter) *) old, new); in local_try_cmpxchg()
184 #define __local_inc(l) ((l)->a.counter++)
[all …]
atomic.h
29 return READ_ONCE(v->counter); \
34 WRITE_ONCE(v->counter, i); \
53 v->counter c_op i; \
81 result = v->counter; \
83 v->counter = result; \
99 "+" GCC_OFF_SMALL_ASM() (v->counter) \
115 result = v->counter; \
116 v->counter c_op i; \
132 "+" GCC_OFF_SMALL_ASM() (v->counter) \
207 result = v->counter; \
[all …]
/arch/riscv/include/asm/
atomic.h
29 return READ_ONCE(v->counter); in arch_atomic_read()
33 WRITE_ONCE(v->counter, i); in arch_atomic_set()
40 return READ_ONCE(v->counter); in arch_atomic64_read()
44 WRITE_ONCE(v->counter, i); in arch_atomic64_set()
59 : "+A" (v->counter) \
95 : "+A" (v->counter), "=r" (ret) \ in ATOMIC_OPS()
106 : "+A" (v->counter), "=r" (ret) \
208 : [p]"=&r" (_prev), [rc]"=&r" (_rc), [c]"+A" (counter) \
247 : [p]"=&r" (_prev), [rc]"=&r" (_rc), [c]"+A" (counter) \
308 _arch_atomic_dec_if_positive(prev, rc, v->counter, "w"); in arch_atomic_dec_if_positive()
[all …]
/arch/alpha/include/asm/
local.h
33 :"=&r" (temp), "=m" (l->a.counter), "=&r" (result) in local_add_return()
34 :"Ir" (i), "m" (l->a.counter) : "memory"); in local_add_return()
50 :"=&r" (temp), "=m" (l->a.counter), "=&r" (result) in local_sub_return()
51 :"Ir" (i), "m" (l->a.counter) : "memory"); in local_sub_return()
57 return cmpxchg_local(&l->a.counter, old, new); in local_cmpxchg()
62 return try_cmpxchg_local(&l->a.counter, (s64 *)old, new); in local_try_cmpxchg()
65 #define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))
104 #define __local_inc(l) ((l)->a.counter++)
105 #define __local_dec(l) ((l)->a.counter--)
106 #define __local_add(i,l) ((l)->a.counter+=(i))
[all …]
atomic.h
29 #define arch_atomic_read(v) READ_ONCE((v)->counter)
30 #define arch_atomic64_read(v) READ_ONCE((v)->counter)
53 :"=&r" (temp), "=m" (v->counter) \
54 :"Ir" (i), "m" (v->counter)); \
70 :"=&r" (temp), "=m" (v->counter), "=&r" (result) \
71 :"Ir" (i), "m" (v->counter) : "memory"); \
89 :"Ir" (i), "m" (v->counter) : "memory"); \
106 :"=&r" (temp), "=m" (v->counter) \
107 :"Ir" (i), "m" (v->counter)); \
125 :"Ir" (i), "m" (v->counter) : "memory"); \
[all …]
/arch/arc/include/asm/
spinlock.h
101 : [rwlock] "r" (&(rw->counter)), in arch_read_lock()
125 : [rwlock] "r" (&(rw->counter)), in arch_read_trylock()
158 : [rwlock] "r" (&(rw->counter)), in arch_write_lock()
183 : [rwlock] "r" (&(rw->counter)), in arch_write_trylock()
209 : [rwlock] "r" (&(rw->counter)) in arch_read_unlock()
310 if (rw->counter > 0) { in arch_read_trylock()
311 rw->counter--; in arch_read_trylock()
336 if (rw->counter == __ARCH_RW_LOCK_UNLOCKED__) { in arch_write_trylock()
337 rw->counter = 0; in arch_write_trylock()
364 rw->counter++; in arch_read_unlock()
[all …]
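
Note: the non-LLSC variant at lines 310-364 encodes the whole rwlock in one counter: a reader takes a unit while the counter is positive, a writer claims everything by swinging the counter from __ARCH_RW_LOCK_UNLOCKED__ to 0, and read_unlock returns the unit (the parisc spinlock.h hits further down share this shape). A compact model, with a mutex standing in for the outer serialization the kernel provides:

    #include <pthread.h>
    #include <stdbool.h>

    #define RW_LOCK_UNLOCKED 0x01000000   /* stand-in for __ARCH_RW_LOCK_UNLOCKED__ */

    static int rw_counter = RW_LOCK_UNLOCKED;
    static pthread_mutex_t rw_guard = PTHREAD_MUTEX_INITIALIZER;

    static bool my_read_trylock(void)     /* lines 310-311: take one reader unit */
    {
        bool ok = false;
        pthread_mutex_lock(&rw_guard);
        if (rw_counter > 0) { rw_counter--; ok = true; }
        pthread_mutex_unlock(&rw_guard);
        return ok;
    }

    static bool my_write_trylock(void)    /* lines 336-337: take every unit at once */
    {
        bool ok = false;
        pthread_mutex_lock(&rw_guard);
        if (rw_counter == RW_LOCK_UNLOCKED) { rw_counter = 0; ok = true; }
        pthread_mutex_unlock(&rw_guard);
        return ok;
    }

    static void my_read_unlock(void)      /* line 364: return the reader unit */
    {
        pthread_mutex_lock(&rw_guard);
        rw_counter++;
        pthread_mutex_unlock(&rw_guard);
    }
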
atomic64-arcv2.h
12 s64 __aligned(8) counter;
24 : "r"(&v->counter)); in arch_atomic64_read()
45 : "r"(a), "r"(&v->counter) in arch_atomic64_set()
62 : "r"(&v->counter), "ir"(a) \
79 : "r"(&v->counter), "ir"(a) \
101 : "r"(&v->counter), "ir"(a) \
195 : "r"(&v->counter) in arch_atomic64_dec_if_positive()
221 : "r"(&v->counter), "r"(a), "r"(u) in arch_atomic64_fetch_add_unless()
atomic-spinlock.h
25 WRITE_ONCE(v->counter, i); in arch_atomic_set()
37 v->counter c_op i; \
51 temp = v->counter; \
53 v->counter = temp; \
69 orig = v->counter; \
70 v->counter c_op i; \
/arch/loongarch/include/asm/
local.h
36 : "+ZB" (l->a.counter), "=&r" (result) in local_add_return()
50 : "+ZB" (l->a.counter), "=&r" (result) in local_sub_return()
61 return cmpxchg_local(&l->a.counter, old, new); in local_cmpxchg()
66 return try_cmpxchg_local(&l->a.counter, in local_try_cmpxchg()
67 (typeof(l->a.counter) *) old, new); in local_try_cmpxchg()
146 #define __local_inc(l) ((l)->a.counter++)
147 #define __local_dec(l) ((l)->a.counter--)
148 #define __local_add(i, l) ((l)->a.counter += (i))
149 #define __local_sub(i, l) ((l)->a.counter -= (i))
atomic.h
34 #define arch_atomic_read(v) READ_ONCE((v)->counter)
42 : "+ZB" (v->counter) \
54 : "+ZB" (v->counter), "=&r" (result) \
68 : "+ZB" (v->counter), "=&r" (result) \
146 [c]"=ZB" (v->counter) in ATOMIC_OPS()
169 : "=&r" (result), "=&r" (temp), "+ZC" (v->counter) in arch_atomic_sub_if_positive()
181 : "=&r" (result), "=&r" (temp), "+ZC" (v->counter) in arch_atomic_sub_if_positive()
202 : "+ZB" (v->counter) \
213 : "+ZB" (v->counter), "=&r" (result) \
227 : "+ZB" (v->counter), "=&r" (result) \
[all …]
/arch/parisc/include/asm/
atomic.h
64 v->counter = i; in arch_atomic_set()
73 return READ_ONCE((v)->counter); in arch_atomic_read()
82 v->counter c_op i; \
93 ret = (v->counter c_op i); \
106 ret = v->counter; \
107 v->counter c_op i; \
154 v->counter c_op i; \
178 ret = v->counter; \
179 v->counter c_op i; \
222 v->counter = i; in arch_atomic64_set()
[all …]
spinlock.h
91 if (rw->counter > 0) { in arch_read_trylock()
92 rw->counter--; in arch_read_trylock()
117 if (rw->counter == __ARCH_RW_LOCK_UNLOCKED__) { in arch_write_trylock()
118 rw->counter = 0; in arch_write_trylock()
145 rw->counter++; in arch_read_unlock()
156 rw->counter = __ARCH_RW_LOCK_UNLOCKED__; in arch_write_unlock()
/arch/arm64/kvm/
pmu.c
101 u32 counter; in kvm_vcpu_pmu_enable_el0() local
103 for_each_set_bit(counter, &events, ARMPMU_MAX_HWEVENTS) { in kvm_vcpu_pmu_enable_el0()
104 typer = kvm_vcpu_pmu_read_evtype_direct(counter); in kvm_vcpu_pmu_enable_el0()
106 kvm_vcpu_pmu_write_evtype_direct(counter, typer); in kvm_vcpu_pmu_enable_el0()
116 u32 counter; in kvm_vcpu_pmu_disable_el0() local
118 for_each_set_bit(counter, &events, ARMPMU_MAX_HWEVENTS) { in kvm_vcpu_pmu_disable_el0()
119 typer = kvm_vcpu_pmu_read_evtype_direct(counter); in kvm_vcpu_pmu_disable_el0()
121 kvm_vcpu_pmu_write_evtype_direct(counter, typer); in kvm_vcpu_pmu_disable_el0()
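
Note: kvm_vcpu_pmu_enable_el0() visits only the set bits of the events mask, read-modify-writing each counter's event-type register. for_each_set_bit() boils down to a count-trailing-zeros walk, sketched below (the per-counter callback stands in for the kvm_vcpu_pmu_* accessors, which are not reproduced here):

    /* Visit each set bit of `events`, lowest index first, like for_each_set_bit(). */
    static void for_each_event(unsigned long events, void (*fn)(unsigned int counter))
    {
        while (events) {
            unsigned int counter = (unsigned int)__builtin_ctzl(events);
            fn(counter);              /* e.g. fix up this counter's event type */
            events &= events - 1;     /* clear the lowest set bit */
        }
    }
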
/arch/csky/include/asm/
atomic.h
18 return READ_ONCE(v->counter); in arch_atomic_read()
22 WRITE_ONCE(v->counter, i); in arch_atomic_set()
36 : "r" (i), "r" (&v->counter) \
60 : "r" (i), "r"(&v->counter) \ in ATOMIC_OP()
120 : "r" (a), "r" (&v->counter), "r" (u)
144 : "r" (&v->counter) in arch_atomic_inc_unless_negative()
169 : "r" (&v->counter) in arch_atomic_dec_unless_positive()
191 : "r" (&v->counter) in arch_atomic_dec_if_positive()
/arch/sparc/lib/
atomic32.c
38 ret = v->counter; \
39 v->counter c_op i; \
53 ret = (v->counter c_op i); \
76 ret = v->counter; in arch_atomic_xchg()
77 v->counter = new; in arch_atomic_xchg()
89 ret = v->counter; in arch_atomic_cmpxchg()
91 v->counter = new; in arch_atomic_cmpxchg()
104 ret = v->counter; in arch_atomic_fetch_add_unless()
106 v->counter += a; in arch_atomic_fetch_add_unless()
118 v->counter = i; in arch_atomic_set()
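
Note: sparc32 lacks usable atomic read-modify-write instructions, so atomic32.c implements every operation as load-modify-store inside a spinlock; the lock is picked by hashing the counter's address into a small array so unrelated atomics rarely contend. A model of that idea (the hash function and array size below are illustrative, not the kernel's):

    #include <pthread.h>
    #include <stdint.h>

    #define ATOMIC_HASH_SIZE 4    /* illustrative; the kernel picks its own size */
    static pthread_mutex_t atomic_locks[ATOMIC_HASH_SIZE] = {
        PTHREAD_MUTEX_INITIALIZER, PTHREAD_MUTEX_INITIALIZER,
        PTHREAD_MUTEX_INITIALIZER, PTHREAD_MUTEX_INITIALIZER,
    };

    static pthread_mutex_t *lock_for(void *addr)   /* models ATOMIC_HASH() */
    {
        return &atomic_locks[((uintptr_t)addr >> 4) % ATOMIC_HASH_SIZE];
    }

    static int locked_fetch_add(int *counter, int i)   /* shape of lines 38-39 */
    {
        pthread_mutex_t *m = lock_for(counter);
        int ret;

        pthread_mutex_lock(m);
        ret = *counter;
        *counter += i;
        pthread_mutex_unlock(m);
        return ret;
    }
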
/arch/openrisc/include/asm/
atomic.h
27 : "r"(&v->counter), "r"(i) \
44 : "r"(&v->counter), "r"(i) \
63 : "r"(&v->counter), "r"(i) \
121 : "r"(&v->counter), "r"(a), "r"(u) in ATOMIC_OP_RETURN()
128 #define arch_atomic_read(v) READ_ONCE((v)->counter)
129 #define arch_atomic_set(v,i) WRITE_ONCE((v)->counter, (i))
/arch/sh/include/asm/
atomic-irq.h
19 v->counter c_op i; \
29 temp = v->counter; \
31 v->counter = temp; \
43 temp = v->counter; \
44 v->counter c_op i; \
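
Note: the trailing backslashes throughout these hits mark macro templates: each architecture writes ATOMIC_OP()-style macros once, with c_op as the operator, and stamps out the add/sub/and/or family. sh's atomic-irq.h instantiates them as plain C under masked interrupts (lines 43-44 show the fetch-then-apply body). The generation idiom in miniature, with a mutex modeling IRQ masking (names are illustrative):

    #include <pthread.h>

    static pthread_mutex_t irq_off = PTHREAD_MUTEX_INITIALIZER; /* models IRQ disable on UP */
    typedef struct { int counter; } my_atomic_t;

    /* One template, many operations: mirrors the ATOMIC_OP(op, c_op) idiom. */
    #define ATOMIC_FETCH_OP(op, c_op)                                  \
    static int my_atomic_fetch_##op(int i, my_atomic_t *v)             \
    {                                                                  \
        int temp;                                                      \
        pthread_mutex_lock(&irq_off);                                  \
        temp = v->counter;        /* lines 43-44: fetch, then apply */ \
        v->counter = temp c_op i;                                      \
        pthread_mutex_unlock(&irq_off);                                \
        return temp;                                                   \
    }

    ATOMIC_FETCH_OP(add, +)    /* expands to my_atomic_fetch_add() */
    ATOMIC_FETCH_OP(sub, -)
    ATOMIC_FETCH_OP(and, &)
    ATOMIC_FETCH_OP(or,  |)
    #undef ATOMIC_FETCH_OP
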

