Home
last modified time | relevance | path

Searched refs:atomic64_t (Results 1 – 25 of 48) sorted by relevance

12

/arch/x86/include/asm/
A D atomic64_64.h 13 static __always_inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
18 static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
23 static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v) in arch_atomic64_add()
30 static __always_inline void arch_atomic64_sub(s64 i, atomic64_t *v) in arch_atomic64_sub()
43 static __always_inline void arch_atomic64_inc(atomic64_t *v) in arch_atomic64_inc()
51 static __always_inline void arch_atomic64_dec(atomic64_t *v) in arch_atomic64_dec()
59 static __always_inline bool arch_atomic64_dec_and_test(atomic64_t *v) in arch_atomic64_dec_and_test()
65 static __always_inline bool arch_atomic64_inc_and_test(atomic64_t *v) in arch_atomic64_inc_and_test()
111 static __always_inline void arch_atomic64_and(s64 i, atomic64_t *v) in arch_atomic64_and()
129 static __always_inline void arch_atomic64_or(s64 i, atomic64_t *v) in arch_atomic64_or()
[all …]
A D atomic64_32.h 13 } atomic64_t; typedef
43 #define __ATOMIC64_DECL(sym) void atomic64_##sym(atomic64_t *, ...)
106 static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n) in arch_atomic64_xchg()
119 static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
129 static __always_inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
156 static __always_inline s64 arch_atomic64_inc_return(atomic64_t *v) in arch_atomic64_inc_return()
167 static __always_inline s64 arch_atomic64_dec_return(atomic64_t *v) in arch_atomic64_dec_return()
178 static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v) in arch_atomic64_add()
194 static __always_inline void arch_atomic64_inc(atomic64_t *v) in arch_atomic64_inc()
203 static __always_inline void arch_atomic64_dec(atomic64_t *v) in arch_atomic64_dec()
[all …]
/arch/s390/include/asm/
A D atomic.h 125 static __always_inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
131 static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
137 static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v) in arch_atomic64_add_return()
143 static __always_inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v) in arch_atomic64_fetch_add()
149 static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v) in arch_atomic64_add()
155 static __always_inline void arch_atomic64_inc(atomic64_t *v) in arch_atomic64_inc()
161 static __always_inline void arch_atomic64_dec(atomic64_t *v) in arch_atomic64_dec()
173 static __always_inline bool arch_atomic64_dec_and_test(atomic64_t *v) in arch_atomic64_dec_and_test()
179 static __always_inline bool arch_atomic64_inc_and_test(atomic64_t *v) in arch_atomic64_inc_and_test()
185 static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 new) in arch_atomic64_xchg()
[all …]
A D pci_dma.h 89 atomic64_t mapped_pages;
90 atomic64_t unmapped_pages;
91 atomic64_t global_rpcits;
92 atomic64_t sync_map_rpcits;
93 atomic64_t sync_rpcits;
A D archrandom.h 20 extern atomic64_t s390_arch_random_counter;
/arch/arc/include/asm/
A D atomic64-arcv2.h 13 } atomic64_t; typedef
17 static inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
29 static inline void arch_atomic64_set(atomic64_t *v, s64 a) in arch_atomic64_set()
50 static inline void arch_atomic64_##op(s64 a, atomic64_t *v) \
67 static inline s64 arch_atomic64_##op##_return_relaxed(s64 a, atomic64_t *v) \
89 static inline s64 arch_atomic64_fetch_##op##_relaxed(s64 a, atomic64_t *v) \
159 static inline s64 arch_atomic64_xchg(atomic64_t *ptr, s64 new) in arch_atomic64_xchg()
180 static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v) in arch_atomic64_dec_if_positive()
204 static inline s64 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) in arch_atomic64_fetch_add_unless()
/arch/powerpc/include/asm/
A D atomic.h 193 static __inline__ s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
206 static __inline__ void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
216 static __inline__ void arch_atomic64_##op(s64 a, atomic64_t *v) \
232 arch_atomic64_##op##_return_relaxed(s64 a, atomic64_t *v) \
250 arch_atomic64_fetch_##op##_relaxed(s64 a, atomic64_t *v) \
298 static __inline__ void arch_atomic64_inc(atomic64_t *v) in ATOMIC64_OPS()
313 static __inline__ s64 arch_atomic64_inc_return_relaxed(atomic64_t *v) in arch_atomic64_inc_return_relaxed()
329 static __inline__ void arch_atomic64_dec(atomic64_t *v) in arch_atomic64_dec()
344 static __inline__ s64 arch_atomic64_dec_return_relaxed(atomic64_t *v) in arch_atomic64_dec_return_relaxed()
367 static __inline__ s64 arch_atomic64_dec_if_positive(atomic64_t *v) in arch_atomic64_dec_if_positive()
[all …]
/arch/csky/include/asm/
A D asid.h 13 atomic64_t generation;
15 atomic64_t __percpu *active;
33 void asid_new_context(struct asid_info *info, atomic64_t *pasid,
43 atomic64_t *pasid, unsigned int cpu, in asid_check_context()
A D mmu.h 7 atomic64_t asid;
/arch/arm/include/asm/
A D atomic.h 255 } atomic64_t; typedef
260 static inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
273 static inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
282 static inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
295 static inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
312 static inline void arch_atomic64_##op(s64 i, atomic64_t *v) \
332 arch_atomic64_##op##_return_relaxed(s64 i, atomic64_t *v) \
355 arch_atomic64_fetch_##op##_relaxed(s64 i, atomic64_t *v) \
434 static inline s64 arch_atomic64_xchg_relaxed(atomic64_t *ptr, s64 new) in arch_atomic64_xchg_relaxed()
454 static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v) in arch_atomic64_dec_if_positive()
[all …]
A D mmu.h 9 atomic64_t id;
/arch/sparc/include/asm/
A D atomic_64.h 25 void arch_atomic64_##op(s64, atomic64_t *);
29 s64 arch_atomic64_##op##_return(s64, atomic64_t *);
33 s64 arch_atomic64_fetch_##op(s64, atomic64_t *);
70 s64 arch_atomic64_dec_if_positive(atomic64_t *v);
/arch/arm64/include/asm/
A D atomic_lse.h 122 __lse_atomic64_##op(s64 i, atomic64_t *v) \
136 static __always_inline void __lse_atomic64_sub(s64 i, atomic64_t *v) in ATOMIC64_OP()
145 __lse_atomic64_fetch_##op##name(s64 i, atomic64_t *v) \
176 __lse_atomic64_fetch_sub##name(s64 i, atomic64_t *v) \ in ATOMIC64_FETCH_OPS()
190 __lse_atomic64_add_return##name(s64 i, atomic64_t *v) \
196 __lse_atomic64_sub_return##name(s64 i, atomic64_t *v) \
208 static __always_inline void __lse_atomic64_and(s64 i, atomic64_t *v)
215 __lse_atomic64_fetch_and##name(s64 i, atomic64_t *v) \
227 static __always_inline s64 __lse_atomic64_dec_if_positive(atomic64_t *v) in __lse_atomic64_dec_if_positive()
A D atomic.h 59 static __always_inline void arch_##op(long i, atomic64_t *v) \
74 static __always_inline long arch_##op##name(long i, atomic64_t *v) \
97 static __always_inline long arch_atomic64_dec_if_positive(atomic64_t *v)
A D atomic_ll_sc.h 123 __ll_sc_atomic64_##op(s64 i, atomic64_t *v) \ in ATOMIC_OPS()
140 __ll_sc_atomic64_##op##_return##name(s64 i, atomic64_t *v) \
161 __ll_sc_atomic64_fetch_##op##name(s64 i, atomic64_t *v) \
218 __ll_sc_atomic64_dec_if_positive(atomic64_t *v)
/arch/parisc/include/asm/
A D atomic.h 149 static __inline__ void arch_atomic64_##op(s64 i, atomic64_t *v) \
159 static __inline__ s64 arch_atomic64_##op##_return(s64 i, atomic64_t *v) \
172 static __inline__ s64 arch_atomic64_fetch_##op(s64 i, atomic64_t *v) \
217 arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
230 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
/arch/alpha/include/asm/
A D atomic.h 95 static __inline__ void arch_atomic64_##op(s64 i, atomic64_t * v) \
112 arch_atomic64_##op##_return_relaxed(s64 i, atomic64_t * v) \
132 arch_atomic64_fetch_##op##_relaxed(s64 i, atomic64_t * v) \
226 static __inline__ s64 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) in arch_atomic64_fetch_add_unless()
249 static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v) in arch_atomic64_dec_if_positive()
/arch/riscv/include/asm/
A D atomic.h 38 static __always_inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
42 static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
225 static __always_inline s64 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) in arch_atomic64_fetch_add_unless()
316 static __always_inline bool arch_atomic64_inc_unless_negative(atomic64_t *v) in arch_atomic64_inc_unless_negative()
328 static __always_inline bool arch_atomic64_dec_unless_positive(atomic64_t *v) in arch_atomic64_dec_unless_positive()
340 static __always_inline s64 arch_atomic64_dec_if_positive(atomic64_t *v) in arch_atomic64_dec_if_positive()
/arch/loongarch/include/asm/
A D atomic.h 198 static inline void arch_atomic64_##op(long i, atomic64_t *v) \
208 static inline long arch_atomic64_##op##_return##suffix(long i, atomic64_t *v) \
221 static inline long arch_atomic64_fetch_##op##suffix(long i, atomic64_t *v) \
290 static inline long arch_atomic64_fetch_add_unless(atomic64_t *v, long a, long u) in ATOMIC64_OPS()
313 static inline long arch_atomic64_sub_if_positive(long i, atomic64_t *v) in arch_atomic64_sub_if_positive()
/arch/s390/crypto/
A D arch_random.c 19 atomic64_t s390_arch_random_counter = ATOMIC64_INIT(0);
/arch/mips/include/asm/
A D mmu.h 12 atomic64_t mmid;
/arch/x86/kvm/mmu/
A D tdp_iter.h 30 atomic64_t *sptep_atomic = (atomic64_t *)rcu_dereference(sptep); in tdp_mmu_clear_spte_bits_atomic()
/arch/arm/include/asm/xen/
A D events.h 21 atomic64_t, \
/arch/arm64/kvm/
A D vmid.c 22 static atomic64_t vmid_generation;
25 static DEFINE_PER_CPU(atomic64_t, active_vmids);
/arch/csky/mm/
A D context.c 14 static DEFINE_PER_CPU(atomic64_t, active_asids);

Completed in 50 milliseconds

12