1 /*
2  * Copyright (C) 2018-2022 Intel Corporation.
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
6 
7 #ifndef _ATOMIC_H_
8 #define _ATOMIC_H_
9 
10 /* Test for GCC >= 4.7.0 */
11 #if ((__GNUC__ > 4) || (__GNUC__ == 4 && (__GNUC_MINOR__ >= 7)))
12 
/* Since GCC 4.7.0, the __atomic builtins have been introduced as a replacement
 * for the __sync ones. The original __sync builtins map to their __atomic
 * counterparts using the __ATOMIC_SEQ_CST model and will eventually be
 * deprecated. */
16 
/* Atomically read and return *ptr. */
#define atomic_load(ptr)			\
	__atomic_load_n(ptr, __ATOMIC_SEQ_CST)

/* Atomically write val into *ptr. */
#define atomic_store(ptr, val)				\
	__atomic_store_n(ptr, val, __ATOMIC_SEQ_CST)

/* Atomically write val into *ptr; evaluates to the previous value. */
#define atomic_xchg(ptr, val)				\
	__atomic_exchange_n(ptr, val, __ATOMIC_SEQ_CST)

/* Atomic compare-and-swap: if *ptr equals *expected, write desired into *ptr
 * and evaluate to true; otherwise copy the observed value into *expected and
 * evaluate to false.
 *
 * Note: expected should also be a pointer.
 *
 * The failure memory order is __ATOMIC_SEQ_CST (not a weaker __ATOMIC_ACQUIRE)
 * so that cmpxchg is consistent with the sequentially-consistent model used by
 * every other macro in this header. */
#define atomic_cmpxchg(ptr, expected, desired)				\
	__atomic_compare_exchange_n(ptr, expected, desired,		\
				false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)
30 
/*
 * Atomic read-modify-write, evaluating to the NEW value of the object.
 *
 * Each macro atomically applies the named operation to *p with operand v
 * using sequentially consistent ordering, mirroring GCC's
 * __atomic_OP_fetch builtins.
 */
#define atomic_add_fetch(p, v)	__atomic_add_fetch((p), (v), __ATOMIC_SEQ_CST)
#define atomic_sub_fetch(p, v)	__atomic_sub_fetch((p), (v), __ATOMIC_SEQ_CST)
#define atomic_and_fetch(p, v)	__atomic_and_fetch((p), (v), __ATOMIC_SEQ_CST)
#define atomic_xor_fetch(p, v)	__atomic_xor_fetch((p), (v), __ATOMIC_SEQ_CST)
#define atomic_or_fetch(p, v)	__atomic_or_fetch((p), (v), __ATOMIC_SEQ_CST)
#define atomic_nand_fetch(p, v)	__atomic_nand_fetch((p), (v), __ATOMIC_SEQ_CST)
43 
/*
 * Atomic read-modify-write, evaluating to the OLD value of the object
 * (the value it held immediately before the operation).
 *
 * Sequentially consistent ordering throughout, mirroring GCC's
 * __atomic_fetch_OP builtins.
 */
#define atomic_fetch_add(p, v)	__atomic_fetch_add((p), (v), __ATOMIC_SEQ_CST)
#define atomic_fetch_sub(p, v)	__atomic_fetch_sub((p), (v), __ATOMIC_SEQ_CST)
#define atomic_fetch_and(p, v)	__atomic_fetch_and((p), (v), __ATOMIC_SEQ_CST)
#define atomic_fetch_xor(p, v)	__atomic_fetch_xor((p), (v), __ATOMIC_SEQ_CST)
#define atomic_fetch_or(p, v)	__atomic_fetch_or((p), (v), __ATOMIC_SEQ_CST)
#define atomic_fetch_nand(p, v)	__atomic_fetch_nand((p), (v), __ATOMIC_SEQ_CST)
56 
/* Atomic flag operations and memory fences, all sequentially consistent.
 * test_and_set sets the flag and evaluates to its previous state; clear
 * resets it to the cleared state. */
#define atomic_test_and_set(p)	__atomic_test_and_set((p), __ATOMIC_SEQ_CST)
#define atomic_clear(p)		__atomic_clear((p), __ATOMIC_SEQ_CST)
#define atomic_thread_fence()	__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define atomic_signal_fence()	__atomic_signal_fence(__ATOMIC_SEQ_CST)
65 
66 #else  /* not GCC >= 4.7.0 */
67 
/* __sync builtins do not have load/store interfaces. Use add_fetch and xchg to
 * mimic their functionality.
 *
 * Also note that __sync_lock_test_and_set is rather an atomic exchange
 * operation per GCC manual on the __sync builtins.
 */
/* Atomically read *ptr (emulated as an add of zero, since the __sync
 * builtins provide no plain load). */
#define atomic_load(ptr)			\
	__sync_add_and_fetch(ptr, 0)

/* Atomically write val into *ptr, discarding the exchanged-out value. */
#define atomic_store(ptr, val)			\
	(void)(__sync_lock_test_and_set(ptr, val))

/* Atomically write val into *ptr; evaluates to the previous value
 * (__sync_lock_test_and_set is really an atomic exchange per the GCC
 * manual). */
#define atomic_xchg(ptr, val)			\
	__sync_lock_test_and_set(ptr, val)

/* Atomic compare-and-swap: if *ptr equals *expected, write desired into *ptr
 * and evaluate to true; otherwise copy the observed value into *expected and
 * evaluate to false.
 *
 * Note: expected should also be a pointer.
 *
 * Unlike a bare __sync_bool_compare_and_swap, this writes the observed value
 * back into *expected on failure, matching the semantics of the
 * __atomic_compare_exchange_n implementation used on GCC >= 4.7.0. */
#define atomic_cmpxchg(ptr, expected, desired)				\
	({								\
		__typeof__(*(ptr)) cmpxchg_exp_ = *(expected);		\
		__typeof__(*(ptr)) cmpxchg_cur_ =			\
			__sync_val_compare_and_swap(ptr,		\
					cmpxchg_exp_, desired);		\
		if (cmpxchg_cur_ != cmpxchg_exp_) {			\
			*(expected) = cmpxchg_cur_;			\
		}							\
		(cmpxchg_cur_ == cmpxchg_exp_);				\
	})
86 
/* Atomic read-modify-write evaluating to the NEW value of the object,
 * built on the legacy __sync_OP_and_fetch builtins (full-barrier
 * semantics). */
#define atomic_add_fetch(p, v)	__sync_add_and_fetch((p), (v))
#define atomic_sub_fetch(p, v)	__sync_sub_and_fetch((p), (v))
#define atomic_and_fetch(p, v)	__sync_and_and_fetch((p), (v))
#define atomic_xor_fetch(p, v)	__sync_xor_and_fetch((p), (v))
#define atomic_or_fetch(p, v)	__sync_or_and_fetch((p), (v))
#define atomic_nand_fetch(p, v)	__sync_nand_and_fetch((p), (v))
99 
/* Atomic read-modify-write evaluating to the OLD value of the object
 * (its value immediately before the operation), built on the legacy
 * __sync_fetch_and_OP builtins. */
#define atomic_fetch_add(p, v)	__sync_fetch_and_add((p), (v))
#define atomic_fetch_sub(p, v)	__sync_fetch_and_sub((p), (v))
#define atomic_fetch_and(p, v)	__sync_fetch_and_and((p), (v))
#define atomic_fetch_xor(p, v)	__sync_fetch_and_xor((p), (v))
#define atomic_fetch_or(p, v)	__sync_fetch_and_or((p), (v))
#define atomic_fetch_nand(p, v)	__sync_fetch_and_nand((p), (v))
112 
/* Flag operations and fences via the legacy __sync builtins.
 * test_and_set stores 1 and evaluates to the previous value;
 * __sync_lock_release writes the cleared (zero) state. The __sync
 * interface has no separate signal fence, so both fence macros emit a
 * full __sync_synchronize barrier. */
#define atomic_test_and_set(p)	(bool)(__sync_lock_test_and_set((p), 1))
#define atomic_clear(p)		__sync_lock_release(p)
#define atomic_thread_fence()	__sync_synchronize()
#define atomic_signal_fence()	__sync_synchronize()
121 
122 #endif /* GCC >= 4.7.0 */
123 
124 #endif /* _ATOMIC_H_ */
125