1 /*
2  * Copyright (C) 2013 Synopsys, Inc. (www.synopsys.com)
3  *
4  * Licensed under LGPL v2.1 or later, see the file COPYING.LIB in this tarball.
5  */
6 
7 #include <stdint.h>
8 #include <sysdep.h>
9 
/* Width-specific type aliases consumed by uClibc's generic atomic
   machinery.  Note that only the 32-bit (and pointer-sized, which is
   32-bit on ARC) widths are actually operated on below; other widths
   are diagnosed at link time via __arc_link_error.  */
typedef int8_t atomic8_t;
typedef uint8_t uatomic8_t;
typedef int_fast8_t atomic_fast8_t;
typedef uint_fast8_t uatomic_fast8_t;

typedef int32_t atomic32_t;
typedef uint32_t uatomic32_t;
typedef int_fast32_t atomic_fast32_t;
typedef uint_fast32_t uatomic_fast32_t;

typedef intptr_t atomicptr_t;
typedef uintptr_t uatomicptr_t;
typedef intmax_t atomic_max_t;
typedef uintmax_t uatomic_max_t;
24 
25 void __arc_link_error (void);
26 
/* Full memory barrier.  On ARC700 (__A7__) only a compiler barrier is
   emitted and ARC_BARRIER_INSTR expands to nothing -- NOTE(review):
   presumably ARC700 configurations here are uniprocessor and/or lack
   the DMB instruction; confirm against the target spec.  On ARCv2 the
   DMB instruction with operand 3 orders both loads and stores.
   ARC_BARRIER_INSTR is the raw instruction text for splicing into
   other asm templates (see the exchange macros below).  */
#ifdef __A7__
#define atomic_full_barrier() __asm__ __volatile__("": : :"memory")
#define ARC_BARRIER_INSTR 	""
#else
#define atomic_full_barrier() __asm__ __volatile__("dmb 3": : :"memory")
#define ARC_BARRIER_INSTR 	"dmb 3"
#endif
34 
/* Atomic compare and exchange. */

/* Only 32-bit CAS is implemented on ARC: the 8/16/64-bit acquire
   variants reference the undefined __arc_link_error so any use fails
   at link time.  The statement expression still yields OLDVAL so the
   macro type-checks at its call sites.  */
#define __arch_compare_and_exchange_val_8_acq(mem, newval, oldval) \
  ({ __arc_link_error (); oldval; })

#define __arch_compare_and_exchange_val_16_acq(mem, newval, oldval) \
  ({ __arc_link_error (); oldval; })

#define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
  ({ __arc_link_error (); oldval; })
45 
46 #ifdef __CONFIG_ARC_HAS_ATOMICS__
47 
#ifdef __A7__
/* 32-bit CAS for ARC700 built on the LLOCK/SCOND (load-locked /
   store-conditional) pair: load *MEM exclusively, bail out to 2: if it
   no longer equals OLDVAL, otherwise attempt the conditional store of
   NEWVAL and retry from 1: if the reservation was lost.  Evaluates to
   the value previously found at MEM (== OLDVAL on success).
   NOTE(review): no barrier instruction is emitted, so "acquire"
   ordering rests solely on the "memory" clobber -- presumably
   sufficient on non-SMP ARC700; confirm.  */
#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval)     \
  ({									\
	__typeof(oldval) prev;						\
									\
	__asm__ __volatile__(						\
	"1:	llock   %0, [%1]	\n"				\
	"	brne    %0, %2, 2f	\n"				\
	"	scond   %3, [%1]	\n"				\
	"	bnz     1b		\n"				\
	"2:				\n"				\
	: "=&r"(prev)							\
	: "r"(mem), "ir"(oldval),					\
	  "r"(newval) /* can't be "ir". scond can't take limm for "b" */\
	: "cc", "memory");						\
									\
	prev;								\
  })
66 #else /* !__A7__ */
/* ARCv2/HS path: let the compiler emit the atomic sequences.  */
#define USE_ATOMIC_COMPILER_BUILTINS 1

/* 32-bit CAS with acquire ordering on success (relaxed on failure)
   via the GCC __atomic builtin.  On failure the builtin writes the
   observed value into __oldval, so the expression always evaluates to
   the value previously in *MEM, matching the
   __arch_compare_and_exchange_val_* contract.  */
#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval)	\
  ({									\
    __typeof(*mem) __oldval = (oldval);					\
    __atomic_compare_exchange_n(mem, (void *) &__oldval, newval, 0,	\
                                 __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);	\
    __oldval;								\
  })
76 
/* Release-ordered variants: as with the acquire forms, every width
   other than 32 bits is a link-time error.  */
#define __arch_compare_and_exchange_val_8_rel(mem, newval, oldval) \
  ({ __arc_link_error (); oldval; })

#define __arch_compare_and_exchange_val_16_rel(mem, newval, oldval) \
  ({ __arc_link_error (); oldval; })

#define __arch_compare_and_exchange_val_64_rel(mem, newval, oldval) \
  ({ __arc_link_error (); oldval; })

/* 32-bit CAS with release ordering on success (relaxed on failure);
   evaluates to the value previously in *MEM.  */
#define __arch_compare_and_exchange_val_32_rel(mem, newval, oldval)	\
  ({									\
    __typeof(*mem) __oldval = (oldval);					\
    __atomic_compare_exchange_n(mem, (void *) &__oldval, newval, 0,	\
                                 __ATOMIC_RELEASE, __ATOMIC_RELAXED);	\
    __oldval;								\
  })
93 
/* Compare and exchange with "acquire" semantics, ie barrier after.
   __atomic_val_bysize dispatches on sizeof(*mem) to the matching
   __arch_compare_and_exchange_val_<width>_acq macro above.  */
#define atomic_compare_and_exchange_val_acq(mem, new, old)		\
  __atomic_val_bysize(__arch_compare_and_exchange_val, acq,		\
                       mem, new, old)

/* Compare and exchange with "release" semantics, ie barrier before */
#define atomic_compare_and_exchange_val_rel(mem, new, old)		\
  __atomic_val_bysize(__arch_compare_and_exchange_val, rel,		\
                       mem, new, old)

/* Explicitly defined here so the boolean form gets release semantics
   (the generic fallback would build on the acquire CAS).  Evaluates
   to 0 when the swap succeeded (previous value equalled OLDVAL),
   non-zero otherwise.  OLDVAL is captured once to avoid evaluating
   the macro argument twice.  */
#define atomic_compare_and_exchange_bool_rel(mem, newval, oldval) \
  ({									\
     __typeof (oldval) __atg3_old = (oldval);				\
     atomic_compare_and_exchange_val_rel (mem, newval, __atg3_old)	\
       != __atg3_old;							\
  })
111 
112 #endif /* __A7__ */
113 
114 #else /* !__CONFIG_ARC_HAS_ATOMICS__ */
115 
#ifndef __NR_arc_usr_cmpxchg
#error "__NR_arc_usr_cmpxchg missing: Please upgrade to kernel 4.9+ headers"
#endif

/* With lack of hardware assist, use kernel to do the atomic operation
   This will only work in a UP configuration
 */
/* 32-bit CAS via the arc_usr_cmpxchg syscall: the kernel performs the
   compare-and-swap atomically on behalf of userspace.  r0 carries the
   address in and the previous value out; r1/r2 carry OLDVAL/NEWVAL and
   r8 the syscall number.  NOTE(review): __ret appears both as the
   "+r" output and again in the input list -- looks redundant; verify
   against the GCC extended-asm rules for tied register variables.  */
#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval)     \
  ({									\
	/* open-coded, as INTERNAL_SYSCALL lacks the cc clobber */	\
	register int __ret __asm__("r0") = (int)(mem);			\
	register int __a1 __asm__("r1") = (int)(oldval);		\
	register int __a2 __asm__("r2") = (int)(newval);		\
	register int _sys_num __asm__("r8") = __NR_arc_usr_cmpxchg;	\
									\
        __asm__ volatile (						\
		ARC_TRAP_INSN						\
		: "+r" (__ret)						\
		: "r"(_sys_num), "r"(__ret), "r"(__a1), "r"(__a2)	\
		: "memory", "cc");					\
									\
	/* syscall returns previous value */				\
	/* Z bit is set if cmpxchg succeeded (we don't use that yet) */	\
									\
	(__typeof(oldval)) __ret;					\
  })
142 
143 #endif
144 
/* Store NEWVALUE in *MEM and return the old value.
   Atomic EX is present in all configurations
 */

/* Exchange with acquire semantics: the EX instruction swaps VAL with
   *MEM atomically, then the barrier (empty text on ARC700, see
   ARC_BARRIER_INSTR) keeps subsequent accesses from being hoisted
   above the swap.  Evaluates to the value previously in *MEM.  */
#define __arch_exchange_32_acq(mem, newval)				\
  ({									\
	__typeof__(*(mem)) val = newval;				\
									\
	__asm__ __volatile__(						\
	"ex %0, [%1]\n"							\
	ARC_BARRIER_INSTR						\
	: "+r" (val)							\
	: "r" (mem)							\
	: "memory" );							\
									\
	val;								\
  })
162 
/* Exchange with release semantics: the barrier (empty text on ARC700)
   is placed before the EX so all earlier accesses complete before the
   swap becomes visible.  Evaluates to the value previously in *MEM.  */
#define __arch_exchange_32_rel(mem, newval)				\
  ({									\
	__typeof__(*(mem)) val = newval;				\
									\
	__asm__ __volatile__(						\
	ARC_BARRIER_INSTR"\n"						\
	"ex %0, [%1]"							\
	: "+r" (val)							\
	: "r" (mem)							\
	: "memory" );							\
									\
	val;								\
  })
176 
/* Public exchange entry points.  Only 4-byte objects are supported
   (EX is a 32-bit swap); any other operand size aborts at run time
   rather than silently corrupting memory.  The sizeof test is a
   compile-time constant, so the branch folds away for valid uses.  */
#define atomic_exchange_acq(mem, newval)				\
  ({									\
	if (sizeof(*(mem)) != 4)					\
		abort();						\
	__arch_exchange_32_acq(mem, newval);				\
  })

#define atomic_exchange_rel(mem, newval)				\
  ({									\
	if (sizeof(*(mem)) != 4)					\
		abort();						\
	__arch_exchange_32_rel(mem, newval);				\
  })
190