#ifndef __ASM_ARM32_CMPXCHG_H
#define __ASM_ARM32_CMPXCHG_H

#include <xen/bug.h>
#include <xen/prefetch.h>

extern void __bad_xchg(volatile void *ptr, int size);
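
/*
 * Atomically exchange the value at ptr with x using an LDREX/STREX
 * exclusive-monitor loop, retrying until the store-exclusive succeeds.
 * The smp_mb() barriers before and after make the exchange fully
 * ordered. Only 1- and 4-byte accesses are supported: any other size
 * falls through to the deliberately undefined __bad_xchg(), turning an
 * unsupported size into a link-time error.
 */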
8
static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
        unsigned long ret;
        unsigned int tmp;

        smp_mb();
        prefetchw((const void *)ptr);

        switch (size) {
        case 1:
                asm volatile("@ __xchg1\n"
                "1:     ldrexb  %0, [%3]\n"
                "       strexb  %1, %2, [%3]\n"
                "       teq     %1, #0\n"
                "       bne     1b"
                        : "=&r" (ret), "=&r" (tmp)
                        : "r" (x), "r" (ptr)
                        : "memory", "cc");
                break;
        case 4:
                asm volatile("@ __xchg4\n"
                "1:     ldrex   %0, [%3]\n"
                "       strex   %1, %2, [%3]\n"
                "       teq     %1, #0\n"
                "       bne     1b"
                        : "=&r" (ret), "=&r" (tmp)
                        : "r" (x), "r" (ptr)
                        : "memory", "cc");
                break;
        default:
                __bad_xchg(ptr, size), ret = 0;
                break;
        }
        smp_mb();

        return ret;
}

#define xchg(ptr,x) \
        ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
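
/*
 * Illustrative usage sketch, not part of the original header: a
 * hypothetical caller claiming a flag word with xchg(). The function
 * and variable names below are examples only.
 */
#if 0 /* usage sketch, not built */
static inline bool example_try_claim(volatile unsigned long *flag)
{
        /* Swap in 1 atomically; seeing a previous value of 0 means we won. */
        return xchg(flag, 1UL) == 0;
}
#endif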

/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD (see the usage sketch after
 * the cmpxchg() macro below).
 */

extern unsigned long __bad_cmpxchg(volatile void *ptr, int size);

#define __CMPXCHG_CASE(sz, name)                                        \
static inline bool __cmpxchg_case_##name(volatile void *ptr,            \
                                         unsigned long *old,            \
                                         unsigned long new,             \
                                         bool timeout,                  \
                                         unsigned int max_try)          \
{                                                                       \
        unsigned long oldval;                                           \
        unsigned long res;                                              \
                                                                        \
        do {                                                            \
                asm volatile("@ __cmpxchg_case_" #name "\n"             \
                "       ldrex" #sz "    %1, [%2]\n"                     \
                "       mov     %0, #0\n"                               \
                "       teq     %1, %3\n"                               \
                "       strex" #sz "eq  %0, %4, [%2]\n"                 \
                : "=&r" (res), "=&r" (oldval)                           \
                : "r" (ptr), "Ir" (*old), "r" (new)                     \
                : "memory", "cc");                                      \
                                                                        \
                if (!res)                                               \
                        break;                                          \
        } while (!timeout || ((--max_try) > 0));                        \
                                                                        \
        *old = oldval;                                                  \
                                                                        \
        return !res;                                                    \
}

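/* Instantiate the byte, halfword and word variants of the helper above. */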
__CMPXCHG_CASE(b, 1)
__CMPXCHG_CASE(h, 2)
__CMPXCHG_CASE( , 4)

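/*
 * The 64-bit case cannot reuse the macro above: it needs LDREXD/STREXD
 * on a register pair, which the %H operand modifier addresses (%1 is
 * the low word of oldval, %H1 the high word).
 */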
static inline bool __cmpxchg_case_8(volatile uint64_t *ptr,
                                    uint64_t *old,
                                    uint64_t new,
                                    bool timeout,
                                    unsigned int max_try)
{
        uint64_t oldval;
        uint64_t res;

        do {
                asm volatile(
                "       ldrexd          %1, %H1, [%3]\n"
                "       teq             %1, %4\n"
                "       teqeq           %H1, %H4\n"
                "       movne           %0, #0\n"
                "       movne           %H0, #0\n"
                "       bne             2f\n"
                "       strexd          %0, %5, %H5, [%3]\n"
                "2:"
                : "=&r" (res), "=&r" (oldval), "+Qo" (*ptr)
                : "r" (ptr), "r" (*old), "r" (new)
                : "memory", "cc");
                if (!res)
                        break;
        } while (!timeout || ((--max_try) > 0));

        *old = oldval;

        return !res;
}

static always_inline bool __int_cmpxchg(volatile void *ptr, unsigned long *old,
                                        unsigned long new, int size,
                                        bool timeout, unsigned int max_try)
{
        prefetchw((const void *)ptr);

        switch (size) {
        case 1:
                return __cmpxchg_case_1(ptr, old, new, timeout, max_try);
        case 2:
                return __cmpxchg_case_2(ptr, old, new, timeout, max_try);
        case 4:
                return __cmpxchg_case_4(ptr, old, new, timeout, max_try);
        default:
                return __bad_cmpxchg(ptr, size);
        }

        ASSERT_UNREACHABLE();
}

static always_inline unsigned long __cmpxchg(volatile void *ptr,
                                             unsigned long old,
                                             unsigned long new,
                                             int size)
{
        smp_mb();
        if (!__int_cmpxchg(ptr, &old, new, size, false, 0))
                ASSERT_UNREACHABLE();
        smp_mb();

        return old;
}

/*
 * The helper may fail to update the memory if the action takes too long.
 *
 * @old: On entry, the value pointed to contains the expected old value.
 *       It will be updated to the actual old value.
 * @max_try: Maximum number of iterations
 *
 * The helper will return true when the update has succeeded (i.e. no
 * timeout) and false if the update has failed.
 */
static always_inline bool __cmpxchg_timeout(volatile void *ptr,
                                            unsigned long *old,
                                            unsigned long new,
                                            int size,
                                            unsigned int max_try)
{
        bool ret;

        smp_mb();
        ret = __int_cmpxchg(ptr, old, new, size, true, max_try);
        smp_mb();

        return ret;
}
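
/*
 * Illustrative sketch, not part of the original header: bounding the
 * number of store-exclusive attempts on a word another agent may be
 * updating continuously. Note that a false return only means the
 * exclusive store kept failing for max_try iterations; a plain value
 * mismatch still returns true with *old rewritten to the observed
 * value, so the caller must compare *old against its expectation.
 * The names below are hypothetical.
 */
#if 0 /* usage sketch, not built */
static inline bool example_bounded_set(volatile unsigned long *word)
{
        unsigned long old = 0;

        /* Try to replace 0 with 1, giving up after 16 attempts. */
        if (!__cmpxchg_timeout(word, &old, 1UL, sizeof(*word), 16))
                return false;           /* timed out */

        return old == 0;                /* true only if the store happened */
}
#endif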

/*
 * The helper may fail to update the memory if the action takes too long.
 *
 * @old: On entry, the value pointed to contains the expected old value.
 *       It will be updated to the actual old value.
 * @max_try: Maximum number of iterations
 *
 * The helper will return true when the update has succeeded (i.e. no
 * timeout) and false if the update has failed.
 */
static always_inline bool __cmpxchg64_timeout(volatile uint64_t *ptr,
                                              uint64_t *old,
                                              uint64_t new,
                                              unsigned int max_try)
{
        bool ret;

        smp_mb();
        ret = __cmpxchg_case_8(ptr, old, new, true, max_try);
        smp_mb();

        return ret;
}

#define cmpxchg(ptr,o,n)                                                \
        ((__typeof__(*(ptr)))__cmpxchg((ptr),                           \
                                       (unsigned long)(o),              \
                                       (unsigned long)(n),              \
                                       sizeof(*(ptr))))
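
/*
 * Illustrative sketch, not part of the original header: the classic
 * read/modify/compare-and-swap retry loop built on cmpxchg(). Success
 * is detected by comparing the returned value with the expected old
 * value, as the comment near the top of the file describes. The names
 * below are examples only.
 */
#if 0 /* usage sketch, not built */
static inline void example_atomic_inc(volatile unsigned long *counter)
{
        unsigned long old;

        /* Retry if another CPU changed the word between read and CAS. */
        do {
                old = *counter;
        } while (cmpxchg(counter, old, old + 1) != old);
}
#endif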

static inline uint64_t cmpxchg64(volatile uint64_t *ptr,
                                 uint64_t old,
                                 uint64_t new)
{
        smp_mb();
        if (!__cmpxchg_case_8(ptr, &old, new, false, 0))
                ASSERT_UNREACHABLE();
        smp_mb();

        return old;
}

#endif /* __ASM_ARM32_CMPXCHG_H */
/*
 * Local variables:
 * mode: C
 * c-file-style: "BSD"
 * c-basic-offset: 8
 * indent-tabs-mode: t
 * End:
 */