#ifndef __ASM_ARM64_CMPXCHG_H
#define __ASM_ARM64_CMPXCHG_H

extern void __bad_xchg(volatile void *, int);

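/*
 * Descriptive note (added commentary, not part of the original header):
 * __xchg() atomically swaps *ptr with x using an LDXR/STLXR
 * load-exclusive/store-exclusive loop, retrying until the store-exclusive
 * succeeds. The release semantics of STLXR plus the trailing smp_mb()
 * give the exchange full-barrier ordering.
 */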
static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
	unsigned long ret, tmp;

	switch (size) {
	case 1:
		asm volatile("//	__xchg1\n"
		"1:	ldxrb	%w0, %2\n"
		"	stlxrb	%w1, %w3, %2\n"
		"	cbnz	%w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u8 *)ptr)
			: "r" (x)
			: "memory");
		break;
	case 2:
		asm volatile("//	__xchg2\n"
		"1:	ldxrh	%w0, %2\n"
		"	stlxrh	%w1, %w3, %2\n"
		"	cbnz	%w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u16 *)ptr)
			: "r" (x)
			: "memory");
		break;
	case 4:
		asm volatile("//	__xchg4\n"
		"1:	ldxr	%w0, %2\n"
		"	stlxr	%w1, %w3, %2\n"
		"	cbnz	%w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u32 *)ptr)
			: "r" (x)
			: "memory");
		break;
	case 8:
		asm volatile("//	__xchg8\n"
		"1:	ldxr	%0, %2\n"
		"	stlxr	%w1, %3, %2\n"
		"	cbnz	%w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u64 *)ptr)
			: "r" (x)
			: "memory");
		break;
	default:
		__bad_xchg(ptr, size), ret = 0;
		break;
	}

	smp_mb();
	return ret;
}

#define xchg(ptr,x) \
({ \
	__typeof__(*(ptr)) __ret; \
	__ret = (__typeof__(*(ptr))) \
		__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))); \
	__ret; \
})
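
/*
 * Illustrative usage sketch (added for clarity, not part of the original
 * header). xchg() atomically stores a new value and returns the previous
 * contents; the test-and-set lock below is hypothetical and only shows the
 * calling convention:
 *
 *	static volatile unsigned int example_lock_word;
 *
 *	void example_lock(void)
 *	{
 *		while (xchg(&example_lock_word, 1) != 0)
 *			;			// spin until the old value was 0
 *	}
 *
 *	void example_unlock(void)
 *	{
 *		xchg(&example_lock_word, 0);	// full barrier on release as well
 *	}
 */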

extern void __bad_cmpxchg(volatile void *ptr, int size);

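/*
 * Descriptive note (added commentary, not part of the original header):
 * __cmpxchg() compares *ptr with old and, only if they match, tries to
 * store new with a store-exclusive, retrying the whole sequence when the
 * store-exclusive fails. It returns the value observed in *ptr, so the
 * caller can tell whether the swap happened by comparing the result with
 * old. No barriers are implied here; __cmpxchg_mb() below wraps it in
 * smp_mb() for fully ordered semantics.
 */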
static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long oldval = 0, res;

	switch (size) {
	case 1:
		do {
			asm volatile("// __cmpxchg1\n"
			"	ldxrb	%w1, %2\n"
			"	mov	%w0, #0\n"
			"	cmp	%w1, %w3\n"
			"	b.ne	1f\n"
			"	stxrb	%w0, %w4, %2\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval), "+Q" (*(u8 *)ptr)
				: "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;

	case 2:
		do {
			asm volatile("// __cmpxchg2\n"
			"	ldxrh	%w1, %2\n"
			"	mov	%w0, #0\n"
			"	cmp	%w1, %w3\n"
			"	b.ne	1f\n"
			"	stxrh	%w0, %w4, %2\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval), "+Q" (*(u16 *)ptr)
				: "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;

	case 4:
		do {
			asm volatile("// __cmpxchg4\n"
			"	ldxr	%w1, %2\n"
			"	mov	%w0, #0\n"
			"	cmp	%w1, %w3\n"
			"	b.ne	1f\n"
			"	stxr	%w0, %w4, %2\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval), "+Q" (*(u32 *)ptr)
				: "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;

	case 8:
		do {
			asm volatile("// __cmpxchg8\n"
			"	ldxr	%1, %2\n"
			"	mov	%w0, #0\n"
			"	cmp	%1, %3\n"
			"	b.ne	1f\n"
			"	stxr	%w0, %4, %2\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval), "+Q" (*(u64 *)ptr)
				: "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;

	default:
		__bad_cmpxchg(ptr, size);
		oldval = 0;
	}

	return oldval;
}

static inline unsigned long __cmpxchg_mb(volatile void *ptr, unsigned long old,
					 unsigned long new, int size)
{
	unsigned long ret;

	smp_mb();
	ret = __cmpxchg(ptr, old, new, size);
	smp_mb();

	return ret;
}

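/*
 * Descriptive note (added commentary, not part of the original header):
 * cmpxchg() goes through __cmpxchg_mb() and is therefore fully ordered,
 * while cmpxchg_local() uses the barrier-free __cmpxchg() and is only
 * ordered with respect to the current CPU.
 */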
#define cmpxchg(ptr, o, n) \
({ \
	__typeof__(*(ptr)) __ret; \
	__ret = (__typeof__(*(ptr))) \
		__cmpxchg_mb((ptr), (unsigned long)(o), (unsigned long)(n), \
			     sizeof(*(ptr))); \
	__ret; \
})

#define cmpxchg_local(ptr, o, n) \
({ \
	__typeof__(*(ptr)) __ret; \
	__ret = (__typeof__(*(ptr))) \
		__cmpxchg((ptr), (unsigned long)(o), \
			  (unsigned long)(n), sizeof(*(ptr))); \
	__ret; \
})
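
/*
 * Illustrative usage sketch (added for clarity, not part of the original
 * header). A typical cmpxchg() loop re-reads the current value and retries
 * until the compare-and-swap succeeds; the counter below is hypothetical:
 *
 *	static unsigned long example_counter;
 *
 *	void example_add(unsigned long delta)
 *	{
 *		unsigned long old, new;
 *
 *		do {
 *			old = example_counter;
 *			new = old + delta;
 *		} while (cmpxchg(&example_counter, old, new) != old);
 *	}
 */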

#endif
/*
 * Local variables:
 * mode: C
 * c-file-style: "BSD"
 * c-basic-offset: 8
 * indent-tabs-mode: t
 * End:
 */