#ifndef __ASM_ARM32_CMPXCHG_H
#define __ASM_ARM32_CMPXCHG_H

#include <xen/prefetch.h>

extern void __bad_xchg(volatile void *, int);

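/*
 * Atomically exchange the 1- or 4-byte quantity at ptr with x and
 * return its previous value.  A full barrier is issued on each side
 * of the LDREX/STREX loop, so the exchange is fully ordered.
 */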
static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
        unsigned long ret;
        unsigned int tmp;

        smp_mb();
        prefetchw((const void *)ptr);

        switch (size) {
        case 1:
                asm volatile("@ __xchg1\n"
                "1:     ldrexb  %0, [%3]\n"
                "       strexb  %1, %2, [%3]\n"
                "       teq     %1, #0\n"
                "       bne     1b"
                        : "=&r" (ret), "=&r" (tmp)
                        : "r" (x), "r" (ptr)
                        : "memory", "cc");
                break;
        case 4:
                asm volatile("@ __xchg4\n"
                "1:     ldrex   %0, [%3]\n"
                "       strex   %1, %2, [%3]\n"
                "       teq     %1, #0\n"
                "       bne     1b"
                        : "=&r" (ret), "=&r" (tmp)
                        : "r" (x), "r" (ptr)
                        : "memory", "cc");
                break;
        default:
                __bad_xchg(ptr, size);
                ret = 0;
                break;
        }
        smp_mb();

        return ret;
}

#define xchg(ptr,x) \
        ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
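
/*
 * Illustrative use (not part of this header; lock_word is a
 * hypothetical variable): the caller that gets 0 back is the one
 * that set the flag.
 *
 *      static unsigned int lock_word;
 *
 *      if (xchg(&lock_word, 1) == 0)
 *              ... we own the flag ...
 */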

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
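
/*
 * Illustrative use (not part of this header; counter is a hypothetical
 * variable): retry until the returned value matches the OLD passed in,
 * i.e. until our increment was applied to an unchanged value.
 *
 *      static unsigned int counter;
 *      unsigned int old, seen = counter;
 *
 *      do {
 *              old = seen;
 *              seen = cmpxchg(&counter, old, old + 1);
 *      } while (seen != old);
 */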

extern void __bad_cmpxchg(volatile void *ptr, int size);

static always_inline unsigned long __cmpxchg(
        volatile void *ptr, unsigned long old, unsigned long new, int size)
{
        unsigned long oldval, res;

        prefetchw((const void *)ptr);

        switch (size) {
        case 1:
                do {
                        asm volatile("@ __cmpxchg1\n"
                        "       ldrexb  %1, [%2]\n"
                        "       mov     %0, #0\n"
                        "       teq     %1, %3\n"
                        "       strexbeq %0, %4, [%2]\n"
                                : "=&r" (res), "=&r" (oldval)
                                : "r" (ptr), "Ir" (old), "r" (new)
                                : "memory", "cc");
                } while (res);
                break;
        case 2:
                do {
                        asm volatile("@ __cmpxchg2\n"
                        "       ldrexh  %1, [%2]\n"
                        "       mov     %0, #0\n"
                        "       teq     %1, %3\n"
                        "       strexheq %0, %4, [%2]\n"
                                : "=&r" (res), "=&r" (oldval)
                                : "r" (ptr), "Ir" (old), "r" (new)
                                : "memory", "cc");
                } while (res);
                break;
        case 4:
                do {
                        asm volatile("@ __cmpxchg4\n"
                        "       ldrex   %1, [%2]\n"
                        "       mov     %0, #0\n"
                        "       teq     %1, %3\n"
                        "       strexeq %0, %4, [%2]\n"
                                : "=&r" (res), "=&r" (oldval)
                                : "r" (ptr), "Ir" (old), "r" (new)
                                : "memory", "cc");
                } while (res);
                break;
#if 0
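        /*
         * Disabled: LDREXD/STREXD operate on a 64-bit value held in an
         * even/odd register pair (written "%1, %H1" in GCC constraints),
         * which this 32-bit-typed version cannot supply; presumably kept
         * only as a template for a future 64-bit variant.
         */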
        case 8:
                do {
                        asm volatile("@ __cmpxchg8\n"
                        "       ldrexd  %1, [%2]\n"
                        "       mov     %0, #0\n"
                        "       teq     %1, %3\n"
                        "       strexdeq %0, %4, [%2]\n"
                                : "=&r" (res), "=&r" (oldval)
                                : "r" (ptr), "Ir" (old), "r" (new)
                                : "memory", "cc");
                } while (res);
                break;
#endif
        default:
                __bad_cmpxchg(ptr, size);
                oldval = 0;
        }

        return oldval;
}

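/*
 * Fully-ordered variant of __cmpxchg(): a full barrier is issued on
 * each side of the operation, mirroring __xchg() above.
 */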
static always_inline unsigned long __cmpxchg_mb(volatile void *ptr,
                                                unsigned long old,
                                                unsigned long new, int size)
{
        unsigned long ret;

        smp_mb();
        ret = __cmpxchg(ptr, old, new, size);
        smp_mb();

        return ret;
}

#define cmpxchg(ptr,o,n) \
        ((__typeof__(*(ptr)))__cmpxchg_mb((ptr), \
                                          (unsigned long)(o), \
                                          (unsigned long)(n), \
                                          sizeof(*(ptr))))

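/*
 * cmpxchg_local() omits the barriers: it calls __cmpxchg() directly,
 * so the update is atomic but carries no ordering guarantee against
 * other CPUs.
 */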
#define cmpxchg_local(ptr,o,n) \
        ((__typeof__(*(ptr)))__cmpxchg((ptr), \
                                       (unsigned long)(o), \
                                       (unsigned long)(n), \
                                       sizeof(*(ptr))))

#endif /* __ASM_ARM32_CMPXCHG_H */
/*
 * Local variables:
 * mode: C
 * c-file-style: "BSD"
 * c-basic-offset: 8
 * indent-tabs-mode: t
 * End:
 */