/* Atomic operations.  sparc32 version.
   Copyright (C) 2003, 2004, 2006 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Jakub Jelinek <jakub@redhat.com>, 2003.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#ifndef _BITS_ATOMIC_H
#define _BITS_ATOMIC_H 1

#include <stdint.h>

typedef int8_t atomic8_t;
typedef uint8_t uatomic8_t;
typedef int_fast8_t atomic_fast8_t;
typedef uint_fast8_t uatomic_fast8_t;

typedef int16_t atomic16_t;
typedef uint16_t uatomic16_t;
typedef int_fast16_t atomic_fast16_t;
typedef uint_fast16_t uatomic_fast16_t;

typedef int32_t atomic32_t;
typedef uint32_t uatomic32_t;
typedef int_fast32_t atomic_fast32_t;
typedef uint_fast32_t uatomic_fast32_t;

typedef int64_t atomic64_t;
typedef uint64_t uatomic64_t;
typedef int_fast64_t atomic_fast64_t;
typedef uint_fast64_t uatomic_fast64_t;

typedef intptr_t atomicptr_t;
typedef uintptr_t uatomicptr_t;
typedef intmax_t atomic_max_t;
typedef uintmax_t uatomic_max_t;


/* We have no compare and swap, just test and set.
   The following implementation contends on 64 global locks
   per library and assumes no variable will be accessed using atomic.h
   macros from two different libraries.  */
__make_section_unallocated
  (".gnu.linkonce.b.__sparc32_atomic_locks, \"aw\", %nobits");

volatile unsigned char __sparc32_atomic_locks[64]
  __attribute__ ((nocommon,
                  section (".gnu.linkonce.b.__sparc32_atomic_locks"
                           __sec_comment),
                  visibility ("hidden")));

#define __sparc32_atomic_do_lock(addr) \
  do \
    { \
      unsigned int __old_lock; \
      unsigned int __idx = (((long) addr >> 2) ^ ((long) addr >> 12)) \
                           & 63; \
      do \
        __asm__ __volatile__ ("ldstub %1, %0" \
                              : "=r" (__old_lock), \
                                "=m" (__sparc32_atomic_locks[__idx]) \
                              : "m" (__sparc32_atomic_locks[__idx]) \
                              : "memory"); \
      while (__old_lock); \
    } \
  while (0)

#define __sparc32_atomic_do_unlock(addr) \
  do \
    { \
      __sparc32_atomic_locks[(((long) addr >> 2) \
                              ^ ((long) addr >> 12)) & 63] = 0; \
      __asm__ __volatile__ ("" ::: "memory"); \
    } \
  while (0)

#define __sparc32_atomic_do_lock24(addr) \
  do \
    { \
      unsigned int __old_lock; \
      do \
        __asm__ __volatile__ ("ldstub %1, %0" \
                              : "=r" (__old_lock), "=m" (*(addr)) \
                              : "m" (*(addr)) \
                              : "memory"); \
      while (__old_lock); \
    } \
  while (0)

#define __sparc32_atomic_do_unlock24(addr) \
  do \
    { \
      *(char *) (addr) = 0; \
      __asm__ __volatile__ ("" ::: "memory"); \
    } \
  while (0)
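/* Illustrative sketch, not part of the original header: the lock/unlock
   macros above hash the operand's address into one of the 64 global
   ldstub locks declared earlier.  The helper name below is hypothetical
   and only spells out that index computation; it is kept under #if 0 so
   it does not change what this header defines.  */
#if 0
static inline unsigned int
__sparc32_atomic_lock_index (const void *addr)
{
  /* XOR bits 2..7 of the address with bits 12..17 and keep 6 bits, so
     nearby objects tend to use different locks while the lock and the
     unlock site always compute the same index for a given address.  */
  return (unsigned int) ((((long) addr >> 2) ^ ((long) addr >> 12)) & 63);
}
#endif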
#ifndef SHARED
# define __v9_compare_and_exchange_val_32_acq(mem, newval, oldval) \
  ({ \
     register __typeof (*(mem)) __acev_tmp __asm__ ("%g6"); \
     register __typeof (mem) __acev_mem __asm__ ("%g1") = (mem); \
     register __typeof (*(mem)) __acev_oldval __asm__ ("%g5"); \
     __acev_tmp = (newval); \
     __acev_oldval = (oldval); \
     /* .word 0xcde05005 is cas [%g1], %g5, %g6.  We cannot use the cas \
        mnemonic here, because the assembler would then mark the object \
        file as V8+ arch.  */ \
     __asm__ __volatile__ (".word 0xcde05005" \
                           : "+r" (__acev_tmp), "=m" (*__acev_mem) \
                           : "r" (__acev_oldval), "m" (*__acev_mem), \
                             "r" (__acev_mem) : "memory"); \
     __acev_tmp; })
#endif

/* The only basic operation needed is compare and exchange.  */
#define __v7_compare_and_exchange_val_acq(mem, newval, oldval) \
  ({ __typeof (mem) __acev_memp = (mem); \
     __typeof (*mem) __acev_ret; \
     __typeof (*mem) __acev_newval = (newval); \
     \
     __sparc32_atomic_do_lock (__acev_memp); \
     __acev_ret = *__acev_memp; \
     if (__acev_ret == (oldval)) \
       *__acev_memp = __acev_newval; \
     __sparc32_atomic_do_unlock (__acev_memp); \
     __acev_ret; })

#define __v7_compare_and_exchange_bool_acq(mem, newval, oldval) \
  ({ __typeof (mem) __aceb_memp = (mem); \
     int __aceb_ret; \
     __typeof (*mem) __aceb_newval = (newval); \
     \
     __sparc32_atomic_do_lock (__aceb_memp); \
     __aceb_ret = 0; \
     if (*__aceb_memp == (oldval)) \
       *__aceb_memp = __aceb_newval; \
     else \
       __aceb_ret = 1; \
     __sparc32_atomic_do_unlock (__aceb_memp); \
     __aceb_ret; })

#define __v7_exchange_acq(mem, newval) \
  ({ __typeof (mem) __acev_memp = (mem); \
     __typeof (*mem) __acev_ret; \
     __typeof (*mem) __acev_newval = (newval); \
     \
     __sparc32_atomic_do_lock (__acev_memp); \
     __acev_ret = *__acev_memp; \
     *__acev_memp = __acev_newval; \
     __sparc32_atomic_do_unlock (__acev_memp); \
     __acev_ret; })

#define __v7_exchange_and_add(mem, value) \
  ({ __typeof (mem) __acev_memp = (mem); \
     __typeof (*mem) __acev_ret; \
     \
     __sparc32_atomic_do_lock (__acev_memp); \
     __acev_ret = *__acev_memp; \
     *__acev_memp = __acev_ret + (value); \
     __sparc32_atomic_do_unlock (__acev_memp); \
     __acev_ret; })

/* Special versions, which guarantee that the top 8 bits of all values
   are cleared and use those bits as the ldstub lock.  */
#define __v7_compare_and_exchange_val_24_acq(mem, newval, oldval) \
  ({ __typeof (mem) __acev_memp = (mem); \
     __typeof (*mem) __acev_ret; \
     __typeof (*mem) __acev_newval = (newval); \
     \
     __sparc32_atomic_do_lock24 (__acev_memp); \
     __acev_ret = *__acev_memp & 0xffffff; \
     if (__acev_ret == (oldval)) \
       *__acev_memp = __acev_newval; \
     else \
       __sparc32_atomic_do_unlock24 (__acev_memp); \
     __asm__ __volatile__ ("" ::: "memory"); \
     __acev_ret; })

#define __v7_exchange_24_rel(mem, newval) \
  ({ __typeof (mem) __acev_memp = (mem); \
     __typeof (*mem) __acev_ret; \
     __typeof (*mem) __acev_newval = (newval); \
     \
     __sparc32_atomic_do_lock24 (__acev_memp); \
     __acev_ret = *__acev_memp & 0xffffff; \
     *__acev_memp = __acev_newval; \
     __asm__ __volatile__ ("" ::: "memory"); \
     __acev_ret; })
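/* Illustrative sketch, not part of the original header: the 24-bit
   variants above rely on SPARC being big-endian, so the first byte that
   __sparc32_atomic_do_lock24 targets with ldstub is the most significant
   byte of the 32-bit word.  While the lock is held that byte reads as
   0xff, which is why the value is fetched as "*mem & 0xffffff"; storing
   a replacement value whose top 8 bits are clear then updates the value
   and releases the lock in a single store.  The hypothetical function
   below, kept under #if 0, restates __v7_compare_and_exchange_val_24_acq
   as plain code for exposition only.  */
#if 0
static inline int
__sparc32_cas24_sketch (volatile int *mem, int newval, int oldval)
{
  int oldv;

  __sparc32_atomic_do_lock24 (mem);      /* ldstub sets the top byte to 0xff.  */
  oldv = *mem & 0xffffff;                /* Read the value minus the lock byte.  */
  if (oldv == oldval)
    *mem = newval;                       /* The new value has a clear top byte,
                                            so this store also unlocks.  */
  else
    __sparc32_atomic_do_unlock24 (mem);  /* Failed compare: unlock explicitly.  */
  return oldv;
}
#endif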
#ifdef SHARED

/* When dynamically linked, we assume pre-v9 libraries are only ever
   used on pre-v9 CPUs.  */
# define __atomic_is_v9 0

# define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
  __v7_compare_and_exchange_val_acq (mem, newval, oldval)

# define atomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
  __v7_compare_and_exchange_bool_acq (mem, newval, oldval)

# define atomic_exchange_acq(mem, newval) \
  __v7_exchange_acq (mem, newval)

# define atomic_exchange_and_add(mem, value) \
  __v7_exchange_and_add (mem, value)

# define atomic_compare_and_exchange_val_24_acq(mem, newval, oldval) \
  ({ \
     if (sizeof (*mem) != 4) \
       abort (); \
     __v7_compare_and_exchange_val_24_acq (mem, newval, oldval); })

# define atomic_exchange_24_rel(mem, newval) \
  ({ \
     if (sizeof (*mem) != 4) \
       abort (); \
     __v7_exchange_24_rel (mem, newval); })

#else

/* Here's what we'd like to do:

   In libc.a/libpthread.a etc. we don't know whether we'll be run on a
   pre-v9 or a v9 CPU.  To be interoperable with dynamically linked apps
   on v9 CPUs, e.g. with process-shared primitives, we would use the cas
   instruction on v9 CPUs and ldstub on pre-v9 ones.

   However, I know of no good way to test for this at run time, so we
   resort to the lowest common denominator (v7 ops).  -austinf  */
# define __atomic_is_v9 0

# define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
  ({ \
     __typeof (*mem) __acev_wret; \
     if (sizeof (*mem) != 4) \
       abort (); \
     if (__atomic_is_v9) \
       __acev_wret \
         = __v9_compare_and_exchange_val_32_acq (mem, newval, oldval); \
     else \
       __acev_wret \
         = __v7_compare_and_exchange_val_acq (mem, newval, oldval); \
     __acev_wret; })

# define atomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
  ({ \
     int __acev_wret; \
     if (sizeof (*mem) != 4) \
       abort (); \
     if (__atomic_is_v9) \
       { \
         __typeof (oldval) __acev_woldval = (oldval); \
         __acev_wret \
           = __v9_compare_and_exchange_val_32_acq (mem, newval, \
                                                   __acev_woldval) \
             != __acev_woldval; \
       } \
     else \
       __acev_wret \
         = __v7_compare_and_exchange_bool_acq (mem, newval, oldval); \
     __acev_wret; })

# define atomic_exchange_rel(mem, newval) \
  ({ \
     __typeof (*mem) __acev_wret; \
     if (sizeof (*mem) != 4) \
       abort (); \
     if (__atomic_is_v9) \
       { \
         __typeof (mem) __acev_wmemp = (mem); \
         __typeof (*(mem)) __acev_wval = (newval); \
         do \
           __acev_wret = *__acev_wmemp; \
         while (unlikely \
                (__v9_compare_and_exchange_val_32_acq (__acev_wmemp, \
                                                       __acev_wval, \
                                                       __acev_wret) \
                 != __acev_wret)); \
       } \
     else \
       __acev_wret = __v7_exchange_acq (mem, newval); \
     __acev_wret; })

# define atomic_compare_and_exchange_val_24_acq(mem, newval, oldval) \
  ({ \
     __typeof (*mem) __acev_wret; \
     if (sizeof (*mem) != 4) \
       abort (); \
     if (__atomic_is_v9) \
       __acev_wret \
         = __v9_compare_and_exchange_val_32_acq (mem, newval, oldval); \
     else \
       __acev_wret \
         = __v7_compare_and_exchange_val_24_acq (mem, newval, oldval); \
     __acev_wret; })

# define atomic_exchange_24_rel(mem, newval) \
  ({ \
     __typeof (*mem) __acev_w24ret; \
     if (sizeof (*mem) != 4) \
       abort (); \
     if (__atomic_is_v9) \
       __acev_w24ret = atomic_exchange_rel (mem, newval); \
     else \
       __acev_w24ret = __v7_exchange_24_rel (mem, newval); \
     __acev_w24ret; })

#endif

#endif  /* bits/atomic.h */
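/* Hypothetical usage sketch, not part of this header: callers normally
   reach these definitions through <atomic.h>, but the public macros can
   be exercised directly.  The example below, kept under #if 0, uses
   atomic_compare_and_exchange_val_acq (defined in both the SHARED and
   the static branches above) to implement a simple try-lock on a 32-bit
   word; the names lock_word and try_acquire are made up for this sketch.  */
#if 0
static volatile int lock_word;          /* 0 = free, 1 = held.  */

static int
try_acquire (void)
{
  /* The macro returns the value observed before the exchange, so the
     acquisition succeeded exactly when that old value was 0.  */
  return atomic_compare_and_exchange_val_acq (&lock_word, 1, 0) == 0;
}
#endif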