1 /*****************************************************************************/
2 /**
3  * \file
4  * \brief   atomic operations header and generic implementations
5  * \ingroup l4util_atomic
6  *
7  * \date    10/20/2000
8  * \author  Lars Reuther <reuther@os.inf.tu-dresden.de>,
9  *          Jork Loeser  <jork@os.inf.tu-dresden.de> */
10 /*
11  * (c) 2000-2009 Author(s)
12  *     economic rights: Technische Universität Dresden (Germany)
13  * This file is part of TUD:OS and distributed under the terms of the
14  * GNU Lesser General Public License 2.1.
15  * Please see the COPYING-LGPL-2.1 file for details.
16  */
17 
18 /*****************************************************************************/
19 #ifndef __L4UTIL__INCLUDE__ATOMIC_H__
20 #define __L4UTIL__INCLUDE__ATOMIC_H__
21 
22 #include <l4/sys/l4int.h>
23 #include <l4/sys/compiler.h>
24 
25 /*****************************************************************************
26  *** Prototypes
27  *****************************************************************************/
28 
29 EXTERN_C_BEGIN
30 
31 /**
32  * \defgroup l4util_atomic Atomic Instructions
33  * \ingroup l4util_api
34  */
35 
/**
 * \brief Atomic compare and exchange (64 bit version)
 * \ingroup l4util_atomic
 *
 * \param  dest          destination operand
 * \param  cmp_val       compare value
 * \param  new_val       new value for dest
 *
 * \return 0 if comparison failed, !=0 otherwise
 *
 * Compare the value in \em dest with \em cmp_val, if equal set \em dest to
 * \em new_val. The operation uses sequentially consistent memory ordering.
 */
L4_INLINE int
l4util_cmpxchg64(volatile l4_uint64_t * dest,
                 l4_uint64_t cmp_val, l4_uint64_t new_val);

/**
 * \brief Atomic compare and exchange (32 bit version)
 * \ingroup l4util_atomic
 *
 * \param  dest          destination operand
 * \param  cmp_val       compare value
 * \param  new_val       new value for dest
 *
 * \return 0 if comparison failed, !=0 otherwise
 *
 * Compare the value in \em dest with \em cmp_val, if equal set \em dest to
 * \em new_val. The operation uses sequentially consistent memory ordering.
 */
L4_INLINE int
l4util_cmpxchg32(volatile l4_uint32_t * dest,
                 l4_uint32_t cmp_val, l4_uint32_t new_val);

/**
 * \brief Atomic compare and exchange (16 bit version)
 * \ingroup l4util_atomic
 *
 * \param  dest          destination operand
 * \param  cmp_val       compare value
 * \param  new_val       new value for dest
 *
 * \return 0 if comparison failed, !=0 otherwise
 *
 * Compare the value in \em dest with \em cmp_val, if equal set \em dest to
 * \em new_val. The operation uses sequentially consistent memory ordering.
 */
L4_INLINE int
l4util_cmpxchg16(volatile l4_uint16_t * dest,
                 l4_uint16_t cmp_val, l4_uint16_t new_val);

/**
 * \brief Atomic compare and exchange (8 bit version)
 * \ingroup l4util_atomic
 *
 * \param  dest          destination operand
 * \param  cmp_val       compare value
 * \param  new_val       new value for dest
 *
 * \return 0 if comparison failed, !=0 otherwise
 *
 * Compare the value in \em dest with \em cmp_val, if equal set \em dest to
 * \em new_val. The operation uses sequentially consistent memory ordering.
 */
L4_INLINE int
l4util_cmpxchg8(volatile l4_uint8_t * dest,
                l4_uint8_t cmp_val, l4_uint8_t new_val);

/**
 * \brief Atomic compare and exchange (machine wide fields)
 * \ingroup l4util_atomic
 *
 * \param  dest          destination operand
 * \param  cmp_val       compare value
 * \param  new_val       new value for dest
 *
 * \return 0 if comparison failed, !=0 otherwise
 *
 * Compare the value in \em dest with \em cmp_val, if equal set \em dest to
 * \em new_val. The operation uses sequentially consistent memory ordering.
 */
L4_INLINE int
l4util_cmpxchg(volatile l4_umword_t * dest,
               l4_umword_t cmp_val, l4_umword_t new_val);
120 
/**
 * \brief Atomic exchange (32 bit version)
 * \ingroup l4util_atomic
 *
 * \param  dest          destination operand
 * \param  val           new value for dest
 *
 * \return old value at destination
 *
 * The exchange uses sequentially consistent memory ordering.
 */
L4_INLINE l4_uint32_t
l4util_xchg32(volatile l4_uint32_t * dest, l4_uint32_t val);

/**
 * \brief Atomic exchange (16 bit version)
 * \ingroup l4util_atomic
 *
 * \param  dest          destination operand
 * \param  val           new value for dest
 *
 * \return old value at destination
 *
 * The exchange uses sequentially consistent memory ordering.
 */
L4_INLINE l4_uint16_t
l4util_xchg16(volatile l4_uint16_t * dest, l4_uint16_t val);

/**
 * \brief Atomic exchange (8 bit version)
 * \ingroup l4util_atomic
 *
 * \param  dest          destination operand
 * \param  val           new value for dest
 *
 * \return old value at destination
 *
 * The exchange uses sequentially consistent memory ordering.
 */
L4_INLINE l4_uint8_t
l4util_xchg8(volatile l4_uint8_t * dest, l4_uint8_t val);

/**
 * \brief Atomic exchange (machine wide fields)
 * \ingroup l4util_atomic
 *
 * \param  dest          destination operand
 * \param  val           new value for dest
 *
 * \return old value at destination
 *
 * The exchange uses sequentially consistent memory ordering.
 */
L4_INLINE l4_umword_t
l4util_xchg(volatile l4_umword_t * dest, l4_umword_t val);
168 
//!@name Atomic add/sub/and/or (8,16,32 bit version) without result
/** @{
 * \ingroup l4util_atomic
 *
 * Atomically apply the operation to \em dest; the previous value at the
 * destination is discarded. Sequentially consistent memory ordering.
 *
 * \param  dest          destination operand
 * \param  val           value to add/sub/and/or
 */
L4_INLINE void
l4util_add8(volatile l4_uint8_t *dest, l4_uint8_t val);
/// \copydoc l4util_add8
L4_INLINE void
l4util_add16(volatile l4_uint16_t *dest, l4_uint16_t val);
/// \copydoc l4util_add8
L4_INLINE void
l4util_add32(volatile l4_uint32_t *dest, l4_uint32_t val);
/// \copydoc l4util_add8
L4_INLINE void
l4util_sub8(volatile l4_uint8_t *dest, l4_uint8_t val);
/// \copydoc l4util_add8
L4_INLINE void
l4util_sub16(volatile l4_uint16_t *dest, l4_uint16_t val);
/// \copydoc l4util_add8
L4_INLINE void
l4util_sub32(volatile l4_uint32_t *dest, l4_uint32_t val);
/// \copydoc l4util_add8
L4_INLINE void
l4util_and8(volatile l4_uint8_t *dest, l4_uint8_t val);
/// \copydoc l4util_add8
L4_INLINE void
l4util_and16(volatile l4_uint16_t *dest, l4_uint16_t val);
/// \copydoc l4util_add8
L4_INLINE void
l4util_and32(volatile l4_uint32_t *dest, l4_uint32_t val);
/// \copydoc l4util_add8
L4_INLINE void
l4util_or8(volatile l4_uint8_t *dest, l4_uint8_t val);
/// \copydoc l4util_add8
L4_INLINE void
l4util_or16(volatile l4_uint16_t *dest, l4_uint16_t val);
/// \copydoc l4util_add8
L4_INLINE void
l4util_or32(volatile l4_uint32_t *dest, l4_uint32_t val);
///@}
212 
//!@name Atomic add/sub/and/or operations (8,16,32 bit) with result
/** @{
 * \ingroup l4util_atomic
 *
 * Atomically apply the operation to \em dest. Sequentially consistent
 * memory ordering.
 *
 * \param  dest          destination operand
 * \param  val           value to add/sub/and/or
 * \return the new value at \em dest (i.e. after the operation)
 */
L4_INLINE l4_uint8_t
l4util_add8_res(volatile l4_uint8_t *dest, l4_uint8_t val);
/// \copydoc l4util_add8_res
L4_INLINE l4_uint16_t
l4util_add16_res(volatile l4_uint16_t *dest, l4_uint16_t val);
/// \copydoc l4util_add8_res
L4_INLINE l4_uint32_t
l4util_add32_res(volatile l4_uint32_t *dest, l4_uint32_t val);
/// \copydoc l4util_add8_res
L4_INLINE l4_uint8_t
l4util_sub8_res(volatile l4_uint8_t *dest, l4_uint8_t val);
/// \copydoc l4util_add8_res
L4_INLINE l4_uint16_t
l4util_sub16_res(volatile l4_uint16_t *dest, l4_uint16_t val);
/// \copydoc l4util_add8_res
L4_INLINE l4_uint32_t
l4util_sub32_res(volatile l4_uint32_t *dest, l4_uint32_t val);
/// \copydoc l4util_add8_res
L4_INLINE l4_uint8_t
l4util_and8_res(volatile l4_uint8_t *dest, l4_uint8_t val);
/// \copydoc l4util_add8_res
L4_INLINE l4_uint16_t
l4util_and16_res(volatile l4_uint16_t *dest, l4_uint16_t val);
/// \copydoc l4util_add8_res
L4_INLINE l4_uint32_t
l4util_and32_res(volatile l4_uint32_t *dest, l4_uint32_t val);
/// \copydoc l4util_add8_res
L4_INLINE l4_uint8_t
l4util_or8_res(volatile l4_uint8_t *dest, l4_uint8_t val);
/// \copydoc l4util_add8_res
L4_INLINE l4_uint16_t
l4util_or16_res(volatile l4_uint16_t *dest, l4_uint16_t val);
/// \copydoc l4util_add8_res
L4_INLINE l4_uint32_t
l4util_or32_res(volatile l4_uint32_t *dest, l4_uint32_t val);
///@}
257 
//!@name Atomic inc/dec (8,16,32 bit) without result
/** @{
 * \ingroup l4util_atomic
 *
 * Atomically increment/decrement \em dest by one; the previous value at
 * the destination is discarded. Sequentially consistent memory ordering.
 *
 * \param  dest          destination operand
 */
L4_INLINE void
l4util_inc8(volatile l4_uint8_t *dest);
/// \copydoc l4util_inc8
L4_INLINE void
l4util_inc16(volatile l4_uint16_t *dest);
/// \copydoc l4util_inc8
L4_INLINE void
l4util_inc32(volatile l4_uint32_t *dest);
/// \copydoc l4util_inc8
L4_INLINE void
l4util_dec8(volatile l4_uint8_t *dest);
/// \copydoc l4util_inc8
L4_INLINE void
l4util_dec16(volatile l4_uint16_t *dest);
/// \copydoc l4util_inc8
L4_INLINE void
l4util_dec32(volatile l4_uint32_t *dest);
///@}
282 
283 //!@name Atomic inc/dec (8,16,32 bit) with result
284 /** @{
285  * \ingroup l4util_atomic
286  *
287  * \param  dest          destination operand
288  * \return res
289  */
290 L4_INLINE l4_uint8_t
291 l4util_inc8_res(volatile l4_uint8_t *dest);
292 /// \copydoc l4util_inc8_res
293 L4_INLINE l4_uint16_t
294 l4util_inc16_res(volatile l4_uint16_t *dest);
295 /// \copydoc l4util_inc8_res
296 L4_INLINE l4_uint32_t
297 l4util_inc32_res(volatile l4_uint32_t *dest);
298 /// \copydoc l4util_inc8_res
299 L4_INLINE l4_uint8_t
300 l4util_dec8_res(volatile l4_uint8_t *dest);
301 /// \copydoc l4util_inc8_res
302 L4_INLINE l4_uint16_t
303 l4util_dec16_res(volatile l4_uint16_t *dest);
304 /// \copydoc l4util_inc8_res
305 L4_INLINE l4_uint32_t
306 l4util_dec32_res(volatile l4_uint32_t *dest);
307 ///@}
308 
/**
 * \brief Atomic add
 * \ingroup l4util_atomic
 *
 * Atomically add \em val to \em dest; the previous value is discarded.
 * Sequentially consistent memory ordering.
 *
 * \param  dest      destination operand
 * \param  val       value to add
 */
L4_INLINE void
l4util_atomic_add(volatile long *dest, long val);

/**
 * \brief Atomic increment
 * \ingroup l4util_atomic
 *
 * Atomically increment \em dest by one; the previous value is discarded.
 * Sequentially consistent memory ordering.
 *
 * \param  dest      destination operand
 */
L4_INLINE void
l4util_atomic_inc(volatile long *dest);
327 
328 EXTERN_C_END
329 
/******************
 * IMPLEMENTATION *
 ******************/
333 
334 L4_INLINE int
l4util_cmpxchg64(volatile l4_uint64_t * dest,l4_uint64_t cmp_val,l4_uint64_t new_val)335 l4util_cmpxchg64(volatile l4_uint64_t * dest,
336                  l4_uint64_t cmp_val, l4_uint64_t new_val)
337 {
338   return __atomic_compare_exchange_n(dest, &cmp_val, new_val, 0,
339                                      __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
340 }
341 
342 L4_INLINE int
l4util_cmpxchg32(volatile l4_uint32_t * dest,l4_uint32_t cmp_val,l4_uint32_t new_val)343 l4util_cmpxchg32(volatile l4_uint32_t * dest,
344                  l4_uint32_t cmp_val, l4_uint32_t new_val)
345 {
346   return __atomic_compare_exchange_n(dest, &cmp_val, new_val, 0,
347                                      __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
348 }
349 
350 L4_INLINE int
l4util_cmpxchg16(volatile l4_uint16_t * dest,l4_uint16_t cmp_val,l4_uint16_t new_val)351 l4util_cmpxchg16(volatile l4_uint16_t * dest,
352                  l4_uint16_t cmp_val, l4_uint16_t new_val)
353 {
354   return __atomic_compare_exchange_n(dest, &cmp_val, new_val, 0,
355                                      __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
356 }
357 
358 L4_INLINE int
l4util_cmpxchg8(volatile l4_uint8_t * dest,l4_uint8_t cmp_val,l4_uint8_t new_val)359 l4util_cmpxchg8(volatile l4_uint8_t * dest,
360                 l4_uint8_t cmp_val, l4_uint8_t new_val)
361 {
362   return __atomic_compare_exchange_n(dest, &cmp_val, new_val, 0,
363                                      __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
364 }
365 
366 L4_INLINE int
l4util_cmpxchg(volatile l4_umword_t * dest,l4_umword_t cmp_val,l4_umword_t new_val)367 l4util_cmpxchg(volatile l4_umword_t * dest,
368                l4_umword_t cmp_val, l4_umword_t new_val)
369 {
370   return __atomic_compare_exchange_n(dest, &cmp_val, new_val, 0,
371                                      __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
372 }
373 
374 L4_INLINE l4_uint32_t
l4util_xchg32(volatile l4_uint32_t * dest,l4_uint32_t val)375 l4util_xchg32(volatile l4_uint32_t * dest, l4_uint32_t val)
376 {
377   return __atomic_exchange_n(dest, val, __ATOMIC_SEQ_CST);
378 }
379 
380 L4_INLINE l4_uint16_t
l4util_xchg16(volatile l4_uint16_t * dest,l4_uint16_t val)381 l4util_xchg16(volatile l4_uint16_t * dest, l4_uint16_t val)
382 {
383   return __atomic_exchange_n(dest, val, __ATOMIC_SEQ_CST);
384 }
385 
386 L4_INLINE l4_uint8_t
l4util_xchg8(volatile l4_uint8_t * dest,l4_uint8_t val)387 l4util_xchg8(volatile l4_uint8_t * dest, l4_uint8_t val)
388 {
389   return __atomic_exchange_n(dest, val, __ATOMIC_SEQ_CST);
390 }
391 
392 L4_INLINE l4_umword_t
l4util_xchg(volatile l4_umword_t * dest,l4_umword_t val)393 l4util_xchg(volatile l4_umword_t * dest, l4_umword_t val)
394 {
395   return __atomic_exchange_n(dest, val, __ATOMIC_SEQ_CST);
396 }
397 
398 L4_INLINE void
l4util_inc8(volatile l4_uint8_t * dest)399 l4util_inc8(volatile l4_uint8_t *dest)
400 { __atomic_fetch_add(dest, 1, __ATOMIC_SEQ_CST); }
401 
402 L4_INLINE void
l4util_inc16(volatile l4_uint16_t * dest)403 l4util_inc16(volatile l4_uint16_t *dest)
404 { __atomic_fetch_add(dest, 1, __ATOMIC_SEQ_CST); }
405 
406 L4_INLINE void
l4util_inc32(volatile l4_uint32_t * dest)407 l4util_inc32(volatile l4_uint32_t *dest)
408 { __atomic_fetch_add(dest, 1, __ATOMIC_SEQ_CST); }
409 
410 L4_INLINE void
l4util_atomic_inc(volatile long * dest)411 l4util_atomic_inc(volatile long *dest)
412 { __atomic_fetch_add(dest, 1, __ATOMIC_SEQ_CST); }
413 
414 L4_INLINE void
l4util_dec8(volatile l4_uint8_t * dest)415 l4util_dec8(volatile l4_uint8_t *dest)
416 { __atomic_fetch_sub(dest, 1, __ATOMIC_SEQ_CST); }
417 
418 L4_INLINE void
l4util_dec16(volatile l4_uint16_t * dest)419 l4util_dec16(volatile l4_uint16_t *dest)
420 { __atomic_fetch_sub(dest, 1, __ATOMIC_SEQ_CST); }
421 
422 L4_INLINE void
l4util_dec32(volatile l4_uint32_t * dest)423 l4util_dec32(volatile l4_uint32_t *dest)
424 { __atomic_fetch_sub(dest, 1, __ATOMIC_SEQ_CST); }
425 
426 
427 L4_INLINE l4_uint8_t
l4util_inc8_res(volatile l4_uint8_t * dest)428 l4util_inc8_res(volatile l4_uint8_t *dest)
429 { return __atomic_add_fetch(dest, 1, __ATOMIC_SEQ_CST); }
430 
431 L4_INLINE l4_uint16_t
l4util_inc16_res(volatile l4_uint16_t * dest)432 l4util_inc16_res(volatile l4_uint16_t *dest)
433 { return __atomic_add_fetch(dest, 1, __ATOMIC_SEQ_CST); }
434 
435 L4_INLINE l4_uint32_t
l4util_inc32_res(volatile l4_uint32_t * dest)436 l4util_inc32_res(volatile l4_uint32_t *dest)
437 { return __atomic_add_fetch(dest, 1, __ATOMIC_SEQ_CST); }
438 
439 L4_INLINE l4_uint8_t
l4util_dec8_res(volatile l4_uint8_t * dest)440 l4util_dec8_res(volatile l4_uint8_t *dest)
441 { return __atomic_sub_fetch(dest, 1, __ATOMIC_SEQ_CST); }
442 
443 L4_INLINE l4_uint16_t
l4util_dec16_res(volatile l4_uint16_t * dest)444 l4util_dec16_res(volatile l4_uint16_t *dest)
445 { return __atomic_sub_fetch(dest, 1, __ATOMIC_SEQ_CST); }
446 
447 L4_INLINE l4_uint32_t
l4util_dec32_res(volatile l4_uint32_t * dest)448 l4util_dec32_res(volatile l4_uint32_t *dest)
449 { return __atomic_sub_fetch(dest, 1, __ATOMIC_SEQ_CST); }
450 
451 L4_INLINE l4_umword_t
l4util_dec_res(volatile l4_umword_t * dest)452 l4util_dec_res(volatile l4_umword_t *dest)
453 { return __atomic_sub_fetch(dest, 1, __ATOMIC_SEQ_CST); }
454 
455 L4_INLINE void
l4util_add8(volatile l4_uint8_t * dest,l4_uint8_t val)456 l4util_add8(volatile l4_uint8_t *dest, l4_uint8_t val)
457 { __atomic_fetch_add(dest, val, __ATOMIC_SEQ_CST); }
458 
459 L4_INLINE void
l4util_add16(volatile l4_uint16_t * dest,l4_uint16_t val)460 l4util_add16(volatile l4_uint16_t *dest, l4_uint16_t val)
461 { __atomic_fetch_add(dest, val, __ATOMIC_SEQ_CST); }
462 
463 L4_INLINE void
l4util_add32(volatile l4_uint32_t * dest,l4_uint32_t val)464 l4util_add32(volatile l4_uint32_t *dest, l4_uint32_t val)
465 { __atomic_fetch_add(dest, val, __ATOMIC_SEQ_CST); }
466 
467 L4_INLINE void
l4util_atomic_add(volatile long * dest,long val)468 l4util_atomic_add(volatile long *dest, long val)
469 { __atomic_fetch_add(dest, val, __ATOMIC_SEQ_CST); }
470 
471 L4_INLINE void
l4util_sub8(volatile l4_uint8_t * dest,l4_uint8_t val)472 l4util_sub8(volatile l4_uint8_t *dest, l4_uint8_t val)
473 { __atomic_fetch_sub(dest, val, __ATOMIC_SEQ_CST); }
474 
475 L4_INLINE void
l4util_sub16(volatile l4_uint16_t * dest,l4_uint16_t val)476 l4util_sub16(volatile l4_uint16_t *dest, l4_uint16_t val)
477 { __atomic_fetch_sub(dest, val, __ATOMIC_SEQ_CST); }
478 
479 L4_INLINE void
l4util_sub32(volatile l4_uint32_t * dest,l4_uint32_t val)480 l4util_sub32(volatile l4_uint32_t *dest, l4_uint32_t val)
481 { __atomic_fetch_sub(dest, val, __ATOMIC_SEQ_CST); }
482 
483 L4_INLINE void
l4util_and8(volatile l4_uint8_t * dest,l4_uint8_t val)484 l4util_and8(volatile l4_uint8_t *dest, l4_uint8_t val)
485 { __atomic_fetch_and(dest, val, __ATOMIC_SEQ_CST); }
486 
487 L4_INLINE void
l4util_and16(volatile l4_uint16_t * dest,l4_uint16_t val)488 l4util_and16(volatile l4_uint16_t *dest, l4_uint16_t val)
489 { __atomic_fetch_and(dest, val, __ATOMIC_SEQ_CST); }
490 
491 L4_INLINE void
l4util_and32(volatile l4_uint32_t * dest,l4_uint32_t val)492 l4util_and32(volatile l4_uint32_t *dest, l4_uint32_t val)
493 { __atomic_fetch_and(dest, val, __ATOMIC_SEQ_CST); }
494 
495 L4_INLINE void
l4util_or8(volatile l4_uint8_t * dest,l4_uint8_t val)496 l4util_or8(volatile l4_uint8_t *dest, l4_uint8_t val)
497 { __atomic_fetch_or(dest, val, __ATOMIC_SEQ_CST); }
498 
499 L4_INLINE void
l4util_or16(volatile l4_uint16_t * dest,l4_uint16_t val)500 l4util_or16(volatile l4_uint16_t *dest, l4_uint16_t val)
501 { __atomic_fetch_or(dest, val, __ATOMIC_SEQ_CST); }
502 
503 L4_INLINE void
l4util_or32(volatile l4_uint32_t * dest,l4_uint32_t val)504 l4util_or32(volatile l4_uint32_t *dest, l4_uint32_t val)
505 { __atomic_fetch_or(dest, val, __ATOMIC_SEQ_CST); }
506 
507 L4_INLINE l4_uint8_t
l4util_add8_res(volatile l4_uint8_t * dest,l4_uint8_t val)508 l4util_add8_res(volatile l4_uint8_t *dest, l4_uint8_t val)
509 { return __atomic_add_fetch(dest, val, __ATOMIC_SEQ_CST); }
510 
511 L4_INLINE l4_uint16_t
l4util_add16_res(volatile l4_uint16_t * dest,l4_uint16_t val)512 l4util_add16_res(volatile l4_uint16_t *dest, l4_uint16_t val)
513 { return __atomic_add_fetch(dest, val, __ATOMIC_SEQ_CST); }
514 
515 L4_INLINE l4_uint32_t
l4util_add32_res(volatile l4_uint32_t * dest,l4_uint32_t val)516 l4util_add32_res(volatile l4_uint32_t *dest, l4_uint32_t val)
517 { return __atomic_add_fetch(dest, val, __ATOMIC_SEQ_CST); }
518 
519 L4_INLINE l4_uint8_t
l4util_sub8_res(volatile l4_uint8_t * dest,l4_uint8_t val)520 l4util_sub8_res(volatile l4_uint8_t *dest, l4_uint8_t val)
521 { return __atomic_sub_fetch(dest, val, __ATOMIC_SEQ_CST); }
522 
523 L4_INLINE l4_uint16_t
l4util_sub16_res(volatile l4_uint16_t * dest,l4_uint16_t val)524 l4util_sub16_res(volatile l4_uint16_t *dest, l4_uint16_t val)
525 { return __atomic_sub_fetch(dest, val, __ATOMIC_SEQ_CST); }
526 
527 L4_INLINE l4_uint32_t
l4util_sub32_res(volatile l4_uint32_t * dest,l4_uint32_t val)528 l4util_sub32_res(volatile l4_uint32_t *dest, l4_uint32_t val)
529 { return __atomic_sub_fetch(dest, val, __ATOMIC_SEQ_CST); }
530 
531 L4_INLINE l4_uint8_t
l4util_and8_res(volatile l4_uint8_t * dest,l4_uint8_t val)532 l4util_and8_res(volatile l4_uint8_t *dest, l4_uint8_t val)
533 { return __atomic_and_fetch(dest, val, __ATOMIC_SEQ_CST); }
534 
535 L4_INLINE l4_uint16_t
l4util_and16_res(volatile l4_uint16_t * dest,l4_uint16_t val)536 l4util_and16_res(volatile l4_uint16_t *dest, l4_uint16_t val)
537 { return __atomic_and_fetch(dest, val, __ATOMIC_SEQ_CST); }
538 
539 L4_INLINE l4_uint32_t
l4util_and32_res(volatile l4_uint32_t * dest,l4_uint32_t val)540 l4util_and32_res(volatile l4_uint32_t *dest, l4_uint32_t val)
541 { return __atomic_and_fetch(dest, val, __ATOMIC_SEQ_CST); }
542 
543 L4_INLINE l4_uint8_t
l4util_or8_res(volatile l4_uint8_t * dest,l4_uint8_t val)544 l4util_or8_res(volatile l4_uint8_t *dest, l4_uint8_t val)
545 { return __atomic_or_fetch(dest, val, __ATOMIC_SEQ_CST); }
546 
547 L4_INLINE l4_uint16_t
l4util_or16_res(volatile l4_uint16_t * dest,l4_uint16_t val)548 l4util_or16_res(volatile l4_uint16_t *dest, l4_uint16_t val)
549 { return __atomic_or_fetch(dest, val, __ATOMIC_SEQ_CST); }
550 
551 L4_INLINE l4_uint32_t
l4util_or32_res(volatile l4_uint32_t * dest,l4_uint32_t val)552 l4util_or32_res(volatile l4_uint32_t *dest, l4_uint32_t val)
553 { return __atomic_or_fetch(dest, val, __ATOMIC_SEQ_CST); }
554 
555 #endif /* ! __L4UTIL__INCLUDE__ATOMIC_H__ */
556