// -*- C++ -*- header.

// Copyright (C) 2008-2019 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file include/atomic
 *  This is a Standard C++ Library header.
 */

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html

#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#else

#include <bits/atomic_base.h>
#include <bits/move.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @addtogroup atomics
   * @{
   */

#if __cplusplus >= 201703L
# define __cpp_lib_atomic_is_always_lock_free 201603
#endif

  template<typename _Tp>
    struct atomic;

  /// atomic<bool>
  // NB: No operators or fetch-operations for this type.
  template<>
  struct atomic<bool>
  {
    using value_type = bool;

  private:
    __atomic_base<bool>	_M_base;

  public:
    atomic() noexcept = default;
    ~atomic() noexcept = default;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    constexpr atomic(bool __i) noexcept : _M_base(__i) { }

    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    bool
    operator=(bool __i) volatile noexcept
    { return _M_base.operator=(__i); }

    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

#if __cplusplus >= 201703L
    static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
#endif

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i,
	     memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			  memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			  memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
			  memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
		     memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
			    memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
		    memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };
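
  // Illustrative usage sketch (comments only, not part of the header):
  // atomic<bool> provides loads, stores, exchange and compare-exchange,
  // but deliberately no arithmetic or bitwise fetch-operations.
  //
  //   std::atomic<bool> ready(false);
  //   // writer:  ready.store(true, std::memory_order_release);
  //   // reader:  while (!ready.load(std::memory_order_acquire)) { }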


  /**
   *  @brief Generic atomic type, primary class template.
   *
   *  @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
      using value_type = _Tp;

    private:
      // Align 1/2/4/8/16-byte types to at least their size.
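      // (sizeof(_Tp) & (sizeof(_Tp) - 1)) is non-zero exactly when
      // sizeof(_Tp) is not a power of two, so only power-of-two sizes
      // up to 16 bytes get the stricter alignment below; all other
      // types keep plain alignof(_Tp).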
      static constexpr int _S_min_alignment
	= (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
	? 0 : sizeof(_Tp);

      static constexpr int _S_alignment
        = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      alignas(_S_alignment) _Tp _M_i;

      static_assert(__is_trivially_copyable(_Tp),
		    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
		    "Incomplete or zero-sized types are not supported");

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      {
	// Produce a fake, minimally aligned pointer.
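	// The value -_S_alignment, reinterpreted as a pointer, is the
	// highest address that is a multiple of _S_alignment, so the
	// builtin is queried with the minimum guaranteed alignment
	// rather than this particular object's address.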
	return __atomic_is_lock_free(sizeof(_M_i),
	    reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
	// Produce a fake, minimally aligned pointer.
	return __atomic_is_lock_free(sizeof(_M_i),
	    reinterpret_cast<void *>(-_S_alignment));
      }

#if __cplusplus >= 201703L
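      // A null pointer argument asks __atomic_always_lock_free to
      // assume typical alignment for an object of the given size.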
      static constexpr bool is_always_lock_free
	= __atomic_always_lock_free(sizeof(_M_i), 0);
#endif

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
	return *__ptr;
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
	return *__ptr;
      }

      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
			  __ptr, int(__m));
	return *__ptr;
      }

      _Tp
      exchange(_Tp __i,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
			  __ptr, int(__m));
	return *__ptr;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) noexcept
      {
	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 true, int(__s), int(__f));
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) volatile noexcept
      {
	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 true, int(__s), int(__f));
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
			    memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
				     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
		     memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
				     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) noexcept
      {
	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 false, int(__s), int(__f));
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) volatile noexcept
      {
	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 false, int(__s), int(__f));
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
			      memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
				       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
		     memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
				       __cmpexch_failure_order(__m)); }
    };
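
  // Illustrative usage sketch (comments only, not part of the header):
  // any trivially copyable type can be made atomic, and
  // compare_exchange_weak is the usual building block for
  // read-modify-write loops on such types.
  //
  //   struct Point { int x, y; };            // trivially copyable
  //   std::atomic<Point> p(Point{0, 0});
  //   Point expected = p.load();
  //   Point desired{expected.x + 1, expected.y};
  //   while (!p.compare_exchange_weak(expected, desired))
  //     desired = Point{expected.x + 1, expected.y};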


  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      using value_type = _Tp*;
      using difference_type = ptrdiff_t;

      typedef _Tp* 			__pointer_type;
      typedef __atomic_base<_Tp*>	__base_type;
      __base_type			_M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b++;
      }

      __pointer_type
      operator++(int) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b++;
      }

      __pointer_type
      operator--(int) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b--;
      }

      __pointer_type
      operator--(int) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b--;
      }

      __pointer_type
      operator++() noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return ++_M_b;
      }

      __pointer_type
      operator++() volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return ++_M_b;
      }

      __pointer_type
      operator--() noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return --_M_b;
      }

      __pointer_type
      operator--() volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return --_M_b;
      }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator+=(__d);
      }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator+=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator-=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator-=(__d);
      }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
	= ATOMIC_POINTER_LOCK_FREE == 2;
#endif

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

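      // Note: the weak compare-exchange forms below forward to the
      // base's compare_exchange_strong; a strong CAS never fails
      // spuriously, so it also satisfies the weak contract.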
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1,
			    memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m = memory_order_seq_cst) noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
		    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1,
			      memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m = memory_order_seq_cst) noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
		    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_sub(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_sub(__d, __m);
      }
    };
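
  // Illustrative usage sketch (comments only, not part of the header):
  // atomic pointers do arithmetic in units of the pointed-to type.
  //
  //   int data[4] = {1, 2, 3, 4};
  //   std::atomic<int*> cursor(data);
  //   int* p = cursor.fetch_add(1);   // returns data, leaves data + 1
  //   cursor += 2;                    // cursor now holds data + 3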


  /// Explicit specialization for char.
  template<>
    struct atomic<char> : __atomic_base<char>
    {
      typedef char 			__integral_type;
      typedef __atomic_base<char> 	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : __atomic_base<signed char>
    {
      typedef signed char 		__integral_type;
      typedef __atomic_base<signed char> 	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : __atomic_base<unsigned char>
    {
      typedef unsigned char 		__integral_type;
      typedef __atomic_base<unsigned char> 	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : __atomic_base<short>
    {
      typedef short 			__integral_type;
      typedef __atomic_base<short> 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : __atomic_base<unsigned short>
    {
      typedef unsigned short 		__integral_type;
      typedef __atomic_base<unsigned short> 	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : __atomic_base<int>
    {
      typedef int 			__integral_type;
      typedef __atomic_base<int> 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : __atomic_base<unsigned int>
    {
      typedef unsigned int		__integral_type;
      typedef __atomic_base<unsigned int> 	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : __atomic_base<long>
    {
      typedef long 			__integral_type;
      typedef __atomic_base<long> 	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : __atomic_base<unsigned long>
    {
      typedef unsigned long 		__integral_type;
      typedef __atomic_base<unsigned long> 	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : __atomic_base<long long>
    {
      typedef long long 		__integral_type;
      typedef __atomic_base<long long> 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : __atomic_base<unsigned long long>
    {
      typedef unsigned long long       	__integral_type;
      typedef __atomic_base<unsigned long long> 	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : __atomic_base<wchar_t>
    {
      typedef wchar_t 			__integral_type;
      typedef __atomic_base<wchar_t> 	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
#endif
    };

#ifdef _GLIBCXX_USE_CHAR8_T
  /// Explicit specialization for char8_t.
  template<>
    struct atomic<char8_t> : __atomic_base<char8_t>
    {
      typedef char8_t 			__integral_type;
      typedef __atomic_base<char8_t> 	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR8_T_LOCK_FREE == 2;
#endif
    };
#endif

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : __atomic_base<char16_t>
    {
      typedef char16_t 			__integral_type;
      typedef __atomic_base<char16_t> 	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
	= ATOMIC_CHAR16_T_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : __atomic_base<char32_t>
    {
      typedef char32_t 			__integral_type;
      typedef __atomic_base<char32_t> 	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
	= ATOMIC_CHAR32_T_LOCK_FREE == 2;
#endif
    };


  /// atomic_bool
  typedef atomic<bool>			atomic_bool;

  /// atomic_char
  typedef atomic<char>			atomic_char;

  /// atomic_schar
  typedef atomic<signed char>		atomic_schar;

  /// atomic_uchar
  typedef atomic<unsigned char>		atomic_uchar;

  /// atomic_short
  typedef atomic<short>			atomic_short;

  /// atomic_ushort
  typedef atomic<unsigned short>	atomic_ushort;

  /// atomic_int
  typedef atomic<int>			atomic_int;

  /// atomic_uint
  typedef atomic<unsigned int>		atomic_uint;

  /// atomic_long
  typedef atomic<long>			atomic_long;

  /// atomic_ulong
  typedef atomic<unsigned long>		atomic_ulong;

  /// atomic_llong
  typedef atomic<long long>		atomic_llong;

  /// atomic_ullong
  typedef atomic<unsigned long long>	atomic_ullong;

  /// atomic_wchar_t
  typedef atomic<wchar_t>		atomic_wchar_t;

#ifdef _GLIBCXX_USE_CHAR8_T
  /// atomic_char8_t
  typedef atomic<char8_t>		atomic_char8_t;
#endif

  /// atomic_char16_t
  typedef atomic<char16_t>		atomic_char16_t;

  /// atomic_char32_t
  typedef atomic<char32_t>		atomic_char32_t;

#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  // _GLIBCXX_RESOLVE_LIB_DEFECTS
  // 2441. Exact-width atomic typedefs should be provided

  /// atomic_int8_t
  typedef atomic<int8_t>		atomic_int8_t;

  /// atomic_uint8_t
  typedef atomic<uint8_t>		atomic_uint8_t;

  /// atomic_int16_t
  typedef atomic<int16_t>		atomic_int16_t;

  /// atomic_uint16_t
  typedef atomic<uint16_t>		atomic_uint16_t;

  /// atomic_int32_t
  typedef atomic<int32_t>		atomic_int32_t;

  /// atomic_uint32_t
  typedef atomic<uint32_t>		atomic_uint32_t;

  /// atomic_int64_t
  typedef atomic<int64_t>		atomic_int64_t;

  /// atomic_uint64_t
  typedef atomic<uint64_t>		atomic_uint64_t;


  /// atomic_int_least8_t
  typedef atomic<int_least8_t>		atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef atomic<uint_least8_t>		atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef atomic<int_least16_t>		atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef atomic<uint_least16_t>	atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef atomic<int_least32_t>		atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef atomic<uint_least32_t>	atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef atomic<int_least64_t>		atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef atomic<uint_least64_t>	atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef atomic<int_fast8_t>		atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef atomic<uint_fast8_t>		atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef atomic<int_fast16_t>		atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef atomic<uint_fast16_t>		atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef atomic<int_fast32_t>		atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef atomic<uint_fast32_t>		atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef atomic<int_fast64_t>		atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef atomic<uint_fast64_t>		atomic_uint_fast64_t;
#endif


  /// atomic_intptr_t
  typedef atomic<intptr_t>		atomic_intptr_t;

  /// atomic_uintptr_t
  typedef atomic<uintptr_t>		atomic_uintptr_t;

  /// atomic_size_t
  typedef atomic<size_t>		atomic_size_t;

  /// atomic_ptrdiff_t
  typedef atomic<ptrdiff_t>		atomic_ptrdiff_t;

#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  /// atomic_intmax_t
  typedef atomic<intmax_t>		atomic_intmax_t;

  /// atomic_uintmax_t
  typedef atomic<uintmax_t>		atomic_uintmax_t;
#endif

  // Function definitions, atomic_flag operations.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
			     memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
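
  // Illustrative usage sketch (comments only, not part of the header):
  // test_and_set and clear are enough to build a simple spinlock.
  //
  //   std::atomic_flag lock = ATOMIC_FLAG_INIT;
  //   // acquire: while (lock.test_and_set(std::memory_order_acquire)) { }
  //   // release: lock.clear(std::memory_order_release);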


  template<typename _Tp>
    using __atomic_val_t = typename atomic<_Tp>::value_type;
  template<typename _Tp>
    using __atomic_diff_t = typename atomic<_Tp>::difference_type;

  // [atomics.nonmembers] Non-member functions.
  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			  memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			  memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
			 memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
					  __atomic_val_t<_ITp>* __i1,
					  __atomic_val_t<_ITp> __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
					  __atomic_val_t<_ITp>* __i1,
					  __atomic_val_t<_ITp> __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
					    __atomic_val_t<_ITp>* __i1,
					    __atomic_val_t<_ITp> __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
					    __atomic_val_t<_ITp>* __i1,
					    __atomic_val_t<_ITp> __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }


  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a,
		    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
				 __atomic_val_t<_ITp>* __i1,
				 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
				 __atomic_val_t<_ITp>* __i1,
				 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
				   __atomic_val_t<_ITp>* __i1,
				   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
				   __atomic_val_t<_ITp>* __i1,
				   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }
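
  // Illustrative usage sketch (comments only, not part of the header):
  // the non-member functions mirror the member API but take the expected
  // value by pointer, matching the C11 <stdatomic.h> style.
  //
  //   std::atomic<int> a(0);
  //   int expected = 0;
  //   bool ok = std::atomic_compare_exchange_strong(&a, &expected, 42);
  //   // on failure, expected has been updated to the value observed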

  // Function templates for atomic integral and atomic pointer operations
  // only.  Some operations (and, or, xor) are only available for atomic
  // integrals, which is enforced by taking a parameter of type
  // __atomic_base<_ITp>*.

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a,
		    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
		    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
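
  // Illustrative usage sketch (comments only, not part of the header):
  //
  //   std::atomic<unsigned> mask(0);
  //   std::atomic_fetch_or(&mask, 4u);   // set bit 2
  //   std::atomic_fetch_add_explicit(&mask, 1u, std::memory_order_relaxed);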

  /// @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // C++11

#endif // _GLIBCXX_ATOMIC