1// -*- C++ -*- header.
2
3// Copyright (C) 2008-2015 Free Software Foundation, Inc.
4//
5// This file is part of the GNU ISO C++ Library.  This library is free
6// software; you can redistribute it and/or modify it under the
7// terms of the GNU General Public License as published by the
8// Free Software Foundation; either version 3, or (at your option)
9// any later version.
10
11// This library is distributed in the hope that it will be useful,
12// but WITHOUT ANY WARRANTY; without even the implied warranty of
13// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14// GNU General Public License for more details.
15
16// Under Section 7 of GPL version 3, you are granted additional
17// permissions described in the GCC Runtime Library Exception, version
18// 3.1, as published by the Free Software Foundation.
19
20// You should have received a copy of the GNU General Public License and
21// a copy of the GCC Runtime Library Exception along with this program;
22// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
23// <http://www.gnu.org/licenses/>.
24
25/** @file include/atomic
26 *  This is a Standard C++ Library header.
27 */
28
29// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
31
32#ifndef _GLIBCXX_ATOMIC
33#define _GLIBCXX_ATOMIC 1
34
35#pragma GCC system_header
36
37#if __cplusplus < 201103L
38# include <bits/c++0x_warning.h>
39#else
40
41#include <bits/atomic_base.h>
42
43namespace std _GLIBCXX_VISIBILITY(default)
44{
45_GLIBCXX_BEGIN_NAMESPACE_VERSION
46
47  /**
48   * @addtogroup atomics
49   * @{
50   */
51
  // Forward declaration of the primary template, defined below; needed so
  // the atomic<bool> explicit specialization can precede it.
  template<typename _Tp>
    struct atomic;
54
55  /// atomic<bool>
56  // NB: No operators or fetch-operations for this type.
57  template<>
58  struct atomic<bool>
59  {
60  private:
61    __atomic_base<bool>	_M_base;
62
63  public:
64    atomic() noexcept = default;
65    ~atomic() noexcept = default;
66    atomic(const atomic&) = delete;
67    atomic& operator=(const atomic&) = delete;
68    atomic& operator=(const atomic&) volatile = delete;
69
70    constexpr atomic(bool __i) noexcept : _M_base(__i) { }
71
72    bool
73    operator=(bool __i) noexcept
74    { return _M_base.operator=(__i); }
75
76    bool
77    operator=(bool __i) volatile noexcept
78    { return _M_base.operator=(__i); }
79
80    operator bool() const noexcept
81    { return _M_base.load(); }
82
83    operator bool() const volatile noexcept
84    { return _M_base.load(); }
85
86    bool
87    is_lock_free() const noexcept { return _M_base.is_lock_free(); }
88
89    bool
90    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
91
92    void
93    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
94    { _M_base.store(__i, __m); }
95
96    void
97    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
98    { _M_base.store(__i, __m); }
99
100    bool
101    load(memory_order __m = memory_order_seq_cst) const noexcept
102    { return _M_base.load(__m); }
103
104    bool
105    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
106    { return _M_base.load(__m); }
107
108    bool
109    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
110    { return _M_base.exchange(__i, __m); }
111
112    bool
113    exchange(bool __i,
114	     memory_order __m = memory_order_seq_cst) volatile noexcept
115    { return _M_base.exchange(__i, __m); }
116
117    bool
118    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
119			  memory_order __m2) noexcept
120    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
121
122    bool
123    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
124			  memory_order __m2) volatile noexcept
125    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
126
127    bool
128    compare_exchange_weak(bool& __i1, bool __i2,
129			  memory_order __m = memory_order_seq_cst) noexcept
130    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
131
132    bool
133    compare_exchange_weak(bool& __i1, bool __i2,
134		     memory_order __m = memory_order_seq_cst) volatile noexcept
135    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
136
137    bool
138    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
139			    memory_order __m2) noexcept
140    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
141
142    bool
143    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
144			    memory_order __m2) volatile noexcept
145    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
146
147    bool
148    compare_exchange_strong(bool& __i1, bool __i2,
149			    memory_order __m = memory_order_seq_cst) noexcept
150    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
151
152    bool
153    compare_exchange_strong(bool& __i1, bool __i2,
154		    memory_order __m = memory_order_seq_cst) volatile noexcept
155    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
156  };
157
158
159  /**
160   *  @brief Generic atomic type, primary class template.
161   *
162   *  @tparam _Tp  Type to be made atomic, must be trivally copyable.
163   */
164  template<typename _Tp>
165    struct atomic
166    {
167    private:
168      // Align 1/2/4/8/16-byte types to at least their size.
169      static constexpr int _S_min_alignment
170	= (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
171	? 0 : sizeof(_Tp);
172
173      static constexpr int _S_alignment
174        = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
175
176      alignas(_S_alignment) _Tp _M_i;
177
178      static_assert(__is_trivially_copyable(_Tp),
179		    "std::atomic requires a trivially copyable type");
180
181      static_assert(sizeof(_Tp) > 0,
182		    "Incomplete or zero-sized types are not supported");
183
184    public:
185      atomic() noexcept = default;
186      ~atomic() noexcept = default;
187      atomic(const atomic&) = delete;
188      atomic& operator=(const atomic&) = delete;
189      atomic& operator=(const atomic&) volatile = delete;
190
191      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
192
193      operator _Tp() const noexcept
194      { return load(); }
195
196      operator _Tp() const volatile noexcept
197      { return load(); }
198
199      _Tp
200      operator=(_Tp __i) noexcept
201      { store(__i); return __i; }
202
203      _Tp
204      operator=(_Tp __i) volatile noexcept
205      { store(__i); return __i; }
206
207      bool
208      is_lock_free() const noexcept
209      {
210	// Produce a fake, minimally aligned pointer.
211	return __atomic_is_lock_free(sizeof(_M_i),
212	    reinterpret_cast<void *>(-__alignof(_M_i)));
213      }
214
215      bool
216      is_lock_free() const volatile noexcept
217      {
218	// Produce a fake, minimally aligned pointer.
219	return __atomic_is_lock_free(sizeof(_M_i),
220	    reinterpret_cast<void *>(-__alignof(_M_i)));
221      }
222
223      void
224      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
225      { __atomic_store(&_M_i, &__i, __m); }
226
227      void
228      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
229      { __atomic_store(&_M_i, &__i, __m); }
230
231      _Tp
232      load(memory_order __m = memory_order_seq_cst) const noexcept
233      {
234	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
235	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
236	__atomic_load(&_M_i, __ptr, __m);
237	return *__ptr;
238      }
239
240      _Tp
241      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
242      {
243        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
244	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
245	__atomic_load(&_M_i, __ptr, __m);
246	return *__ptr;
247      }
248
249      _Tp
250      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
251      {
252        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
253	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
254	__atomic_exchange(&_M_i, &__i, __ptr, __m);
255	return *__ptr;
256      }
257
258      _Tp
259      exchange(_Tp __i,
260	       memory_order __m = memory_order_seq_cst) volatile noexcept
261      {
262        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
263	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
264	__atomic_exchange(&_M_i, &__i, __ptr, __m);
265	return *__ptr;
266      }
267
268      bool
269      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
270			    memory_order __f) noexcept
271      {
272	return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
273      }
274
275      bool
276      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
277			    memory_order __f) volatile noexcept
278      {
279	return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
280      }
281
282      bool
283      compare_exchange_weak(_Tp& __e, _Tp __i,
284			    memory_order __m = memory_order_seq_cst) noexcept
285      { return compare_exchange_weak(__e, __i, __m,
286                                     __cmpexch_failure_order(__m)); }
287
288      bool
289      compare_exchange_weak(_Tp& __e, _Tp __i,
290		     memory_order __m = memory_order_seq_cst) volatile noexcept
291      { return compare_exchange_weak(__e, __i, __m,
292                                     __cmpexch_failure_order(__m)); }
293
294      bool
295      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
296			      memory_order __f) noexcept
297      {
298	return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
299      }
300
301      bool
302      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
303			      memory_order __f) volatile noexcept
304      {
305	return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
306      }
307
308      bool
309      compare_exchange_strong(_Tp& __e, _Tp __i,
310			       memory_order __m = memory_order_seq_cst) noexcept
311      { return compare_exchange_strong(__e, __i, __m,
312                                       __cmpexch_failure_order(__m)); }
313
314      bool
315      compare_exchange_strong(_Tp& __e, _Tp __i,
316		     memory_order __m = memory_order_seq_cst) volatile noexcept
317      { return compare_exchange_strong(__e, __i, __m,
318                                       __cmpexch_failure_order(__m)); }
319    };
320
321
322  /// Partial specialization for pointer types.
323  template<typename _Tp>
324    struct atomic<_Tp*>
325    {
326      typedef _Tp* 			__pointer_type;
327      typedef __atomic_base<_Tp*>	__base_type;
328      __base_type			_M_b;
329
330      atomic() noexcept = default;
331      ~atomic() noexcept = default;
332      atomic(const atomic&) = delete;
333      atomic& operator=(const atomic&) = delete;
334      atomic& operator=(const atomic&) volatile = delete;
335
336      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
337
338      operator __pointer_type() const noexcept
339      { return __pointer_type(_M_b); }
340
341      operator __pointer_type() const volatile noexcept
342      { return __pointer_type(_M_b); }
343
344      __pointer_type
345      operator=(__pointer_type __p) noexcept
346      { return _M_b.operator=(__p); }
347
348      __pointer_type
349      operator=(__pointer_type __p) volatile noexcept
350      { return _M_b.operator=(__p); }
351
352      __pointer_type
353      operator++(int) noexcept
354      { return _M_b++; }
355
356      __pointer_type
357      operator++(int) volatile noexcept
358      { return _M_b++; }
359
360      __pointer_type
361      operator--(int) noexcept
362      { return _M_b--; }
363
364      __pointer_type
365      operator--(int) volatile noexcept
366      { return _M_b--; }
367
368      __pointer_type
369      operator++() noexcept
370      { return ++_M_b; }
371
372      __pointer_type
373      operator++() volatile noexcept
374      { return ++_M_b; }
375
376      __pointer_type
377      operator--() noexcept
378      { return --_M_b; }
379
380      __pointer_type
381      operator--() volatile noexcept
382      { return --_M_b; }
383
384      __pointer_type
385      operator+=(ptrdiff_t __d) noexcept
386      { return _M_b.operator+=(__d); }
387
388      __pointer_type
389      operator+=(ptrdiff_t __d) volatile noexcept
390      { return _M_b.operator+=(__d); }
391
392      __pointer_type
393      operator-=(ptrdiff_t __d) noexcept
394      { return _M_b.operator-=(__d); }
395
396      __pointer_type
397      operator-=(ptrdiff_t __d) volatile noexcept
398      { return _M_b.operator-=(__d); }
399
400      bool
401      is_lock_free() const noexcept
402      { return _M_b.is_lock_free(); }
403
404      bool
405      is_lock_free() const volatile noexcept
406      { return _M_b.is_lock_free(); }
407
408      void
409      store(__pointer_type __p,
410	    memory_order __m = memory_order_seq_cst) noexcept
411      { return _M_b.store(__p, __m); }
412
413      void
414      store(__pointer_type __p,
415	    memory_order __m = memory_order_seq_cst) volatile noexcept
416      { return _M_b.store(__p, __m); }
417
418      __pointer_type
419      load(memory_order __m = memory_order_seq_cst) const noexcept
420      { return _M_b.load(__m); }
421
422      __pointer_type
423      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
424      { return _M_b.load(__m); }
425
426      __pointer_type
427      exchange(__pointer_type __p,
428	       memory_order __m = memory_order_seq_cst) noexcept
429      { return _M_b.exchange(__p, __m); }
430
431      __pointer_type
432      exchange(__pointer_type __p,
433	       memory_order __m = memory_order_seq_cst) volatile noexcept
434      { return _M_b.exchange(__p, __m); }
435
436      bool
437      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
438			    memory_order __m1, memory_order __m2) noexcept
439      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
440
441      bool
442      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
443			    memory_order __m1,
444			    memory_order __m2) volatile noexcept
445      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
446
447      bool
448      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
449			    memory_order __m = memory_order_seq_cst) noexcept
450      {
451	return compare_exchange_weak(__p1, __p2, __m,
452				     __cmpexch_failure_order(__m));
453      }
454
455      bool
456      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
457		    memory_order __m = memory_order_seq_cst) volatile noexcept
458      {
459	return compare_exchange_weak(__p1, __p2, __m,
460				     __cmpexch_failure_order(__m));
461      }
462
463      bool
464      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
465			      memory_order __m1, memory_order __m2) noexcept
466      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
467
468      bool
469      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
470			      memory_order __m1,
471			      memory_order __m2) volatile noexcept
472      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
473
474      bool
475      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
476			      memory_order __m = memory_order_seq_cst) noexcept
477      {
478	return _M_b.compare_exchange_strong(__p1, __p2, __m,
479					    __cmpexch_failure_order(__m));
480      }
481
482      bool
483      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
484		    memory_order __m = memory_order_seq_cst) volatile noexcept
485      {
486	return _M_b.compare_exchange_strong(__p1, __p2, __m,
487					    __cmpexch_failure_order(__m));
488      }
489
490      __pointer_type
491      fetch_add(ptrdiff_t __d,
492		memory_order __m = memory_order_seq_cst) noexcept
493      { return _M_b.fetch_add(__d, __m); }
494
495      __pointer_type
496      fetch_add(ptrdiff_t __d,
497		memory_order __m = memory_order_seq_cst) volatile noexcept
498      { return _M_b.fetch_add(__d, __m); }
499
500      __pointer_type
501      fetch_sub(ptrdiff_t __d,
502		memory_order __m = memory_order_seq_cst) noexcept
503      { return _M_b.fetch_sub(__d, __m); }
504
505      __pointer_type
506      fetch_sub(ptrdiff_t __d,
507		memory_order __m = memory_order_seq_cst) volatile noexcept
508      { return _M_b.fetch_sub(__d, __m); }
509    };
510
511
512  /// Explicit specialization for char.
513  template<>
514    struct atomic<char> : __atomic_base<char>
515    {
516      typedef char 			__integral_type;
517      typedef __atomic_base<char> 	__base_type;
518
519      atomic() noexcept = default;
520      ~atomic() noexcept = default;
521      atomic(const atomic&) = delete;
522      atomic& operator=(const atomic&) = delete;
523      atomic& operator=(const atomic&) volatile = delete;
524
525      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
526
527      using __base_type::operator __integral_type;
528      using __base_type::operator=;
529    };
530
531  /// Explicit specialization for signed char.
532  template<>
533    struct atomic<signed char> : __atomic_base<signed char>
534    {
535      typedef signed char 		__integral_type;
536      typedef __atomic_base<signed char> 	__base_type;
537
538      atomic() noexcept= default;
539      ~atomic() noexcept = default;
540      atomic(const atomic&) = delete;
541      atomic& operator=(const atomic&) = delete;
542      atomic& operator=(const atomic&) volatile = delete;
543
544      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
545
546      using __base_type::operator __integral_type;
547      using __base_type::operator=;
548    };
549
550  /// Explicit specialization for unsigned char.
551  template<>
552    struct atomic<unsigned char> : __atomic_base<unsigned char>
553    {
554      typedef unsigned char 		__integral_type;
555      typedef __atomic_base<unsigned char> 	__base_type;
556
557      atomic() noexcept= default;
558      ~atomic() noexcept = default;
559      atomic(const atomic&) = delete;
560      atomic& operator=(const atomic&) = delete;
561      atomic& operator=(const atomic&) volatile = delete;
562
563      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
564
565      using __base_type::operator __integral_type;
566      using __base_type::operator=;
567    };
568
569  /// Explicit specialization for short.
570  template<>
571    struct atomic<short> : __atomic_base<short>
572    {
573      typedef short 			__integral_type;
574      typedef __atomic_base<short> 		__base_type;
575
576      atomic() noexcept = default;
577      ~atomic() noexcept = default;
578      atomic(const atomic&) = delete;
579      atomic& operator=(const atomic&) = delete;
580      atomic& operator=(const atomic&) volatile = delete;
581
582      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
583
584      using __base_type::operator __integral_type;
585      using __base_type::operator=;
586    };
587
588  /// Explicit specialization for unsigned short.
589  template<>
590    struct atomic<unsigned short> : __atomic_base<unsigned short>
591    {
592      typedef unsigned short 	      	__integral_type;
593      typedef __atomic_base<unsigned short> 		__base_type;
594
595      atomic() noexcept = default;
596      ~atomic() noexcept = default;
597      atomic(const atomic&) = delete;
598      atomic& operator=(const atomic&) = delete;
599      atomic& operator=(const atomic&) volatile = delete;
600
601      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
602
603      using __base_type::operator __integral_type;
604      using __base_type::operator=;
605    };
606
607  /// Explicit specialization for int.
608  template<>
609    struct atomic<int> : __atomic_base<int>
610    {
611      typedef int 			__integral_type;
612      typedef __atomic_base<int> 		__base_type;
613
614      atomic() noexcept = default;
615      ~atomic() noexcept = default;
616      atomic(const atomic&) = delete;
617      atomic& operator=(const atomic&) = delete;
618      atomic& operator=(const atomic&) volatile = delete;
619
620      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
621
622      using __base_type::operator __integral_type;
623      using __base_type::operator=;
624    };
625
626  /// Explicit specialization for unsigned int.
627  template<>
628    struct atomic<unsigned int> : __atomic_base<unsigned int>
629    {
630      typedef unsigned int		__integral_type;
631      typedef __atomic_base<unsigned int> 	__base_type;
632
633      atomic() noexcept = default;
634      ~atomic() noexcept = default;
635      atomic(const atomic&) = delete;
636      atomic& operator=(const atomic&) = delete;
637      atomic& operator=(const atomic&) volatile = delete;
638
639      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
640
641      using __base_type::operator __integral_type;
642      using __base_type::operator=;
643    };
644
645  /// Explicit specialization for long.
646  template<>
647    struct atomic<long> : __atomic_base<long>
648    {
649      typedef long 			__integral_type;
650      typedef __atomic_base<long> 	__base_type;
651
652      atomic() noexcept = default;
653      ~atomic() noexcept = default;
654      atomic(const atomic&) = delete;
655      atomic& operator=(const atomic&) = delete;
656      atomic& operator=(const atomic&) volatile = delete;
657
658      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
659
660      using __base_type::operator __integral_type;
661      using __base_type::operator=;
662    };
663
664  /// Explicit specialization for unsigned long.
665  template<>
666    struct atomic<unsigned long> : __atomic_base<unsigned long>
667    {
668      typedef unsigned long 		__integral_type;
669      typedef __atomic_base<unsigned long> 	__base_type;
670
671      atomic() noexcept = default;
672      ~atomic() noexcept = default;
673      atomic(const atomic&) = delete;
674      atomic& operator=(const atomic&) = delete;
675      atomic& operator=(const atomic&) volatile = delete;
676
677      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
678
679      using __base_type::operator __integral_type;
680      using __base_type::operator=;
681    };
682
683  /// Explicit specialization for long long.
684  template<>
685    struct atomic<long long> : __atomic_base<long long>
686    {
687      typedef long long 		__integral_type;
688      typedef __atomic_base<long long> 		__base_type;
689
690      atomic() noexcept = default;
691      ~atomic() noexcept = default;
692      atomic(const atomic&) = delete;
693      atomic& operator=(const atomic&) = delete;
694      atomic& operator=(const atomic&) volatile = delete;
695
696      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
697
698      using __base_type::operator __integral_type;
699      using __base_type::operator=;
700    };
701
702  /// Explicit specialization for unsigned long long.
703  template<>
704    struct atomic<unsigned long long> : __atomic_base<unsigned long long>
705    {
706      typedef unsigned long long       	__integral_type;
707      typedef __atomic_base<unsigned long long> 	__base_type;
708
709      atomic() noexcept = default;
710      ~atomic() noexcept = default;
711      atomic(const atomic&) = delete;
712      atomic& operator=(const atomic&) = delete;
713      atomic& operator=(const atomic&) volatile = delete;
714
715      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
716
717      using __base_type::operator __integral_type;
718      using __base_type::operator=;
719    };
720
721  /// Explicit specialization for wchar_t.
722  template<>
723    struct atomic<wchar_t> : __atomic_base<wchar_t>
724    {
725      typedef wchar_t 			__integral_type;
726      typedef __atomic_base<wchar_t> 	__base_type;
727
728      atomic() noexcept = default;
729      ~atomic() noexcept = default;
730      atomic(const atomic&) = delete;
731      atomic& operator=(const atomic&) = delete;
732      atomic& operator=(const atomic&) volatile = delete;
733
734      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
735
736      using __base_type::operator __integral_type;
737      using __base_type::operator=;
738    };
739
740  /// Explicit specialization for char16_t.
741  template<>
742    struct atomic<char16_t> : __atomic_base<char16_t>
743    {
744      typedef char16_t 			__integral_type;
745      typedef __atomic_base<char16_t> 	__base_type;
746
747      atomic() noexcept = default;
748      ~atomic() noexcept = default;
749      atomic(const atomic&) = delete;
750      atomic& operator=(const atomic&) = delete;
751      atomic& operator=(const atomic&) volatile = delete;
752
753      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
754
755      using __base_type::operator __integral_type;
756      using __base_type::operator=;
757    };
758
759  /// Explicit specialization for char32_t.
760  template<>
761    struct atomic<char32_t> : __atomic_base<char32_t>
762    {
763      typedef char32_t 			__integral_type;
764      typedef __atomic_base<char32_t> 	__base_type;
765
766      atomic() noexcept = default;
767      ~atomic() noexcept = default;
768      atomic(const atomic&) = delete;
769      atomic& operator=(const atomic&) = delete;
770      atomic& operator=(const atomic&) volatile = delete;
771
772      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
773
774      using __base_type::operator __integral_type;
775      using __base_type::operator=;
776    };
777
778
779  /// atomic_bool
780  typedef atomic<bool>			atomic_bool;
781
782  /// atomic_char
783  typedef atomic<char>			atomic_char;
784
785  /// atomic_schar
786  typedef atomic<signed char>		atomic_schar;
787
788  /// atomic_uchar
789  typedef atomic<unsigned char>		atomic_uchar;
790
791  /// atomic_short
792  typedef atomic<short>			atomic_short;
793
794  /// atomic_ushort
795  typedef atomic<unsigned short>	atomic_ushort;
796
797  /// atomic_int
798  typedef atomic<int>			atomic_int;
799
800  /// atomic_uint
801  typedef atomic<unsigned int>		atomic_uint;
802
803  /// atomic_long
804  typedef atomic<long>			atomic_long;
805
806  /// atomic_ulong
807  typedef atomic<unsigned long>		atomic_ulong;
808
809  /// atomic_llong
810  typedef atomic<long long>		atomic_llong;
811
812  /// atomic_ullong
813  typedef atomic<unsigned long long>	atomic_ullong;
814
815  /// atomic_wchar_t
816  typedef atomic<wchar_t>		atomic_wchar_t;
817
818  /// atomic_char16_t
819  typedef atomic<char16_t>		atomic_char16_t;
820
821  /// atomic_char32_t
822  typedef atomic<char32_t>		atomic_char32_t;
823
824
825  /// atomic_int_least8_t
826  typedef atomic<int_least8_t>		atomic_int_least8_t;
827
828  /// atomic_uint_least8_t
829  typedef atomic<uint_least8_t>		atomic_uint_least8_t;
830
831  /// atomic_int_least16_t
832  typedef atomic<int_least16_t>		atomic_int_least16_t;
833
834  /// atomic_uint_least16_t
835  typedef atomic<uint_least16_t>	atomic_uint_least16_t;
836
837  /// atomic_int_least32_t
838  typedef atomic<int_least32_t>		atomic_int_least32_t;
839
840  /// atomic_uint_least32_t
841  typedef atomic<uint_least32_t>	atomic_uint_least32_t;
842
843  /// atomic_int_least64_t
844  typedef atomic<int_least64_t>		atomic_int_least64_t;
845
846  /// atomic_uint_least64_t
847  typedef atomic<uint_least64_t>	atomic_uint_least64_t;
848
849
850  /// atomic_int_fast8_t
851  typedef atomic<int_fast8_t>		atomic_int_fast8_t;
852
853  /// atomic_uint_fast8_t
854  typedef atomic<uint_fast8_t>		atomic_uint_fast8_t;
855
856  /// atomic_int_fast16_t
857  typedef atomic<int_fast16_t>		atomic_int_fast16_t;
858
859  /// atomic_uint_fast16_t
860  typedef atomic<uint_fast16_t>		atomic_uint_fast16_t;
861
862  /// atomic_int_fast32_t
863  typedef atomic<int_fast32_t>		atomic_int_fast32_t;
864
865  /// atomic_uint_fast32_t
866  typedef atomic<uint_fast32_t>		atomic_uint_fast32_t;
867
868  /// atomic_int_fast64_t
869  typedef atomic<int_fast64_t>		atomic_int_fast64_t;
870
871  /// atomic_uint_fast64_t
872  typedef atomic<uint_fast64_t>		atomic_uint_fast64_t;
873
874
875  /// atomic_intptr_t
876  typedef atomic<intptr_t>		atomic_intptr_t;
877
878  /// atomic_uintptr_t
879  typedef atomic<uintptr_t>		atomic_uintptr_t;
880
881  /// atomic_size_t
882  typedef atomic<size_t>		atomic_size_t;
883
884  /// atomic_intmax_t
885  typedef atomic<intmax_t>		atomic_intmax_t;
886
887  /// atomic_uintmax_t
888  typedef atomic<uintmax_t>		atomic_uintmax_t;
889
890  /// atomic_ptrdiff_t
891  typedef atomic<ptrdiff_t>		atomic_ptrdiff_t;
892
893
894  // Function definitions, atomic_flag operations.
895  inline bool
896  atomic_flag_test_and_set_explicit(atomic_flag* __a,
897				    memory_order __m) noexcept
898  { return __a->test_and_set(__m); }
899
900  inline bool
901  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
902				    memory_order __m) noexcept
903  { return __a->test_and_set(__m); }
904
905  inline void
906  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
907  { __a->clear(__m); }
908
909  inline void
910  atomic_flag_clear_explicit(volatile atomic_flag* __a,
911			     memory_order __m) noexcept
912  { __a->clear(__m); }
913
914  inline bool
915  atomic_flag_test_and_set(atomic_flag* __a) noexcept
916  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
917
918  inline bool
919  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
920  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
921
922  inline void
923  atomic_flag_clear(atomic_flag* __a) noexcept
924  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
925
926  inline void
927  atomic_flag_clear(volatile atomic_flag* __a) noexcept
928  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
929
930
931  // Function templates generally applicable to atomic types.
932  template<typename _ITp>
933    inline bool
934    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
935    { return __a->is_lock_free(); }
936
937  template<typename _ITp>
938    inline bool
939    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
940    { return __a->is_lock_free(); }
941
942  template<typename _ITp>
943    inline void
944    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept
945    { __a->store(__i, memory_order_relaxed); }
946
947  template<typename _ITp>
948    inline void
949    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept
950    { __a->store(__i, memory_order_relaxed); }
951
952  template<typename _ITp>
953    inline void
954    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
955			  memory_order __m) noexcept
956    { __a->store(__i, __m); }
957
958  template<typename _ITp>
959    inline void
960    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
961			  memory_order __m) noexcept
962    { __a->store(__i, __m); }
963
964  template<typename _ITp>
965    inline _ITp
966    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
967    { return __a->load(__m); }
968
969  template<typename _ITp>
970    inline _ITp
971    atomic_load_explicit(const volatile atomic<_ITp>* __a,
972			 memory_order __m) noexcept
973    { return __a->load(__m); }
974
975  template<typename _ITp>
976    inline _ITp
977    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
978			     memory_order __m) noexcept
979    { return __a->exchange(__i, __m); }
980
981  template<typename _ITp>
982    inline _ITp
983    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
984			     memory_order __m) noexcept
985    { return __a->exchange(__i, __m); }
986
  /// Weak compare-and-exchange: if @a *__a equals @a *__i1, store @a __i2
  /// (using order @a __m1 on success); otherwise load the current value
  /// into @a *__i1 (using order @a __m2). May fail spuriously. Returns
  /// true on success.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
					  _ITp* __i1, _ITp __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
					  _ITp* __i1, _ITp __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1002
  /// Strong compare-and-exchange: as the weak form, but never fails
  /// spuriously. @a __m1 is the success order, @a __m2 the failure order.
  /// Returns true on success.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
					    _ITp* __i1, _ITp __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
					    _ITp* __i1, _ITp __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1018
1019
  /// Store @a __i into @a *__a with the default (seq_cst) memory order.
  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1029
  /// Return the value of @a *__a with the default (seq_cst) memory order.
  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }
1039
  /// Atomically replace the value of @a *__a with @a __i (seq_cst order),
  /// returning the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1049
  /// Weak compare-and-exchange with seq_cst ordering for both the success
  /// and the failure case. May fail spuriously; returns true on success.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
				 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
				 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }
1069
  /// Strong compare-and-exchange with seq_cst ordering for both the
  /// success and the failure case. Returns true on success.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
				   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
				   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }
1089
  // Function templates for atomic_integral operations only, using
  // __atomic_base. Template argument should be restricted to
  // integral types as specified in the standard, excluding address
  // types.
  /// Atomically add @a __i to @a *__a using order @a __m; returns the
  /// value held before the addition.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }
1105
  /// Atomically subtract @a __i from @a *__a using order @a __m; returns
  /// the value held before the subtraction.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }
1117
  /// Atomically AND @a __i into @a *__a using order @a __m; returns the
  /// value held before the operation.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }
1129
  /// Atomically OR @a __i into @a *__a using order @a __m; returns the
  /// value held before the operation.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }
1141
  /// Atomically XOR @a __i into @a *__a using order @a __m; returns the
  /// value held before the operation.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }
1153
  /// Atomic fetch-add with the default (seq_cst) memory order.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1163
  /// Atomic fetch-subtract with the default (seq_cst) memory order.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1173
  /// Atomic fetch-AND with the default (seq_cst) memory order.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1183
  /// Atomic fetch-OR with the default (seq_cst) memory order.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1193
  /// Atomic fetch-XOR with the default (seq_cst) memory order.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1203
1204
1205  // Partial specializations for pointers.
  /// Pointer overload: atomically advance the stored pointer by @a __d
  /// elements using order @a __m; returns the pointer held before.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
			      memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
			      memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }
1217
  /// Pointer overload: atomically advance the stored pointer by @a __d
  /// elements. Calls the member fetch_add directly, so the member's
  /// default (seq_cst) memory order applies.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  /// Overload for non-volatile atomic objects.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }
1227
  /// Pointer overload: atomically move the stored pointer back by @a __d
  /// elements using order @a __m; returns the pointer held before.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
			      ptrdiff_t __d, memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  /// Overload for non-volatile atomic objects.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }
1239
  /// Pointer overload: atomically move the stored pointer back by @a __d
  /// elements. Calls the member fetch_sub directly, so the member's
  /// default (seq_cst) memory order applies.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  /// Overload for non-volatile atomic objects.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }
1249  // @} group atomics
1250
1251_GLIBCXX_END_NAMESPACE_VERSION
1252} // namespace
1253
1254#endif // C++11
1255
1256#endif // _GLIBCXX_ATOMIC
1257