// -*- C++ -*- header.

// Copyright (C) 2008-2014 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file include/atomic
 *  This is a Standard C++ Library header.
 */

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html

#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#endif

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @addtogroup atomics
   * @{
   */

  /// atomic_bool
  // NB: No operators or fetch-operations for this type.
  struct atomic_bool
  {
  private:
    __atomic_base<bool> _M_base;

  public:
    atomic_bool() noexcept = default;
    ~atomic_bool() noexcept = default;
    atomic_bool(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) volatile = delete;

    constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { }

    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    bool
    operator=(bool __i) volatile noexcept
    { return _M_base.operator=(__i); }

    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i,
             memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };


  /**
   * @brief Generic atomic type, primary class template.
   *
   * @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
    private:
      _Tp _M_i;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const volatile noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      _Tp
      exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      _Tp
      exchange(_Tp __i,
               memory_order _m = memory_order_seq_cst) volatile noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }
    };


  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp* __pointer_type;
      typedef __atomic_base<_Tp*> __base_type;
      __base_type _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile noexcept
      { return _M_b++; }

      __pointer_type
      operator--(int) noexcept
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile noexcept
      { return _M_b--; }

      __pointer_type
      operator++() noexcept
      { return ++_M_b; }

      __pointer_type
      operator++() volatile noexcept
      { return ++_M_b; }

      __pointer_type
      operator--() noexcept
      { return --_M_b; }

      __pointer_type
      operator--() volatile noexcept
      { return --_M_b; }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator-=(__d); }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_sub(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_sub(__d, __m); }
    };

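
  // A brief usage sketch for the primary template and the pointer
  // specialization above.  The type `Point' and the function names are
  // illustrative only; they are not declared by this header.
  //
  //   #include <atomic>
  //
  //   struct Point { int x, y; };                 // trivially copyable
  //
  //   void nudge(std::atomic<Point>& p)
  //   {
  //     Point expected = p.load();                // memory_order_seq_cst
  //     Point desired{expected.x + 1, expected.y + 1};
  //     // compare_exchange_weak may fail spuriously, so retry in a loop;
  //     // on failure `expected' is refreshed with the currently stored value.
  //     while (!p.compare_exchange_weak(expected, desired))
  //       desired = Point{expected.x + 1, expected.y + 1};
  //   }
  //
  //   void walk(std::atomic<int*>& cur)
  //   {
  //     int* prev = cur.fetch_add(1);             // advance by one element
  //     ++cur;                                    // equivalent to fetch_add(1)
  //     (void)prev;
  //   }
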
  /// Explicit specialization for bool.
  template<>
    struct atomic<bool> : public atomic_bool
    {
      typedef bool __integral_type;
      typedef atomic_bool __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char.
  template<>
    struct atomic<char> : public atomic_char
    {
      typedef char __integral_type;
      typedef atomic_char __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : public atomic_schar
    {
      typedef signed char __integral_type;
      typedef atomic_schar __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : public atomic_uchar
    {
      typedef unsigned char __integral_type;
      typedef atomic_uchar __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : public atomic_short
    {
      typedef short __integral_type;
      typedef atomic_short __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : public atomic_ushort
    {
      typedef unsigned short __integral_type;
      typedef atomic_ushort __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

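
  // A brief sketch of the atomic<bool> specialization above (equivalently
  // atomic_bool); the names `claimed' and `try_claim' are illustrative only.
  //
  //   #include <atomic>
  //
  //   std::atomic<bool> claimed{false};
  //
  //   // exchange() atomically stores `true' and returns the previous value,
  //   // so exactly one caller observes `false' and wins.
  //   bool try_claim()
  //   { return !claimed.exchange(true); }
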
  /// Explicit specialization for int.
  template<>
    struct atomic<int> : public atomic_int
    {
      typedef int __integral_type;
      typedef atomic_int __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : public atomic_uint
    {
      typedef unsigned int __integral_type;
      typedef atomic_uint __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : public atomic_long
    {
      typedef long __integral_type;
      typedef atomic_long __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : public atomic_ulong
    {
      typedef unsigned long __integral_type;
      typedef atomic_ulong __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : public atomic_llong
    {
      typedef long long __integral_type;
      typedef atomic_llong __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : public atomic_ullong
    {
      typedef unsigned long long __integral_type;
      typedef atomic_ullong __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : public atomic_wchar_t
    {
      typedef wchar_t __integral_type;
      typedef atomic_wchar_t __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : public atomic_char16_t
    {
      typedef char16_t __integral_type;
      typedef atomic_char16_t __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : public atomic_char32_t
    {
      typedef char32_t __integral_type;
      typedef atomic_char32_t __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };


  // Function definitions, atomic_flag operations.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

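
  // A brief sketch of the atomic_flag free functions above, used as a
  // spinlock; the names `lock_flag', `spin_lock' and `spin_unlock' are
  // illustrative only.
  //
  //   #include <atomic>
  //
  //   std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;
  //
  //   void spin_lock()
  //   {
  //     // test_and_set returns the previous value; keep spinning until this
  //     // thread is the one that changed the flag from clear to set.
  //     while (std::atomic_flag_test_and_set_explicit(&lock_flag,
  //                                                   std::memory_order_acquire))
  //       ;
  //   }
  //
  //   void spin_unlock()
  //   { std::atomic_flag_clear_explicit(&lock_flag, std::memory_order_release); }
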
  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

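
  // A brief sketch of the explicit free functions above, implementing a
  // "raise to maximum" update on an atomic counter; the names `counter' and
  // `raise_to' are illustrative only.
  //
  //   #include <atomic>
  //
  //   std::atomic<int> counter{0};
  //
  //   void raise_to(int value)
  //   {
  //     int cur = std::atomic_load_explicit(&counter, std::memory_order_relaxed);
  //     // The strong CAS does not fail spuriously; on failure `cur' is
  //     // refreshed, so the loop re-checks against the latest stored value.
  //     while (cur < value
  //            && !std::atomic_compare_exchange_strong_explicit(
  //                  &counter, &cur, value,
  //                  std::memory_order_release, std::memory_order_relaxed))
  //       ;
  //   }
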

  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  // Function templates for atomic_integral operations only, using
  // __atomic_base.  Template argument should be restricted to
  // integral types as specified in the standard, excluding address
  // types.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

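
  // A brief sketch of the integral fetch operations above; the names `hits',
  // `flags' and `record' are illustrative only.
  //
  //   #include <atomic>
  //
  //   std::atomic<unsigned> hits{0};
  //   std::atomic<unsigned> flags{0};
  //
  //   void record(unsigned bit)
  //   {
  //     // Relaxed ordering is enough for a plain statistics counter that is
  //     // only read after the worker threads have been joined.
  //     std::atomic_fetch_add_explicit(&hits, 1u, std::memory_order_relaxed);
  //     // fetch_or sets one bit and returns the previous mask.
  //     std::atomic_fetch_or_explicit(&flags, 1u << bit, std::memory_order_relaxed);
  //   }
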
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }


  // Partial specializations for pointers.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
                              ptrdiff_t __d, memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }
  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif