1// -*- C++ -*- header. 2 3// Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc. 4// 5// This file is part of the GNU ISO C++ Library. This library is free 6// software; you can redistribute it and/or modify it under the 7// terms of the GNU General Public License as published by the 8// Free Software Foundation; either version 3, or (at your option) 9// any later version. 10 11// This library is distributed in the hope that it will be useful, 12// but WITHOUT ANY WARRANTY; without even the implied warranty of 13// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14// GNU General Public License for more details. 15 16// Under Section 7 of GPL version 3, you are granted additional 17// permissions described in the GCC Runtime Library Exception, version 18// 3.1, as published by the Free Software Foundation. 19 20// You should have received a copy of the GNU General Public License and 21// a copy of the GCC Runtime Library Exception along with this program; 22// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see 23// <http://www.gnu.org/licenses/>. 24 25/** @file include/atomic 26 * This is a Standard C++ Library header. 27 */ 28 29// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl. 30// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html 31 32#ifndef _GLIBCXX_ATOMIC 33#define _GLIBCXX_ATOMIC 1 34 35#pragma GCC system_header 36 37#ifndef __GXX_EXPERIMENTAL_CXX0X__ 38# include <bits/c++0x_warning.h> 39#endif 40 41#include <bits/atomic_base.h> 42 43namespace std _GLIBCXX_VISIBILITY(default) 44{ 45_GLIBCXX_BEGIN_NAMESPACE_VERSION 46 47 /** 48 * @addtogroup atomics 49 * @{ 50 */ 51 52 /// atomic_bool 53 // NB: No operators or fetch-operations for this type. 
  struct atomic_bool
  {
  private:
    // Every operation forwards to the lock-free integral base for bool.
    __atomic_base<bool> _M_base;

  public:
    atomic_bool() noexcept = default;
    ~atomic_bool() noexcept = default;
    atomic_bool(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) volatile = delete;

    // Non-explicit by design: the standard specifies implicit
    // construction from bool.
    constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { }

    // NB: assignment returns the stored bool, not atomic_bool&,
    // as the standard requires.
    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    bool
    operator=(bool __i) volatile noexcept
    { return _M_base.operator=(__i); }

    // Implicit conversion performs a load with the base's default
    // (seq_cst) ordering.
    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i,
             memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    // Two-order compare-and-exchange: __m1 is the success order,
    // __m2 the failure order; both forwarded to the base.
    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    // Single-order forms: the base's three-argument overload derives
    // an appropriate failure order from __m.
    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };


  /// atomic
  /// 29.4.3, Generic atomic type, primary class template.
157 template<typename _Tp> 158 struct atomic 159 { 160 private: 161 _Tp _M_i; 162 163 public: 164 atomic() noexcept = default; 165 ~atomic() noexcept = default; 166 atomic(const atomic&) = delete; 167 atomic& operator=(const atomic&) = delete; 168 atomic& operator=(const atomic&) volatile = delete; 169 170 constexpr atomic(_Tp __i) noexcept : _M_i(__i) { } 171 172 operator _Tp() const noexcept 173 { return load(); } 174 175 operator _Tp() const volatile noexcept 176 { return load(); } 177 178 _Tp 179 operator=(_Tp __i) noexcept 180 { store(__i); return __i; } 181 182 _Tp 183 operator=(_Tp __i) volatile noexcept 184 { store(__i); return __i; } 185 186 bool 187 is_lock_free() const noexcept 188 { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); } 189 190 bool 191 is_lock_free() const volatile noexcept 192 { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); } 193 194 void 195 store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept 196 { __atomic_store(&_M_i, &__i, _m); } 197 198 void 199 store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept 200 { __atomic_store(&_M_i, &__i, _m); } 201 202 _Tp 203 load(memory_order _m = memory_order_seq_cst) const noexcept 204 { 205 _Tp tmp; 206 __atomic_load(&_M_i, &tmp, _m); 207 return tmp; 208 } 209 210 _Tp 211 load(memory_order _m = memory_order_seq_cst) const volatile noexcept 212 { 213 _Tp tmp; 214 __atomic_load(&_M_i, &tmp, _m); 215 return tmp; 216 } 217 218 _Tp 219 exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept 220 { 221 _Tp tmp; 222 __atomic_exchange(&_M_i, &__i, &tmp, _m); 223 return tmp; 224 } 225 226 _Tp 227 exchange(_Tp __i, 228 memory_order _m = memory_order_seq_cst) volatile noexcept 229 { 230 _Tp tmp; 231 __atomic_exchange(&_M_i, &__i, &tmp, _m); 232 return tmp; 233 } 234 235 bool 236 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s, 237 memory_order __f) noexcept 238 { 239 return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f); 240 } 241 242 bool 
243 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s, 244 memory_order __f) volatile noexcept 245 { 246 return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f); 247 } 248 249 bool 250 compare_exchange_weak(_Tp& __e, _Tp __i, 251 memory_order __m = memory_order_seq_cst) noexcept 252 { return compare_exchange_weak(__e, __i, __m, __m); } 253 254 bool 255 compare_exchange_weak(_Tp& __e, _Tp __i, 256 memory_order __m = memory_order_seq_cst) volatile noexcept 257 { return compare_exchange_weak(__e, __i, __m, __m); } 258 259 bool 260 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s, 261 memory_order __f) noexcept 262 { 263 return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f); 264 } 265 266 bool 267 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s, 268 memory_order __f) volatile noexcept 269 { 270 return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f); 271 } 272 273 bool 274 compare_exchange_strong(_Tp& __e, _Tp __i, 275 memory_order __m = memory_order_seq_cst) noexcept 276 { return compare_exchange_strong(__e, __i, __m, __m); } 277 278 bool 279 compare_exchange_strong(_Tp& __e, _Tp __i, 280 memory_order __m = memory_order_seq_cst) volatile noexcept 281 { return compare_exchange_strong(__e, __i, __m, __m); } 282 }; 283 284 285 /// Partial specialization for pointer types. 
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp* __pointer_type;
      typedef __atomic_base<_Tp*> __base_type;
      // NB: unlike the primary template, the base object is public here.
      __base_type _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      // Conversion goes through the base's conversion operator
      // (i.e. performs a load).
      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      // Atomic pointer arithmetic, all forwarded to the base.
      __pointer_type
      operator++(int) noexcept
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile noexcept
      { return _M_b++; }

      __pointer_type
      operator--(int) noexcept
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile noexcept
      { return _M_b--; }

      __pointer_type
      operator++() noexcept
      { return ++_M_b; }

      __pointer_type
      operator++() volatile noexcept
      { return ++_M_b; }

      __pointer_type
      operator--() noexcept
      { return --_M_b; }

      __pointer_type
      operator--() volatile noexcept
      { return --_M_b; }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator-=(__d); }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

      // NB: 'return' of a void expression — legal, though unusual.
      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      // NOTE(review): compare_exchange_weak delegates to the base's
      // *strong* CAS — presumably because the pointer __atomic_base does
      // not provide a weak form.  A strong CAS is a conforming (never
      // spuriously failing) implementation of the weak contract, but it
      // forgoes the cheaper weak operation on LL/SC targets.  TODO:
      // forward to a weak base operation once one is available.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      // Single-order forms derive a legal failure order from __m.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      // Pointer arithmetic in units of _Tp (scaling is presumably done
      // by the base — not visible here).
      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_sub(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_sub(__d, __m); }
    };


  /// Explicit specialization for bool.
  // Inherits all operations; only constructors, assignment and the
  // conversion operator need re-exposing.
  template<>
    struct atomic<bool> : public atomic_bool
    {
      typedef bool __integral_type;
      typedef atomic_bool __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char.
  // Each integral specialization below simply wraps the corresponding
  // atomic_* typedef'd base, re-exposing its conversion and assignment
  // operators.
  template<>
    struct atomic<char> : public atomic_char
    {
      typedef char __integral_type;
      typedef atomic_char __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : public atomic_schar
    {
      typedef signed char __integral_type;
      typedef atomic_schar __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : public atomic_uchar
    {
      typedef unsigned char __integral_type;
      typedef atomic_uchar __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for short.
552 template<> 553 struct atomic<short> : public atomic_short 554 { 555 typedef short __integral_type; 556 typedef atomic_short __base_type; 557 558 atomic() noexcept = default; 559 ~atomic() noexcept = default; 560 atomic(const atomic&) = delete; 561 atomic& operator=(const atomic&) = delete; 562 atomic& operator=(const atomic&) volatile = delete; 563 564 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } 565 566 using __base_type::operator __integral_type; 567 using __base_type::operator=; 568 }; 569 570 /// Explicit specialization for unsigned short. 571 template<> 572 struct atomic<unsigned short> : public atomic_ushort 573 { 574 typedef unsigned short __integral_type; 575 typedef atomic_ushort __base_type; 576 577 atomic() noexcept = default; 578 ~atomic() noexcept = default; 579 atomic(const atomic&) = delete; 580 atomic& operator=(const atomic&) = delete; 581 atomic& operator=(const atomic&) volatile = delete; 582 583 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } 584 585 using __base_type::operator __integral_type; 586 using __base_type::operator=; 587 }; 588 589 /// Explicit specialization for int. 590 template<> 591 struct atomic<int> : atomic_int 592 { 593 typedef int __integral_type; 594 typedef atomic_int __base_type; 595 596 atomic() noexcept = default; 597 ~atomic() noexcept = default; 598 atomic(const atomic&) = delete; 599 atomic& operator=(const atomic&) = delete; 600 atomic& operator=(const atomic&) volatile = delete; 601 602 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } 603 604 using __base_type::operator __integral_type; 605 using __base_type::operator=; 606 }; 607 608 /// Explicit specialization for unsigned int. 
  template<>
    struct atomic<unsigned int> : public atomic_uint
    {
      typedef unsigned int __integral_type;
      typedef atomic_uint __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : public atomic_long
    {
      typedef long __integral_type;
      typedef atomic_long __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : public atomic_ulong
    {
      typedef unsigned long __integral_type;
      typedef atomic_ulong __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : public atomic_llong
    {
      typedef long long __integral_type;
      typedef atomic_llong __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : public atomic_ullong
    {
      typedef unsigned long long __integral_type;
      typedef atomic_ullong __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : public atomic_wchar_t
    {
      typedef wchar_t __integral_type;
      typedef atomic_wchar_t __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : public atomic_char16_t
    {
      typedef char16_t __integral_type;
      typedef atomic_char16_t __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : public atomic_char32_t
    {
      typedef char32_t __integral_type;
      typedef atomic_char32_t __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };


  // Function definitions, atomic_flag operations.
  /// Atomically set the flag, returning its previous value.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  /// Atomically clear the flag.
  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  // Non-"_explicit" forms use sequentially consistent ordering.
  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }


  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  // NB: atomic_init is declared but not defined in this header.
  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  // NB: the C-compatible non-member CAS forms take the expected value
  // through a pointer (__i1) rather than a reference.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }


  // Non-"_explicit" forms use sequentially consistent ordering.
  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  // Function templates for atomic_integral operations only, using
  // __atomic_base.  Template argument should be constrained to
  // integral types as specified in the standard, excluding address
  // types.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }


  // Partial specializations for pointers.
  // These take the pointer displacement as ptrdiff_t and go through
  // the atomic<_ITp*> member functions.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
                              ptrdiff_t __d, memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }
  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif