// -*- C++ -*- header.

// Copyright (C) 2008-2021 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

// ????????????????????????????????????????????????????????????????????
//
// This is a copy of the libstdc++ header, with the trivial modification
// of ignoring the c++config.h include.  If and when the top-level build is
// fixed so that target libraries can be built using the newly built
// compiler, we can delete this file.
//
// ????????????????????????????????????????????????????????????????????

/** @file include/atomic
 *  This is a Standard C++ Library header.
 */

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html

#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#define __libitm_always_inline __attribute__((always_inline))
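// NB: forced inlining here is, as we read it, what lets libitm use this
// private copy of the header: every atomic operation expands in place, so
// nothing ever needs an out-of-line definition from a not-yet-built
// libstdc++.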

// #pragma GCC system_header

// #ifndef __GXX_EXPERIMENTAL_CXX0X__
// # include <bits/c++0x_warning.h>
// #endif

// #include <bits/atomic_base.h>

namespace std // _GLIBCXX_VISIBILITY(default)
{
// _GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;

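  // Derive a failure order from a success order for the single-order
  // compare-and-exchange overloads below: release collapses to relaxed and
  // acq_rel to acquire, since a failed exchange performs no store.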
  inline __libitm_always_inline memory_order
  __calculate_memory_order(memory_order __m) noexcept
  {
    const bool __cond1 = __m == memory_order_release;
    const bool __cond2 = __m == memory_order_acq_rel;
    memory_order __mo1(__cond1 ? memory_order_relaxed : __m);
    memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
    return __mo2;
  }

  inline __libitm_always_inline void
  atomic_thread_fence(memory_order __m) noexcept
  {
    __atomic_thread_fence (__m);
  }

  inline __libitm_always_inline void
  atomic_signal_fence(memory_order __m) noexcept
  {
    __atomic_signal_fence (__m);
  }

  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
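
  // The copy through __ret deliberately ends any memory_order_consume
  // dependency carried by __y, so later uses of the result are not
  // dependency-ordered after the originating load.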

  /// Lock-free Property


#define ATOMIC_BOOL_LOCK_FREE		__GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE		__GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE	__GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE	__GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE	__GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE		__GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE		__GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE		__GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE		__GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE	__GCC_ATOMIC_POINTER_LOCK_FREE

  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;

  /// atomic_char
  typedef __atomic_base<char>			atomic_char;

  /// atomic_schar
  typedef __atomic_base<signed char>		atomic_schar;

  /// atomic_uchar
  typedef __atomic_base<unsigned char>		atomic_uchar;

  /// atomic_short
  typedef __atomic_base<short>			atomic_short;

  /// atomic_ushort
  typedef __atomic_base<unsigned short>		atomic_ushort;

  /// atomic_int
  typedef __atomic_base<int>			atomic_int;

  /// atomic_uint
  typedef __atomic_base<unsigned int>		atomic_uint;

  /// atomic_long
  typedef __atomic_base<long>			atomic_long;

  /// atomic_ulong
  typedef __atomic_base<unsigned long>		atomic_ulong;

  /// atomic_llong
  typedef __atomic_base<long long>		atomic_llong;

  /// atomic_ullong
  typedef __atomic_base<unsigned long long>	atomic_ullong;

  /// atomic_wchar_t
  typedef __atomic_base<wchar_t>		atomic_wchar_t;

  /// atomic_char16_t
  typedef __atomic_base<char16_t>		atomic_char16_t;

  /// atomic_char32_t
  typedef __atomic_base<char32_t>		atomic_char32_t;


  /// atomic_int_least8_t
  typedef __atomic_base<int_least8_t>		atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef __atomic_base<uint_least8_t>		atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef __atomic_base<int_least16_t>		atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef __atomic_base<uint_least16_t>		atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef __atomic_base<int_least32_t>		atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef __atomic_base<uint_least32_t>		atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef __atomic_base<int_least64_t>		atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef __atomic_base<uint_least64_t>		atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef __atomic_base<int_fast8_t>		atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef __atomic_base<uint_fast8_t>		atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef __atomic_base<int_fast16_t>		atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef __atomic_base<uint_fast16_t>		atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef __atomic_base<int_fast32_t>		atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef __atomic_base<uint_fast32_t>		atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef __atomic_base<int_fast64_t>		atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef __atomic_base<uint_fast64_t>		atomic_uint_fast64_t;


  /// atomic_intptr_t
  typedef __atomic_base<intptr_t>		atomic_intptr_t;

  /// atomic_uintptr_t
  typedef __atomic_base<uintptr_t>		atomic_uintptr_t;

  /// atomic_size_t
  typedef __atomic_base<size_t>			atomic_size_t;

  /// atomic_intmax_t
  typedef __atomic_base<intmax_t>		atomic_intmax_t;

  /// atomic_uintmax_t
  typedef __atomic_base<uintmax_t>		atomic_uintmax_t;

  /// atomic_ptrdiff_t
  typedef __atomic_base<ptrdiff_t>		atomic_ptrdiff_t;


#define ATOMIC_VAR_INIT(_VI) { _VI }
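
  // Illustrative use (hypothetical user code, not part of this header):
  //   std::atomic<int> counter = ATOMIC_VAR_INIT(0);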

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;

  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
  */
  // _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    bool _M_i;
  };

  // _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { false }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { }

    __libitm_always_inline bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    __libitm_always_inline bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    __libitm_always_inline void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      // __glibcxx_assert(__m != memory_order_consume);
      // __glibcxx_assert(__m != memory_order_acquire);
      // __glibcxx_assert(__m != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    __libitm_always_inline void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      // __glibcxx_assert(__m != memory_order_consume);
      // __glibcxx_assert(__m != memory_order_acquire);
      // __glibcxx_assert(__m != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }
  };
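
  // Illustrative use (hypothetical user code, not part of this header):
  // atomic_flag as a minimal test-and-set spinlock.
  //   std::atomic_flag lock = ATOMIC_FLAG_INIT;
  //   while (lock.test_and_set(std::memory_order_acquire))
  //     ;                                // spin until the flag was clear
  //   /* ...critical section... */
  //   lock.clear(std::memory_order_release);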


  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp	__int_type;

      __int_type	_M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
	store(__i);
	return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
	store(__i);
	return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

      __libitm_always_inline void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
	// __glibcxx_assert(__m != memory_order_acquire);
	// __glibcxx_assert(__m != memory_order_acq_rel);
	// __glibcxx_assert(__m != memory_order_consume);

	__atomic_store_n(&_M_i, __i, __m);
      }

      __libitm_always_inline void
      store(__int_type __i,
	    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	// __glibcxx_assert(__m != memory_order_acquire);
	// __glibcxx_assert(__m != memory_order_acq_rel);
	// __glibcxx_assert(__m != memory_order_consume);

	__atomic_store_n(&_M_i, __i, __m);
      }

      __libitm_always_inline __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
	// __glibcxx_assert(__m != memory_order_release);
	// __glibcxx_assert(__m != memory_order_acq_rel);

	return __atomic_load_n(&_M_i, __m);
      }

      __libitm_always_inline __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
	// __glibcxx_assert(__m != memory_order_release);
	// __glibcxx_assert(__m != memory_order_acq_rel);

	return __atomic_load_n(&_M_i, __m);
      }

      __libitm_always_inline __int_type
      exchange(__int_type __i,
	       memory_order __m = memory_order_seq_cst) noexcept
      {
	return __atomic_exchange_n(&_M_i, __i, __m);
      }

      __libitm_always_inline __int_type
      exchange(__int_type __i,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return __atomic_exchange_n(&_M_i, __i, __m);
      }

      __libitm_always_inline bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m1, memory_order __m2) noexcept
      {
	// __glibcxx_assert(__m2 != memory_order_release);
	// __glibcxx_assert(__m2 != memory_order_acq_rel);
	// __glibcxx_assert(__m2 <= __m1);

	return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      __libitm_always_inline bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m1,
			    memory_order __m2) volatile noexcept
      {
	// __glibcxx_assert(__m2 != memory_order_release);
	// __glibcxx_assert(__m2 != memory_order_acq_rel);
	// __glibcxx_assert(__m2 <= __m1);

	return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      __libitm_always_inline bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m = memory_order_seq_cst) noexcept
      {
	return compare_exchange_weak(__i1, __i2, __m,
				     __calculate_memory_order(__m));
      }

      __libitm_always_inline bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
		   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return compare_exchange_weak(__i1, __i2, __m,
				     __calculate_memory_order(__m));
      }

      __libitm_always_inline bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m1, memory_order __m2) noexcept
      {
	// __glibcxx_assert(__m2 != memory_order_release);
	// __glibcxx_assert(__m2 != memory_order_acq_rel);
	// __glibcxx_assert(__m2 <= __m1);

	return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      __libitm_always_inline bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m1,
			      memory_order __m2) volatile noexcept
      {
	// __glibcxx_assert(__m2 != memory_order_release);
	// __glibcxx_assert(__m2 != memory_order_acq_rel);
	// __glibcxx_assert(__m2 <= __m1);

	return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      __libitm_always_inline bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m = memory_order_seq_cst) noexcept
      {
	return compare_exchange_strong(__i1, __i2, __m,
				       __calculate_memory_order(__m));
      }

      __libitm_always_inline bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
		 memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return compare_exchange_strong(__i1, __i2, __m,
				       __calculate_memory_order(__m));
      }

      __libitm_always_inline __int_type
      fetch_add(__int_type __i,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_add(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_sub(__int_type __i,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_sub(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_and(__int_type __i,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_and(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_or(__int_type __i,
	       memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_or(__int_type __i,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_xor(__int_type __i,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_xor(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
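
  // Every member above maps directly onto a GCC __atomic built-in applied
  // to _M_i: the operators hard-code memory_order_seq_cst, while the named
  // members take an explicit memory_order argument.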


  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp*	__pointer_type;

      __pointer_type	_M_p;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
	store(__p);
	return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
	store(__p);
	return __p;
      }

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, 1, memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, 1, memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, 1, memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, 1, memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, __d, memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, __d, memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, __d, memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, __d, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }

      __libitm_always_inline void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) noexcept
      {
	// __glibcxx_assert(__m != memory_order_acquire);
	// __glibcxx_assert(__m != memory_order_acq_rel);
	// __glibcxx_assert(__m != memory_order_consume);

	__atomic_store_n(&_M_p, __p, __m);
      }

      __libitm_always_inline void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	// __glibcxx_assert(__m != memory_order_acquire);
	// __glibcxx_assert(__m != memory_order_acq_rel);
	// __glibcxx_assert(__m != memory_order_consume);

	__atomic_store_n(&_M_p, __p, __m);
      }

      __libitm_always_inline __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
	// __glibcxx_assert(__m != memory_order_release);
	// __glibcxx_assert(__m != memory_order_acq_rel);

	return __atomic_load_n(&_M_p, __m);
      }

      __libitm_always_inline __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
	// __glibcxx_assert(__m != memory_order_release);
	// __glibcxx_assert(__m != memory_order_acq_rel);

	return __atomic_load_n(&_M_p, __m);
      }

      __libitm_always_inline __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) noexcept
      {
	return __atomic_exchange_n(&_M_p, __p, __m);
      }

      __libitm_always_inline __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return __atomic_exchange_n(&_M_p, __p, __m);
      }

      __libitm_always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1,
			      memory_order __m2) noexcept
      {
	// __glibcxx_assert(__m2 != memory_order_release);
	// __glibcxx_assert(__m2 != memory_order_acq_rel);
	// __glibcxx_assert(__m2 <= __m1);

	return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      __libitm_always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1,
			      memory_order __m2) volatile noexcept
      {
	// __glibcxx_assert(__m2 != memory_order_release);
	// __glibcxx_assert(__m2 != memory_order_acq_rel);
	// __glibcxx_assert(__m2 <= __m1);

	return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      __libitm_always_inline __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, __d, __m); }

      __libitm_always_inline __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, __d, __m); }

      __libitm_always_inline __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, __d, __m); }

      __libitm_always_inline __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, __d, __m); }
    };
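
  // NB: the ptrdiff_t offsets above are handed to the built-ins unscaled,
  // and this pointer specialization provides only compare_exchange_strong;
  // atomic<_Tp*> below builds its weak form on top of it.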


  /**
   * @addtogroup atomics
   * @{
   */

  /// atomic_bool
  // NB: No operators or fetch-operations for this type.
  struct atomic_bool
  {
  private:
    __atomic_base<bool>	_M_base;

  public:
    atomic_bool() noexcept = default;
    ~atomic_bool() noexcept = default;
    atomic_bool(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) volatile = delete;

    constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { }

    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

    __libitm_always_inline void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    __libitm_always_inline void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    __libitm_always_inline bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    __libitm_always_inline bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    __libitm_always_inline bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    __libitm_always_inline bool
    exchange(bool __i,
	     memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    __libitm_always_inline bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			  memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    __libitm_always_inline bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			  memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    __libitm_always_inline bool
    compare_exchange_weak(bool& __i1, bool __i2,
			  memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    __libitm_always_inline bool
    compare_exchange_weak(bool& __i1, bool __i2,
		     memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    __libitm_always_inline bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    __libitm_always_inline bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    __libitm_always_inline bool
    compare_exchange_strong(bool& __i1, bool __i2,
			    memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    __libitm_always_inline bool
    compare_exchange_strong(bool& __i1, bool __i2,
		    memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };


  /// atomic
  /// 29.4.3, Generic atomic type, primary class template.
  template<typename _Tp>
    struct atomic
    {
    private:
      _Tp _M_i;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }

      __libitm_always_inline void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      __libitm_always_inline void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      __libitm_always_inline _Tp
      load(memory_order _m = memory_order_seq_cst) const noexcept
      {
	_Tp tmp;
	__atomic_load(&_M_i, &tmp, _m);
	return tmp;
      }

      __libitm_always_inline _Tp
      load(memory_order _m = memory_order_seq_cst) const volatile noexcept
      {
	_Tp tmp;
	__atomic_load(&_M_i, &tmp, _m);
	return tmp;
      }

      __libitm_always_inline _Tp
      exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      {
	_Tp tmp;
	__atomic_exchange(&_M_i, &__i, &tmp, _m);
	return tmp;
      }

      __libitm_always_inline _Tp
      exchange(_Tp __i,
	       memory_order _m = memory_order_seq_cst) volatile noexcept
      {
	_Tp tmp;
	__atomic_exchange(&_M_i, &__i, &tmp, _m);
	return tmp;
      }

      __libitm_always_inline bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) noexcept
      {
	return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      __libitm_always_inline bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) volatile noexcept
      {
	return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      __libitm_always_inline bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
			    memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m, __m); }

      __libitm_always_inline bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
		     memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m, __m); }

      __libitm_always_inline bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) noexcept
      {
	return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      __libitm_always_inline bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) volatile noexcept
      {
	return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      __libitm_always_inline bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
			      memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m, __m); }

      __libitm_always_inline bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
		     memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m, __m); }
    };
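
  // The generic template moves whole objects through temporaries with the
  // object-based __atomic_load/__atomic_store/__atomic_exchange/
  // __atomic_compare_exchange built-ins, so _Tp need only be trivially
  // copyable.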


  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp*			__pointer_type;
      typedef __atomic_base<_Tp*>	__base_type;
      __base_type			_M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile noexcept
      { return _M_b++; }

      __pointer_type
      operator--(int) noexcept
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile noexcept
      { return _M_b--; }

      __pointer_type
      operator++() noexcept
      { return ++_M_b; }

      __pointer_type
      operator++() volatile noexcept
      { return ++_M_b; }

      __pointer_type
      operator--() noexcept
      { return --_M_b; }

      __pointer_type
      operator--() volatile noexcept
      { return --_M_b; }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator-=(__d); }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

      __libitm_always_inline void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      __libitm_always_inline void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __libitm_always_inline __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __libitm_always_inline __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __libitm_always_inline __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __libitm_always_inline __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

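      // NB: the compare_exchange_weak overloads below forward to the strong
      // form, since the pointer __atomic_base provides only that one.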
      __libitm_always_inline bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      __libitm_always_inline bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1,
			    memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      __libitm_always_inline bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m = memory_order_seq_cst) noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __calculate_memory_order(__m));
      }

      __libitm_always_inline bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
		    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __calculate_memory_order(__m));
      }

      __libitm_always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      __libitm_always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1,
			      memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      __libitm_always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m = memory_order_seq_cst) noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __calculate_memory_order(__m));
      }

      __libitm_always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
		    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __calculate_memory_order(__m));
      }

      __libitm_always_inline __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_add(__d, __m); }

      __libitm_always_inline __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_add(__d, __m); }

      __libitm_always_inline __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_sub(__d, __m); }

      __libitm_always_inline __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_sub(__d, __m); }
    };


  /// Explicit specialization for bool.
  template<>
    struct atomic<bool> : public atomic_bool
    {
      typedef bool			__integral_type;
      typedef atomic_bool		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char.
  template<>
    struct atomic<char> : public atomic_char
    {
      typedef char			__integral_type;
      typedef atomic_char		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : public atomic_schar
    {
      typedef signed char		__integral_type;
      typedef atomic_schar		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : public atomic_uchar
    {
      typedef unsigned char		__integral_type;
      typedef atomic_uchar		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : public atomic_short
    {
      typedef short			__integral_type;
      typedef atomic_short		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : public atomic_ushort
    {
      typedef unsigned short		__integral_type;
      typedef atomic_ushort		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : public atomic_int
    {
      typedef int			__integral_type;
      typedef atomic_int		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : public atomic_uint
    {
      typedef unsigned int		__integral_type;
      typedef atomic_uint		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : public atomic_long
    {
      typedef long			__integral_type;
      typedef atomic_long		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : public atomic_ulong
    {
      typedef unsigned long		__integral_type;
      typedef atomic_ulong		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : public atomic_llong
    {
      typedef long long			__integral_type;
      typedef atomic_llong		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : public atomic_ullong
    {
      typedef unsigned long long	__integral_type;
      typedef atomic_ullong		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : public atomic_wchar_t
    {
      typedef wchar_t			__integral_type;
      typedef atomic_wchar_t		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : public atomic_char16_t
    {
      typedef char16_t			__integral_type;
      typedef atomic_char16_t		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : public atomic_char32_t
    {
      typedef char32_t			__integral_type;
      typedef atomic_char32_t		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };


  // Function definitions, atomic_flag operations.
  inline __libitm_always_inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline __libitm_always_inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline __libitm_always_inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline __libitm_always_inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
			     memory_order __m) noexcept
  { __a->clear(__m); }

  inline __libitm_always_inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline __libitm_always_inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline __libitm_always_inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline __libitm_always_inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
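
  // C-compatible free functions: each *_explicit form forwards to the
  // corresponding member function, and the plain form supplies
  // memory_order_seq_cst.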
1580
1581
1582  // Function templates generally applicable to atomic types.
1583  template<typename _ITp>
1584    __libitm_always_inline bool
1585    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
1586    { return __a->is_lock_free(); }
1587
1588  template<typename _ITp>
1589    __libitm_always_inline bool
1590    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
1591    { return __a->is_lock_free(); }
1592
1593  template<typename _ITp>
1594    __libitm_always_inline void
1595    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;
1596
1597  template<typename _ITp>
1598    __libitm_always_inline void
1599    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;
1600
1601  template<typename _ITp>
1602    __libitm_always_inline void
1603    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
1604			  memory_order __m) noexcept
1605    { __a->store(__i, __m); }
1606
1607  template<typename _ITp>
1608    __libitm_always_inline void
1609    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
1610			  memory_order __m) noexcept
1611    { __a->store(__i, __m); }
1612
1613  template<typename _ITp>
1614    __libitm_always_inline _ITp
1615    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
1616    { return __a->load(__m); }
1617
1618  template<typename _ITp>
1619    __libitm_always_inline _ITp
1620    atomic_load_explicit(const volatile atomic<_ITp>* __a,
1621			 memory_order __m) noexcept
1622    { return __a->load(__m); }
1623
1624  template<typename _ITp>
1625    __libitm_always_inline _ITp
1626    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
1627			     memory_order __m) noexcept
1628    { return __a->exchange(__i, __m); }
1629
1630  template<typename _ITp>
1631    __libitm_always_inline _ITp
1632    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
1633			     memory_order __m) noexcept
1634    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    __libitm_always_inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    __libitm_always_inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    __libitm_always_inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }


  template<typename _ITp>
    __libitm_always_inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    __libitm_always_inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    __libitm_always_inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    __libitm_always_inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }
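
  // Illustrative sketch: the canonical compare-exchange retry loop.
  // On failure, *__i1 (here __expected) is refreshed with the current
  // value, so each retry recomputes from fresh data; the weak form may
  // also fail spuriously, which the loop absorbs.  The names __counter
  // and __expected below are hypothetical.
  //
  //   std::atomic<int> __counter(0);
  //   int __expected = std::atomic_load(&__counter);
  //   while (!std::atomic_compare_exchange_weak(&__counter, &__expected,
  //                                             __expected + 1))
  //     ;  // __expected now holds the latest value; retry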

  // Function templates for atomic integral operations only, using
  // __atomic_base. The template argument should be constrained to
  // integral types as specified in the standard, excluding address
  // types.
  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
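
  // Illustrative sketch: each fetch operation returns the value held
  // immediately before the modification.  The name __n below is
  // hypothetical.
  //
  //   std::atomic<unsigned> __n(5);
  //   unsigned __prev = std::atomic_fetch_add(&__n, 3u);  // __prev == 5, __n == 8
  //   std::atomic_fetch_and(&__n, 0x3u);                  // __n == 0 (8 & 3)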


  // Overloads for the pointer specializations of atomic.
  template<typename _ITp>
    __libitm_always_inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    __libitm_always_inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    __libitm_always_inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
                              ptrdiff_t __d, memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  template<typename _ITp>
    __libitm_always_inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }
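
  // Illustrative sketch: for atomic<_Tp*> the ptrdiff_t argument is an
  // element count, so the arithmetic matches ordinary pointer
  // arithmetic on _Tp*.  The names __buf and __p below are
  // hypothetical.
  //
  //   int __buf[4] = { 0, 1, 2, 3 };
  //   std::atomic<int*> __p(__buf);
  //   int* __old = std::atomic_fetch_add(&__p, 2);  // __old == __buf, __p == __buf + 2
  //   std::atomic_fetch_sub(&__p, 1);               // __p == __buf + 1
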
  // @} group atomics

// _GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif