1// -*- C++ -*-
2//===--------------------------- atomic -----------------------------------===//
3//
4// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
5// See https://llvm.org/LICENSE.txt for license information.
6// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7//
8//===----------------------------------------------------------------------===//
9
10#ifndef _LIBCPP_ATOMIC
11#define _LIBCPP_ATOMIC
12
13/*
14    atomic synopsis
15
16namespace std
17{
18
19// feature test macro
20
21#define __cpp_lib_atomic_is_always_lock_free // as specified by SG10
22
23 // order and consistency
24
25 enum memory_order: unspecified // enum class in C++20
26 {
27    relaxed,
28    consume, // load-consume
29    acquire, // load-acquire
30    release, // store-release
31    acq_rel, // store-release load-acquire
    seq_cst // store-release load-acquire, plus a single total order
33 };
34
35 inline constexpr auto memory_order_relaxed = memory_order::relaxed;
36 inline constexpr auto memory_order_consume = memory_order::consume;
37 inline constexpr auto memory_order_acquire = memory_order::acquire;
38 inline constexpr auto memory_order_release = memory_order::release;
39 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
40 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
41
42template <class T> T kill_dependency(T y) noexcept;
43
44// lock-free property
45
46#define ATOMIC_BOOL_LOCK_FREE unspecified
47#define ATOMIC_CHAR_LOCK_FREE unspecified
48#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
49#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
50#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
51#define ATOMIC_SHORT_LOCK_FREE unspecified
52#define ATOMIC_INT_LOCK_FREE unspecified
53#define ATOMIC_LONG_LOCK_FREE unspecified
54#define ATOMIC_LLONG_LOCK_FREE unspecified
55#define ATOMIC_POINTER_LOCK_FREE unspecified
56
57// flag type and operations
58
59typedef struct atomic_flag
60{
61    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
62    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
63    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
64    void clear(memory_order m = memory_order_seq_cst) noexcept;
65    atomic_flag()  noexcept = default;
66    atomic_flag(const atomic_flag&) = delete;
67    atomic_flag& operator=(const atomic_flag&) = delete;
68    atomic_flag& operator=(const atomic_flag&) volatile = delete;
69} atomic_flag;
70
71bool
72    atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
73
74bool
75    atomic_flag_test_and_set(atomic_flag* obj) noexcept;
76
77bool
78    atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
79                                      memory_order m) noexcept;
80
81bool
82    atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
83
84void
85    atomic_flag_clear(volatile atomic_flag* obj) noexcept;
86
87void
88    atomic_flag_clear(atomic_flag* obj) noexcept;
89
90void
91    atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
92
93void
94    atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
95
96#define ATOMIC_FLAG_INIT see below
97#define ATOMIC_VAR_INIT(value) see below
98
99template <class T>
100struct atomic
101{
102    static constexpr bool is_always_lock_free;
103    bool is_lock_free() const volatile noexcept;
104    bool is_lock_free() const noexcept;
105    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
106    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
107    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
108    T load(memory_order m = memory_order_seq_cst) const noexcept;
109    operator T() const volatile noexcept;
110    operator T() const noexcept;
111    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
112    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
113    bool compare_exchange_weak(T& expc, T desr,
114                               memory_order s, memory_order f) volatile noexcept;
115    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
116    bool compare_exchange_strong(T& expc, T desr,
117                                 memory_order s, memory_order f) volatile noexcept;
118    bool compare_exchange_strong(T& expc, T desr,
119                                 memory_order s, memory_order f) noexcept;
120    bool compare_exchange_weak(T& expc, T desr,
121                               memory_order m = memory_order_seq_cst) volatile noexcept;
122    bool compare_exchange_weak(T& expc, T desr,
123                               memory_order m = memory_order_seq_cst) noexcept;
124    bool compare_exchange_strong(T& expc, T desr,
125                                memory_order m = memory_order_seq_cst) volatile noexcept;
126    bool compare_exchange_strong(T& expc, T desr,
127                                 memory_order m = memory_order_seq_cst) noexcept;
128
129    atomic() noexcept = default;
130    constexpr atomic(T desr) noexcept;
131    atomic(const atomic&) = delete;
132    atomic& operator=(const atomic&) = delete;
133    atomic& operator=(const atomic&) volatile = delete;
134    T operator=(T) volatile noexcept;
135    T operator=(T) noexcept;
136};
137
138template <>
139struct atomic<integral>
140{
141    static constexpr bool is_always_lock_free;
142    bool is_lock_free() const volatile noexcept;
143    bool is_lock_free() const noexcept;
144    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
145    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
146    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
147    integral load(memory_order m = memory_order_seq_cst) const noexcept;
148    operator integral() const volatile noexcept;
149    operator integral() const noexcept;
150    integral exchange(integral desr,
151                      memory_order m = memory_order_seq_cst) volatile noexcept;
152    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
153    bool compare_exchange_weak(integral& expc, integral desr,
154                               memory_order s, memory_order f) volatile noexcept;
155    bool compare_exchange_weak(integral& expc, integral desr,
156                               memory_order s, memory_order f) noexcept;
157    bool compare_exchange_strong(integral& expc, integral desr,
158                                 memory_order s, memory_order f) volatile noexcept;
159    bool compare_exchange_strong(integral& expc, integral desr,
160                                 memory_order s, memory_order f) noexcept;
161    bool compare_exchange_weak(integral& expc, integral desr,
162                               memory_order m = memory_order_seq_cst) volatile noexcept;
163    bool compare_exchange_weak(integral& expc, integral desr,
164                               memory_order m = memory_order_seq_cst) noexcept;
165    bool compare_exchange_strong(integral& expc, integral desr,
166                                memory_order m = memory_order_seq_cst) volatile noexcept;
167    bool compare_exchange_strong(integral& expc, integral desr,
168                                 memory_order m = memory_order_seq_cst) noexcept;
169
170    integral
171        fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
172    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
173    integral
174        fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
175    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
176    integral
177        fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
178    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
179    integral
180        fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
181    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
182    integral
183        fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
184    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
185
186    atomic() noexcept = default;
187    constexpr atomic(integral desr) noexcept;
188    atomic(const atomic&) = delete;
189    atomic& operator=(const atomic&) = delete;
190    atomic& operator=(const atomic&) volatile = delete;
191    integral operator=(integral desr) volatile noexcept;
192    integral operator=(integral desr) noexcept;
193
194    integral operator++(int) volatile noexcept;
195    integral operator++(int) noexcept;
196    integral operator--(int) volatile noexcept;
197    integral operator--(int) noexcept;
198    integral operator++() volatile noexcept;
199    integral operator++() noexcept;
200    integral operator--() volatile noexcept;
201    integral operator--() noexcept;
202    integral operator+=(integral op) volatile noexcept;
203    integral operator+=(integral op) noexcept;
204    integral operator-=(integral op) volatile noexcept;
205    integral operator-=(integral op) noexcept;
206    integral operator&=(integral op) volatile noexcept;
207    integral operator&=(integral op) noexcept;
208    integral operator|=(integral op) volatile noexcept;
209    integral operator|=(integral op) noexcept;
210    integral operator^=(integral op) volatile noexcept;
211    integral operator^=(integral op) noexcept;
212};
213
214template <class T>
215struct atomic<T*>
216{
217    static constexpr bool is_always_lock_free;
218    bool is_lock_free() const volatile noexcept;
219    bool is_lock_free() const noexcept;
220    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
221    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
222    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
223    T* load(memory_order m = memory_order_seq_cst) const noexcept;
224    operator T*() const volatile noexcept;
225    operator T*() const noexcept;
226    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
227    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
228    bool compare_exchange_weak(T*& expc, T* desr,
229                               memory_order s, memory_order f) volatile noexcept;
230    bool compare_exchange_weak(T*& expc, T* desr,
231                               memory_order s, memory_order f) noexcept;
232    bool compare_exchange_strong(T*& expc, T* desr,
233                                 memory_order s, memory_order f) volatile noexcept;
234    bool compare_exchange_strong(T*& expc, T* desr,
235                                 memory_order s, memory_order f) noexcept;
236    bool compare_exchange_weak(T*& expc, T* desr,
237                               memory_order m = memory_order_seq_cst) volatile noexcept;
238    bool compare_exchange_weak(T*& expc, T* desr,
239                               memory_order m = memory_order_seq_cst) noexcept;
240    bool compare_exchange_strong(T*& expc, T* desr,
241                                memory_order m = memory_order_seq_cst) volatile noexcept;
242    bool compare_exchange_strong(T*& expc, T* desr,
243                                 memory_order m = memory_order_seq_cst) noexcept;
244    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
245    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
246    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
247    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
248
249    atomic() noexcept = default;
250    constexpr atomic(T* desr) noexcept;
251    atomic(const atomic&) = delete;
252    atomic& operator=(const atomic&) = delete;
253    atomic& operator=(const atomic&) volatile = delete;
254
255    T* operator=(T*) volatile noexcept;
256    T* operator=(T*) noexcept;
257    T* operator++(int) volatile noexcept;
258    T* operator++(int) noexcept;
259    T* operator--(int) volatile noexcept;
260    T* operator--(int) noexcept;
261    T* operator++() volatile noexcept;
262    T* operator++() noexcept;
263    T* operator--() volatile noexcept;
264    T* operator--() noexcept;
265    T* operator+=(ptrdiff_t op) volatile noexcept;
266    T* operator+=(ptrdiff_t op) noexcept;
267    T* operator-=(ptrdiff_t op) volatile noexcept;
268    T* operator-=(ptrdiff_t op) noexcept;
269};
270
271
272template <class T>
273    bool
274    atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
275
276template <class T>
277    bool
278    atomic_is_lock_free(const atomic<T>* obj) noexcept;
279
280template <class T>
281    void
282    atomic_init(volatile atomic<T>* obj, T desr) noexcept;
283
284template <class T>
285    void
286    atomic_init(atomic<T>* obj, T desr) noexcept;
287
288template <class T>
289    void
290    atomic_store(volatile atomic<T>* obj, T desr) noexcept;
291
292template <class T>
293    void
294    atomic_store(atomic<T>* obj, T desr) noexcept;
295
296template <class T>
297    void
298    atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
299
300template <class T>
301    void
302    atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
303
304template <class T>
305    T
306    atomic_load(const volatile atomic<T>* obj) noexcept;
307
308template <class T>
309    T
310    atomic_load(const atomic<T>* obj) noexcept;
311
312template <class T>
313    T
314    atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
315
316template <class T>
317    T
318    atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
319
320template <class T>
321    T
322    atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
323
324template <class T>
325    T
326    atomic_exchange(atomic<T>* obj, T desr) noexcept;
327
328template <class T>
329    T
330    atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
331
332template <class T>
333    T
334    atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
335
336template <class T>
337    bool
338    atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
339
340template <class T>
341    bool
342    atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
343
344template <class T>
345    bool
346    atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
347
348template <class T>
349    bool
350    atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
351
352template <class T>
353    bool
354    atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
355                                          T desr,
356                                          memory_order s, memory_order f) noexcept;
357
358template <class T>
359    bool
360    atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
361                                          memory_order s, memory_order f) noexcept;
362
363template <class T>
364    bool
365    atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
366                                            T* expc, T desr,
367                                            memory_order s, memory_order f) noexcept;
368
369template <class T>
370    bool
371    atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
372                                            T desr,
373                                            memory_order s, memory_order f) noexcept;
374
375template <class Integral>
376    Integral
377    atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
378
379template <class Integral>
380    Integral
381    atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
382
383template <class Integral>
384    Integral
385    atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
386                              memory_order m) noexcept;
387template <class Integral>
388    Integral
389    atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
390                              memory_order m) noexcept;
391template <class Integral>
392    Integral
393    atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
394
395template <class Integral>
396    Integral
397    atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
398
399template <class Integral>
400    Integral
401    atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
402                              memory_order m) noexcept;
403template <class Integral>
404    Integral
405    atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
406                              memory_order m) noexcept;
407template <class Integral>
408    Integral
409    atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
410
411template <class Integral>
412    Integral
413    atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
414
415template <class Integral>
416    Integral
417    atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
418                              memory_order m) noexcept;
419template <class Integral>
420    Integral
421    atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
422                              memory_order m) noexcept;
423template <class Integral>
424    Integral
425    atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
426
427template <class Integral>
428    Integral
429    atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
430
431template <class Integral>
432    Integral
433    atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
434                             memory_order m) noexcept;
435template <class Integral>
436    Integral
437    atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
438                             memory_order m) noexcept;
439template <class Integral>
440    Integral
441    atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
442
443template <class Integral>
444    Integral
445    atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
446
447template <class Integral>
448    Integral
449    atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
450                              memory_order m) noexcept;
451template <class Integral>
452    Integral
453    atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
454                              memory_order m) noexcept;
455
456template <class T>
457    T*
458    atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
459
460template <class T>
461    T*
462    atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
463
464template <class T>
465    T*
466    atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
467                              memory_order m) noexcept;
468template <class T>
469    T*
470    atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
471
472template <class T>
473    T*
474    atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
475
476template <class T>
477    T*
478    atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
479
480template <class T>
481    T*
482    atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
483                              memory_order m) noexcept;
484template <class T>
485    T*
486    atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
487
488// Atomics for standard typedef types
489
490typedef atomic<bool>               atomic_bool;
491typedef atomic<char>               atomic_char;
492typedef atomic<signed char>        atomic_schar;
493typedef atomic<unsigned char>      atomic_uchar;
494typedef atomic<short>              atomic_short;
495typedef atomic<unsigned short>     atomic_ushort;
496typedef atomic<int>                atomic_int;
497typedef atomic<unsigned int>       atomic_uint;
498typedef atomic<long>               atomic_long;
499typedef atomic<unsigned long>      atomic_ulong;
500typedef atomic<long long>          atomic_llong;
501typedef atomic<unsigned long long> atomic_ullong;
502typedef atomic<char16_t>           atomic_char16_t;
503typedef atomic<char32_t>           atomic_char32_t;
504typedef atomic<wchar_t>            atomic_wchar_t;
505
506typedef atomic<int_least8_t>   atomic_int_least8_t;
507typedef atomic<uint_least8_t>  atomic_uint_least8_t;
508typedef atomic<int_least16_t>  atomic_int_least16_t;
509typedef atomic<uint_least16_t> atomic_uint_least16_t;
510typedef atomic<int_least32_t>  atomic_int_least32_t;
511typedef atomic<uint_least32_t> atomic_uint_least32_t;
512typedef atomic<int_least64_t>  atomic_int_least64_t;
513typedef atomic<uint_least64_t> atomic_uint_least64_t;
514
515typedef atomic<int_fast8_t>   atomic_int_fast8_t;
516typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
517typedef atomic<int_fast16_t>  atomic_int_fast16_t;
518typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
519typedef atomic<int_fast32_t>  atomic_int_fast32_t;
520typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
521typedef atomic<int_fast64_t>  atomic_int_fast64_t;
522typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
523
524typedef atomic<int8_t>   atomic_int8_t;
525typedef atomic<uint8_t>  atomic_uint8_t;
526typedef atomic<int16_t>  atomic_int16_t;
527typedef atomic<uint16_t> atomic_uint16_t;
528typedef atomic<int32_t>  atomic_int32_t;
529typedef atomic<uint32_t> atomic_uint32_t;
530typedef atomic<int64_t>  atomic_int64_t;
531typedef atomic<uint64_t> atomic_uint64_t;
532
533typedef atomic<intptr_t>  atomic_intptr_t;
534typedef atomic<uintptr_t> atomic_uintptr_t;
535typedef atomic<size_t>    atomic_size_t;
536typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
537typedef atomic<intmax_t>  atomic_intmax_t;
538typedef atomic<uintmax_t> atomic_uintmax_t;
539
540// fences
541
542void atomic_thread_fence(memory_order m) noexcept;
543void atomic_signal_fence(memory_order m) noexcept;
544
545}  // std
546
547*/
548
549#include <__config>
550#include <cstddef>
551#include <cstdint>
552#include <type_traits>
553#include <version>
554
555#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
556#pragma GCC system_header
557#endif
558
559#ifdef _LIBCPP_HAS_NO_THREADS
560# error <atomic> is not supported on this single threaded system
561#endif
562#ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER
563# error <atomic> is not implemented
564#endif
565#ifdef kill_dependency
566# error C++ standard library is incompatible with <stdatomic.h>
567#endif
568
// Compile-time diagnostics for memory-order arguments that are invalid for a
// given operation kind ([atomics.order]): a store may not use
// consume/acquire/acq_rel, a load may not use release/acq_rel, and the
// *failure* order of a compare-exchange may not be release/acq_rel.
// NOTE(review): _LIBCPP_DIAGNOSE_WARNING comes from <__config>; presumably it
// expands to a warning-only attribute (no effect on codegen) — confirm there.
#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
                           __m == memory_order_acquire || \
                           __m == memory_order_acq_rel,   \
                        "memory order argument to atomic operation is invalid")

#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
                           __m == memory_order_acq_rel,   \
                        "memory order argument to atomic operation is invalid")

// Only the failure order __f is constrained; the success order __m is
// accepted unexamined (any order is valid on success).
#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
  _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
                           __f == memory_order_acq_rel,   \
                        "memory order argument to atomic operation is invalid")
584
585_LIBCPP_BEGIN_NAMESPACE_STD
586
587// Figure out what the underlying type for `memory_order` would be if it were
588// declared as an unscoped enum (accounting for -fshort-enums). Use this result
589// to pin the underlying type in C++20.
// Mirror of memory_order's enumerators as a plain unscoped enum.  It exists
// only so we can ask the compiler (via underlying_type) what underlying type
// such an enum would receive, accounting for options like -fshort-enums.
enum __legacy_memory_order {
    __mo_relaxed,
    __mo_consume,
    __mo_acquire,
    __mo_release,
    __mo_acq_rel,
    __mo_seq_cst
};

// The deduced underlying type; used below to pin memory_order's underlying
// type in C++20 so the ABI matches the pre-C++20 unscoped enum.
typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t;
600
#if _LIBCPP_STD_VER > 17

// C++20: memory_order is a scoped enum.  Pinning the underlying type to
// __memory_order_underlying_t keeps it ABI-compatible with the unscoped
// enum used for earlier dialects below.
enum class memory_order : __memory_order_underlying_t {
  relaxed = __mo_relaxed,
  consume = __mo_consume,
  acquire = __mo_acquire,
  release = __mo_release,
  acq_rel = __mo_acq_rel,
  seq_cst = __mo_seq_cst
};

// Standard-mandated named constants mirroring the scoped enumerators.
inline constexpr auto memory_order_relaxed = memory_order::relaxed;
inline constexpr auto memory_order_consume = memory_order::consume;
inline constexpr auto memory_order_acquire = memory_order::acquire;
inline constexpr auto memory_order_release = memory_order::release;
inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;

#else

// Pre-C++20: the traditional unscoped enum, with enumerator values taken
// from __legacy_memory_order so the two declarations agree exactly.
typedef enum memory_order {
  memory_order_relaxed = __mo_relaxed,
  memory_order_consume = __mo_consume,
  memory_order_acquire = __mo_acquire,
  memory_order_release = __mo_release,
  memory_order_acq_rel = __mo_acq_rel,
  memory_order_seq_cst = __mo_seq_cst,
} memory_order;

#endif // _LIBCPP_STD_VER > 17

// Both declarations above must yield the same underlying type, or the ABI of
// any function taking memory_order would differ between language dialects.
static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
  "unexpected underlying type for std::memory_order");
634
635#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
636	defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)
637
638// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
639// the default operator= in an object is not volatile, a byte-by-byte copy
640// is required.
// Overload for non-volatile destinations: plain assignment is available and
// correct, so just use it.  (The volatile overload below must instead copy
// byte-by-byte, since the default operator= is not volatile-qualified.)
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
  __a_value = __val;
}
646template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
647typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
648__cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
649  volatile char* __to = reinterpret_cast<volatile char*>(&__a_value);
650  volatile char* __end = __to + sizeof(_Tp);
651  volatile const char* __from = reinterpret_cast<volatile const char*>(&__val);
652  while (__to != __end)
653    *__to++ = *__from++;
654}
655
656#endif
657
658#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
659
// Storage wrapper for one atomic value when using the GCC __atomic builtins,
// which operate directly on an ordinary object of type _Tp.
template <typename _Tp>
struct __cxx_atomic_base_impl {

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
    // C++03 has no defaulted functions; value-initialize the member instead.
    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;  // the object the __atomic_* builtins actually address
};
673
674_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
675  // Avoid switch statement to make this a constexpr.
676  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
677         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
678          (__order == memory_order_release ? __ATOMIC_RELEASE:
679           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
680            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
681              __ATOMIC_CONSUME))));
682}
683
684_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
685  // Avoid switch statement to make this a constexpr.
686  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
687         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
688          (__order == memory_order_release ? __ATOMIC_RELAXED:
689           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
690            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
691              __ATOMIC_CONSUME))));
692}
693
// Non-atomic initialization of a volatile atomic object; routed through the
// byte-wise volatile assignment helper because the default operator= cannot
// be used on a volatile object.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}
699
// Non-atomic initialization of a (non-volatile) atomic object; a plain
// assignment suffices here.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
  __a->__a_value = __val;
}
705
706_LIBCPP_INLINE_VISIBILITY inline
707void __cxx_atomic_thread_fence(memory_order __order) {
708  __atomic_thread_fence(__to_gcc_order(__order));
709}
710
711_LIBCPP_INLINE_VISIBILITY inline
712void __cxx_atomic_signal_fence(memory_order __order) {
713  __atomic_signal_fence(__to_gcc_order(__order));
714}
715
716template <typename _Tp>
717_LIBCPP_INLINE_VISIBILITY
718void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
719                        memory_order __order) {
720  __atomic_store(&__a->__a_value, &__val,
721                 __to_gcc_order(__order));
722}
723
724template <typename _Tp>
725_LIBCPP_INLINE_VISIBILITY
726void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
727                        memory_order __order) {
728  __atomic_store(&__a->__a_value, &__val,
729                 __to_gcc_order(__order));
730}
731
732template <typename _Tp>
733_LIBCPP_INLINE_VISIBILITY
734_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
735                      memory_order __order) {
736  _Tp __ret;
737  __atomic_load(&__a->__a_value, &__ret,
738                __to_gcc_order(__order));
739  return __ret;
740}
741
742template <typename _Tp>
743_LIBCPP_INLINE_VISIBILITY
744_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
745  _Tp __ret;
746  __atomic_load(&__a->__a_value, &__ret,
747                __to_gcc_order(__order));
748  return __ret;
749}
750
751template <typename _Tp>
752_LIBCPP_INLINE_VISIBILITY
753_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
754                          _Tp __value, memory_order __order) {
755  _Tp __ret;
756  __atomic_exchange(&__a->__a_value, &__value, &__ret,
757                    __to_gcc_order(__order));
758  return __ret;
759}
760
761template <typename _Tp>
762_LIBCPP_INLINE_VISIBILITY
763_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
764                          memory_order __order) {
765  _Tp __ret;
766  __atomic_exchange(&__a->__a_value, &__value, &__ret,
767                    __to_gcc_order(__order));
768  return __ret;
769}
770
771template <typename _Tp>
772_LIBCPP_INLINE_VISIBILITY
773bool __cxx_atomic_compare_exchange_strong(
774    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
775    memory_order __success, memory_order __failure) {
776  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
777                                   false,
778                                   __to_gcc_order(__success),
779                                   __to_gcc_failure_order(__failure));
780}
781
782template <typename _Tp>
783_LIBCPP_INLINE_VISIBILITY
784bool __cxx_atomic_compare_exchange_strong(
785    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
786    memory_order __failure) {
787  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
788                                   false,
789                                   __to_gcc_order(__success),
790                                   __to_gcc_failure_order(__failure));
791}
792
// Weak compare-exchange (GCC builtin path). Identical to the strong form
// except the 'true' argument permits spurious failure, which can be
// cheaper on LL/SC architectures.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

// Non-volatile overload of the above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}
814
// __skip_amt<_Tp>::value is the scaling factor applied to fetch_add/fetch_sub
// deltas: 1 for arithmetic types, sizeof(_Tp) for _Tp* so that pointer
// arithmetic advances by whole elements (matching the GCC builtins, which
// count in bytes for pointers).
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
// (These specializations have no ::value, so instantiation fails to compile.)
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
827
// Atomic fetch-add (GCC builtin path). Returns the value held before the
// addition; the delta is scaled by __skip_amt so pointer atomics step by
// element, not by byte.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

// Non-volatile overload of the above.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}
843
// Atomic fetch-subtract (GCC builtin path). Mirror image of
// __cxx_atomic_fetch_add above, including the __skip_amt pointer scaling.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

// Non-volatile overload of the above.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}
859
// Atomic fetch-AND (GCC builtin path). Returns the pre-operation value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

// Non-volatile overload of the above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}
875
// Atomic fetch-OR (GCC builtin path). Returns the pre-operation value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));
}

// Non-volatile overload of the above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                          memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));
}
891
// Atomic fetch-XOR (GCC builtin path). Returns the pre-operation value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

// Non-volatile overload of the above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                           memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}
907
908#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
909
910#elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)
911
// Storage wrapper for the Clang __c11_atomic_* builtin implementation:
// holds the value as _Atomic(_Tp) so the builtins can operate on it.
template <typename _Tp>
struct __cxx_atomic_base_impl {

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
    // C++03 has no defaulted members; value-initialize explicitly instead.
    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  // _Atomic() is a C11 construct; suppress the extension warning in C++.
  _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
};
925
926#define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
927
// Fences (C11 builtin path): forward to the Clang builtins, converting the
// memory_order enum to its underlying integer type.
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
    __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
}

// Fence affecting only compiler reordering with respect to signal handlers.
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
    __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
}
937
// Non-atomic initialization (C11 builtin path); must not race with other
// accesses, matching C11 atomic_init semantics.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
    __c11_atomic_init(&__a->__a_value, __val);
}
// Non-volatile overload of the above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
    __c11_atomic_init(&__a->__a_value, __val);
}
948
// Atomic store (C11 builtin path).
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
    __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload of the above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
    __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
}
959
// Atomic load (C11 builtin path). The builtin takes a non-const pointer,
// so const is cast away from the (load-only) access; the load itself does
// not modify the object.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
    using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
    return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload of the above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
    using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
    return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
}
972
// Atomic exchange (C11 builtin path): returns the previously stored value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
    return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload of the above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
    return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
}
983
// Strong compare-exchange (C11 builtin path). On failure the observed value
// is written back into *__expected.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
    return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
}
// Non-volatile overload of the above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
    return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
}
994
// Weak compare-exchange (C11 builtin path): may fail spuriously.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
    return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
}
// Non-volatile overload of the above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
    return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value,  static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
}
1005
// Atomic fetch-add (C11 builtin path). The __c11_atomic_fetch_add builtin
// handles pointer scaling itself, so no __skip_amt factor is needed here.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload of the above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}

// Pointer overloads: delta is a ptrdiff_t element count.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
1027
// Atomic fetch-subtract (C11 builtin path); mirror of fetch_add above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload of the above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
// Pointer overloads: delta is a ptrdiff_t element count.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
1048
// Atomic fetch-AND (C11 builtin path). Returns the pre-operation value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload of the above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
1059
// Atomic fetch-OR (C11 builtin path). Returns the pre-operation value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload of the above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
1070
// Atomic fetch-XOR (C11 builtin path). Returns the pre-operation value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload of the above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
1081
1082#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP
1083
// [atomics.order] std::kill_dependency: terminates a memory_order_consume
// dependency chain by returning its argument through a non-carrying path.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp kill_dependency(_Tp __y) _NOEXCEPT
{
    return __y;
}
1090
1091#if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
1092# define ATOMIC_BOOL_LOCK_FREE      __CLANG_ATOMIC_BOOL_LOCK_FREE
1093# define ATOMIC_CHAR_LOCK_FREE      __CLANG_ATOMIC_CHAR_LOCK_FREE
1094# define ATOMIC_CHAR16_T_LOCK_FREE  __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
1095# define ATOMIC_CHAR32_T_LOCK_FREE  __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
1096# define ATOMIC_WCHAR_T_LOCK_FREE   __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
1097# define ATOMIC_SHORT_LOCK_FREE     __CLANG_ATOMIC_SHORT_LOCK_FREE
1098# define ATOMIC_INT_LOCK_FREE       __CLANG_ATOMIC_INT_LOCK_FREE
1099# define ATOMIC_LONG_LOCK_FREE      __CLANG_ATOMIC_LONG_LOCK_FREE
1100# define ATOMIC_LLONG_LOCK_FREE     __CLANG_ATOMIC_LLONG_LOCK_FREE
1101# define ATOMIC_POINTER_LOCK_FREE   __CLANG_ATOMIC_POINTER_LOCK_FREE
1102#elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE)
1103# define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
1104# define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
1105# define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
1106# define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
1107# define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
1108# define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
1109# define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
1110# define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
1111# define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
1112# define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
1113#endif
1114
1115#ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
1116
// Fallback storage used when only builtin-lock-free atomics are allowed
// (_LIBCPP_ATOMIC_ONLY_USE_BUILTINS) but _Tp is not always lock-free:
// the value is guarded by a per-object spinlock (__a_lock).
template<typename _Tp>
struct __cxx_atomic_lock_impl {

  _LIBCPP_INLINE_VISIBILITY
  __cxx_atomic_lock_impl() _NOEXCEPT
    : __a_value(), __a_lock(0) {}
  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit
  __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
    : __a_value(value), __a_lock(0) {}

  _Tp __a_value;
  // mutable: const member functions (__read) must still be able to spin on
  // and release the lock.
  mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;

  // Acquire the spinlock: exchange(true, acquire) until the old value was 0.
  _LIBCPP_INLINE_VISIBILITY void __lock() const volatile {
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
        /*spin*/;
  }
  _LIBCPP_INLINE_VISIBILITY void __lock() const {
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
        /*spin*/;
  }
  // Release the spinlock with a release store.
  _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile {
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  }
  _LIBCPP_INLINE_VISIBILITY void __unlock() const {
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  }
  // Locked read of the value; the volatile overload copies member-wise via
  // __cxx_atomic_assign_volatile.
  _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile {
    __lock();
    _Tp __old;
    __cxx_atomic_assign_volatile(__old, __a_value);
    __unlock();
    return __old;
  }
  _LIBCPP_INLINE_VISIBILITY _Tp __read() const {
    __lock();
    _Tp __old = __a_value;
    __unlock();
    return __old;
  }
};
1158
// Non-atomic initialization (lock-based path): plain assignment, no lock —
// matches atomic_init's requirement that initialization not be concurrent.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}
// Non-volatile overload of the above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
  __a->__a_value = __val;
}
1169
// Atomic store (lock-based path): assignment under the object's spinlock.
// The memory_order argument is ignored; the lock provides the ordering.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
  __a->__lock();
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
  __a->__unlock();
}
// Non-volatile overload of the above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
  __a->__lock();
  __a->__a_value = __val;
  __a->__unlock();
}
1184
// Atomic load (lock-based path): delegates to the locked __read() helper.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  return __a->__read();
}
// Non-volatile overload of the above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  return __a->__read();
}
1195
// Atomic exchange (lock-based path): read old value and store new one under
// the spinlock; returns the previous value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, __value);
  __a->__unlock();
  return __old;
}
// Non-volatile overload of the above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value = __value;
  __a->__unlock();
  return __old;
}
1215
1216template <typename _Tp>
1217_LIBCPP_INLINE_VISIBILITY
1218bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1219                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
1220  __a->__lock();
1221  _Tp temp;
1222  __cxx_atomic_assign_volatile(temp, __a->__a_value);
1223  bool __ret = temp == *__expected;
1224  if(__ret)
1225    __cxx_atomic_assign_volatile(__a->__a_value, __value);
1226  else
1227    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1228  __a->__unlock();
1229  return __ret;
1230}
1231template <typename _Tp>
1232_LIBCPP_INLINE_VISIBILITY
1233bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
1234                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
1235  __a->__lock();
1236  bool __ret = __a->__a_value == *__expected;
1237  if(__ret)
1238    __a->__a_value = __value;
1239  else
1240    *__expected = __a->__a_value;
1241  __a->__unlock();
1242  return __ret;
1243}
1244
1245template <typename _Tp>
1246_LIBCPP_INLINE_VISIBILITY
1247bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1248                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
1249  __a->__lock();
1250  _Tp temp;
1251  __cxx_atomic_assign_volatile(temp, __a->__a_value);
1252  bool __ret = temp == *__expected;
1253  if(__ret)
1254    __cxx_atomic_assign_volatile(__a->__a_value, __value);
1255  else
1256    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1257  __a->__unlock();
1258  return __ret;
1259}
1260template <typename _Tp>
1261_LIBCPP_INLINE_VISIBILITY
1262bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
1263                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
1264  __a->__lock();
1265  bool __ret = __a->__a_value == *__expected;
1266  if(__ret)
1267    __a->__a_value = __value;
1268  else
1269    *__expected = __a->__a_value;
1270  __a->__unlock();
1271  return __ret;
1272}
1273
// Atomic fetch-add (lock-based path): read-modify-write under the spinlock;
// returns the pre-operation value.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
  __a->__unlock();
  return __old;
}
// Non-volatile overload of the above.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value += __delta;
  __a->__unlock();
  return __old;
}
1295
// Pointer fetch-add (lock-based path). NOTE(review): _Td appears in the
// template parameter list but not in the signature, so it cannot be deduced
// and these overloads are removed from overload resolution unless _Td is
// supplied explicitly — the generic overloads above handle pointer deltas
// in practice. Verify against upstream before changing.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
                           ptrdiff_t __delta, memory_order) {
  __a->__lock();
  _Tp* __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
  __a->__unlock();
  return __old;
}
// Non-volatile overload of the above.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
                           ptrdiff_t __delta, memory_order) {
  __a->__lock();
  _Tp* __old = __a->__a_value;
  __a->__a_value += __delta;
  __a->__unlock();
  return __old;
}
1317
// Atomic fetch-subtract (lock-based path); mirror of fetch_add above.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
  __a->__unlock();
  return __old;
}
// Non-volatile overload of the above.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value -= __delta;
  __a->__unlock();
  return __old;
}
1339
// Atomic fetch-AND (lock-based path); returns the pre-operation value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
  __a->__unlock();
  return __old;
}
// Non-volatile overload of the above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value &= __pattern;
  __a->__unlock();
  return __old;
}
1361
// Atomic fetch-OR (lock-based path); returns the pre-operation value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                          _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
  __a->__unlock();
  return __old;
}
// Non-volatile overload of the above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
                          _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value |= __pattern;
  __a->__unlock();
  return __old;
}
1383
// Atomic fetch-XOR (lock-based path); returns the pre-operation value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
  __a->__unlock();
  return __old;
}
// Non-volatile overload of the above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value ^= __pattern;
  __a->__unlock();
  return __old;
}
1405
1406#ifdef __cpp_lib_atomic_is_always_lock_free
1407
// Compile-time lock-freedom predicate: when the feature-test macro is set,
// query the __atomic_always_lock_free builtin directly (nullptr argument
// means "at natural alignment").
template<typename _Tp> struct __cxx_is_always_lock_free {
    enum { __value = __atomic_always_lock_free(sizeof(_Tp), 0) }; };
1410
1411#else
1412
// Fallback when the builtin is unavailable: default to not-lock-free, then
// specialize per type from the ATOMIC_*_LOCK_FREE macros (2 == always).
template<typename _Tp> struct __cxx_is_always_lock_free { enum { __value = false }; };
// Implementations must match the C ATOMIC_*_LOCK_FREE macro values.
template<> struct __cxx_is_always_lock_free<bool> { enum { __value = 2 == ATOMIC_BOOL_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<signed char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<char16_t> { enum { __value = 2 == ATOMIC_CHAR16_T_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<char32_t> { enum { __value = 2 == ATOMIC_CHAR32_T_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<wchar_t> { enum { __value = 2 == ATOMIC_WCHAR_T_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
template<typename _Tp> struct __cxx_is_always_lock_free<_Tp*> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<std::nullptr_t> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1432
1433#endif //__cpp_lib_atomic_is_always_lock_free
1434
// __cxx_atomic_impl selects the storage backend. With
// _LIBCPP_ATOMIC_ONLY_USE_BUILTINS, always-lock-free types use the builtin
// backend and everything else falls back to the spinlock-guarded one;
// otherwise the builtin backend is used unconditionally.
template <typename _Tp,
          typename _Base = typename conditional<__cxx_is_always_lock_free<_Tp>::__value,
                                                __cxx_atomic_base_impl<_Tp>,
                                                __cxx_atomic_lock_impl<_Tp> >::type>
#else
template <typename _Tp,
          typename _Base = __cxx_atomic_base_impl<_Tp> >
#endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
struct __cxx_atomic_impl : public _Base {

// Only diagnose non-trivially-copyable _Tp where the trait is available.
#if _GNUC_VER >= 501
    static_assert(is_trivially_copyable<_Tp>::value,
      "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
#endif

  _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT _LIBCPP_DEFAULT
  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp value) _NOEXCEPT
    : _Base(value) {}
};
1454
1455// general atomic<T>
1456
// Primary template for std::atomic's common operations (the 'false' case:
// _Tp is not an integral type other than bool, so no arithmetic members).
// The integral specialization below derives from this one.
template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base  // false
{
    // mutable so const member functions (load) can still go through the
    // non-const __cxx_atomic_* dispatch layer.
    mutable __cxx_atomic_impl<_Tp> __a_;

#if defined(__cpp_lib_atomic_is_always_lock_free)
  static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
#endif

    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const volatile _NOEXCEPT
        {return __cxx_atomic_is_lock_free(sizeof(_Tp));}
    // Non-volatile overload defers to the volatile one.
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const _NOEXCEPT
        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
    // _LIBCPP_CHECK_*_MEMORY_ORDER diagnoses orders that [atomics.types.operations]
    // forbids for the given operation (e.g. acquire for a store).
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
        {__cxx_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
        {__cxx_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
        {return __cxx_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
        {return __cxx_atomic_load(&__a_, __m);}
    // Implicit conversion performs a seq_cst load.
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const volatile _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const _NOEXCEPT          {return load();}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, __d, __m);}
    // Two-order compare_exchange overloads (explicit success/failure orders).
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    // Single-order overloads: the one order is used for success and failure.
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}

    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT

    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
    __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}

// std::atomic is non-copyable; in C++03 this is emulated with private,
// undefined members instead of '= delete'.
#ifndef _LIBCPP_CXX03_LANG
    __atomic_base(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) volatile = delete;
#else
private:
    __atomic_base(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&) volatile;
#endif
};
1552
// Out-of-line definition of the static data member declared inside
// __atomic_base; needed so odr-uses link prior to C++17 inline variables.
#if defined(__cpp_lib_atomic_is_always_lock_free)
template <class _Tp, bool __b>
_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
#endif
1557
// atomic<Integral>

// Partial specialization selected for integral types: layers the arithmetic
// and bitwise fetch operations (and the corresponding operators) on top of
// the generic __atomic_base<_Tp, false>.
template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;
    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

    // Each fetch_* atomically applies the operation and returns the value
    // the object held immediately before the operation.
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}

    // Pre/post increment/decrement and compound assignment, as required by
    // [atomics.types.int].  The returned value is recomputed locally from
    // the fetch_* result (e.g. fetch_add(1) + 1), which is the value the
    // atomic held immediately after this operation.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
};
1638
// atomic<T>

// Primary template: inherits the full operation set from __atomic_base
// (which dispatches on is_integral to add arithmetic ops) and adds the
// assignment-from-_Tp operators, which store with seq_cst ordering and,
// per [atomics.types.generic], return the stored argument rather than *this.
template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}
};
1658
// atomic<T*>

// Partial specialization for pointers: adds pointer arithmetic in units of
// ptrdiff_t (fetch_add/fetch_sub plus the derived operators), as required
// by [atomics.types.pointer].
template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    // Atomically advance/retreat the pointer by __op elements; returns the
    // pointer value held immediately before the operation.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}

    // Increment/decrement and compound assignment; the post-operation value
    // is recomputed locally from the fetch_* result.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
};
1718
// atomic_is_lock_free

// C-compatible free function: reports whether operations on *__o are
// lock-free for this object.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

// atomic_init

// Non-atomically initializes *__o to __d; only valid on an object that has
// not yet been initialized and is not yet visible to other threads.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __cxx_atomic_init(&__o->__a_, __d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __cxx_atomic_init(&__o->__a_, __d);
}
1754
// atomic_store

// Free-function form of atomic<T>::store with seq_cst ordering.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

// atomic_store_explicit

// As atomic_store, with caller-supplied ordering; the check macro diagnoses
// orderings invalid for a store (consume / acquire / acq_rel).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}
1792
// atomic_load

// Free-function form of atomic<T>::load with seq_cst ordering.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

// atomic_load_explicit

// As atomic_load, with caller-supplied ordering; the check macro diagnoses
// orderings invalid for a load (release / acq_rel).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}
1830
// atomic_exchange

// Atomically replaces the value of *__o with __d and returns the previous
// value (seq_cst ordering).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

// atomic_exchange_explicit

// As atomic_exchange, with caller-supplied ordering (all orderings are
// valid for an exchange, so no check macro is needed here).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}
1866
// atomic_compare_exchange_weak

// C-compatible CAS free functions.  Note the expected value is passed by
// pointer (not by reference as in the member functions); on failure the
// observed value is written through __e.  The weak form may fail spuriously.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

// atomic_compare_exchange_strong

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

// atomic_compare_exchange_weak_explicit

// Explicit-ordering CAS: __s is the success ordering, __f the failure
// ordering.  The check macro diagnoses failure orderings that are not
// permitted (release / acq_rel).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
                                      _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

// atomic_compare_exchange_strong_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        _Tp* __e, _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
                                        _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}
1949
// atomic_fetch_add

// Integral overloads are constrained via enable_if to integral, non-bool
// types ([atomics.types.int] excludes bool); separate unconstrained
// overloads handle atomic<T*> with a ptrdiff_t operand.  All return the
// value held immediately before the addition.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

// atomic_fetch_add_explicit

// As atomic_fetch_add, with caller-supplied memory ordering.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}
2034
// atomic_fetch_sub

// Mirror of atomic_fetch_add for subtraction: integral (non-bool) and
// pointer overloads, returning the pre-operation value.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

// atomic_fetch_sub_explicit

// As atomic_fetch_sub, with caller-supplied memory ordering.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}
2119
// atomic_fetch_and

// Bitwise AND, integral non-bool types only (no pointer form exists for
// the bitwise operations); returns the pre-operation value.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

// atomic_fetch_and_explicit

// As atomic_fetch_and, with caller-supplied memory ordering.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}
2171
// atomic_fetch_or

// Bitwise OR, integral non-bool types only; returns the pre-operation value.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

// As atomic_fetch_or, with caller-supplied memory ordering.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}
2223
// atomic_fetch_xor

// Bitwise XOR, integral non-bool types only; returns the pre-operation value.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

// atomic_fetch_xor_explicit

// As atomic_fetch_xor, with caller-supplied memory ordering.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}
2275
// flag type and operations

// atomic_flag: the minimal atomic boolean flag, guaranteed lock-free.
// Implemented in terms of the same __cxx_atomic_* primitives as atomic<T>.
typedef struct atomic_flag
{
    __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_;

    // Atomically set the flag and return the state it held before the call.
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
    // Atomically clear the flag.
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}

    _LIBCPP_INLINE_VISIBILITY
    atomic_flag() _NOEXCEPT _LIBCPP_DEFAULT

    // libc++ extension: constant initialization from bool, enabling
    // `atomic_flag f = ATOMIC_FLAG_INIT;`-style initialization.
    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION

    // Not copyable; emulated with private undefined members in C++03 mode.
#ifndef _LIBCPP_CXX03_LANG
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif
} atomic_flag;
2312
// C-compatible free-function forms of the atomic_flag member operations.
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}
2368
// fences

// Memory fence affecting ordering of operations across threads.
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_thread_fence(memory_order __m) _NOEXCEPT
{
    __cxx_atomic_thread_fence(__m);
}

// Fence ordering only with respect to a signal handler executed in the
// same thread; no inter-thread hardware fence is required.
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_signal_fence(memory_order __m) _NOEXCEPT
{
    __cxx_atomic_signal_fence(__m);
}
2384
// Atomics for standard typedef types

// Convenience aliases required by [atomics.syn], mirroring the fundamental
// types and the <cstdint>/<cstddef> typedefs.
typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic< int8_t>  atomic_int8_t;
typedef atomic<uint8_t>  atomic_uint8_t;
typedef atomic< int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic< int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic< int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// ATOMIC_FLAG_INIT initializes an atomic_flag to the clear (false) state;
// ATOMIC_VAR_INIT provides C-compatible static initialization of atomic<T>.
#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}
2439
2440_LIBCPP_END_NAMESPACE_STD
2441
2442#endif  // _LIBCPP_ATOMIC
2443