/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2011 Helge Bahmann
 * Copyright (c) 2013 Tim Blechmann
 * Copyright (c) 2014 Andrey Semashev
 */
/*!
 * \file   atomic/detail/atomic_template.hpp
 *
 * This header contains the interface definition of the \c atomic template.
 */

#ifndef BOOST_ATOMIC_DETAIL_ATOMIC_TEMPLATE_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_ATOMIC_TEMPLATE_HPP_INCLUDED_

#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/assert.hpp>
#include <boost/type_traits/is_signed.hpp>
#include <boost/type_traits/is_integral.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/bitwise_cast.hpp>
#include <boost/atomic/detail/operations_fwd.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

#if defined(BOOST_MSVC)
#pragma warning(push)
// 'boost::atomics::atomic<T>' : multiple assignment operators specified
#pragma warning(disable: 4522)
#endif

/*
 * IMPLEMENTATION NOTE: All interface functions MUST be declared with BOOST_FORCEINLINE,
 *                      see comment for convert_memory_order_to_gcc in ops_gcc_atomic.hpp.
 */

namespace boost {
namespace atomics {
namespace detail {

BOOST_FORCEINLINE BOOST_CONSTEXPR memory_order deduce_failure_order(memory_order order) BOOST_NOEXCEPT
{
    return order == memory_order_acq_rel ? memory_order_acquire : (order == memory_order_release ? memory_order_relaxed : order);
}

BOOST_FORCEINLINE BOOST_CONSTEXPR bool cas_failure_order_must_not_be_stronger_than_success_order(memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
{
    // 15 == (memory_order_seq_cst | memory_order_consume), see memory_order.hpp
    // Given the enum values we can test the strength of memory order requirements with this single condition.
    return (failure_order & 15u) <= (success_order & 15u);
}
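// Illustrative sketch (not part of the interface): assuming the enum encoding described
// in memory_order.hpp (relaxed = 0, consume = 1, acquire = 2, release = 4, acq_rel = 6,
// seq_cst = 14), the helpers above behave as follows:
//
//   deduce_failure_order(memory_order_seq_cst) == memory_order_seq_cst
//   deduce_failure_order(memory_order_acq_rel) == memory_order_acquire
//   deduce_failure_order(memory_order_release) == memory_order_relaxed
//
//   // the failure order may not be stronger than the success order:
//   cas_failure_order_must_not_be_stronger_than_success_order(memory_order_acq_rel, memory_order_acquire) == true
//   cas_failure_order_must_not_be_stronger_than_success_order(memory_order_acquire, memory_order_seq_cst) == false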

template< typename T, bool IsInt = boost::is_integral< T >::value >
struct classify
{
    typedef void type;
};

template< typename T >
struct classify< T, true > { typedef int type; };

template< typename T >
struct classify< T*, false > { typedef void* type; };
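// Illustrative sketch of the tag dispatch performed by classify<> ("my_struct" is a
// hypothetical user-defined type, not part of this header):
//
//   classify< int >::type        -> int    : integral types select base_atomic< T, int >
//   classify< my_struct >::type  -> void   : other types select base_atomic< T, void >
//   classify< my_struct* >::type -> void*  : pointers select base_atomic< T*, void* >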

template< typename T, typename Kind >
class base_atomic;

//! Implementation for integers
template< typename T >
class base_atomic< T, int >
{
private:
    typedef T value_type;
    typedef T difference_type;
    typedef atomics::detail::operations< storage_size_of< value_type >::value, boost::is_signed< T >::value > operations;

protected:
    typedef value_type value_arg_type;

public:
    typedef typename operations::storage_type storage_type;

protected:
    typename operations::aligned_storage_type m_storage;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : m_storage(v) {}

    BOOST_FORCEINLINE void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_consume);
        BOOST_ASSERT(order != memory_order_acquire);
        BOOST_ASSERT(order != memory_order_acq_rel);

        operations::store(m_storage.value, static_cast< storage_type >(v), order);
    }

    BOOST_FORCEINLINE value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_release);
        BOOST_ASSERT(order != memory_order_acq_rel);

        return static_cast< value_type >(operations::load(m_storage.value, order));
    }

    BOOST_FORCEINLINE value_type fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return static_cast< value_type >(operations::fetch_add(m_storage.value, static_cast< storage_type >(v), order));
    }

    BOOST_FORCEINLINE value_type fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return static_cast< value_type >(operations::fetch_sub(m_storage.value, static_cast< storage_type >(v), order));
    }

    BOOST_FORCEINLINE value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return static_cast< value_type >(operations::exchange(m_storage.value, static_cast< storage_type >(v), order));
    }

    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = static_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_strong(m_storage.value, old_value, static_cast< storage_type >(desired), success_order, failure_order);
        expected = static_cast< value_type >(old_value);
        return res;
    }

    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = static_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_weak(m_storage.value, old_value, static_cast< storage_type >(desired), success_order, failure_order);
        expected = static_cast< value_type >(old_value);
        return res;
    }

    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_weak(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    BOOST_FORCEINLINE value_type fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return static_cast< value_type >(operations::fetch_and(m_storage.value, static_cast< storage_type >(v), order));
    }

    BOOST_FORCEINLINE value_type fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return static_cast< value_type >(operations::fetch_or(m_storage.value, static_cast< storage_type >(v), order));
    }

    BOOST_FORCEINLINE value_type fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return static_cast< value_type >(operations::fetch_xor(m_storage.value, static_cast< storage_type >(v), order));
    }

    BOOST_FORCEINLINE bool is_lock_free() const volatile BOOST_NOEXCEPT
    {
        return operations::is_lock_free(m_storage.value);
    }

    BOOST_FORCEINLINE value_type operator++(int) volatile BOOST_NOEXCEPT
    {
        return fetch_add(1);
    }

    BOOST_FORCEINLINE value_type operator++() volatile BOOST_NOEXCEPT
    {
        return fetch_add(1) + 1;
    }

    BOOST_FORCEINLINE value_type operator--(int) volatile BOOST_NOEXCEPT
    {
        return fetch_sub(1);
    }

    BOOST_FORCEINLINE value_type operator--() volatile BOOST_NOEXCEPT
    {
        return fetch_sub(1) - 1;
    }

    BOOST_FORCEINLINE value_type operator+=(difference_type v) volatile BOOST_NOEXCEPT
    {
        return fetch_add(v) + v;
    }

    BOOST_FORCEINLINE value_type operator-=(difference_type v) volatile BOOST_NOEXCEPT
    {
        return fetch_sub(v) - v;
    }

    BOOST_FORCEINLINE value_type operator&=(value_type v) volatile BOOST_NOEXCEPT
    {
        return fetch_and(v) & v;
    }

    BOOST_FORCEINLINE value_type operator|=(value_type v) volatile BOOST_NOEXCEPT
    {
        return fetch_or(v) | v;
    }

    BOOST_FORCEINLINE value_type operator^=(value_type v) volatile BOOST_NOEXCEPT
    {
        return fetch_xor(v) ^ v;
    }

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
};
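// Usage sketch for the integer specialization (illustrative only; user code reaches these
// operations through boost::atomics::atomic<>, defined below). fetch_xxx() return the
// previous value, while the compound assignment operators return the new value and always
// use the default memory_order_seq_cst:
//
//   boost::atomics::atomic< int > counter(0);
//   counter.fetch_add(5, boost::memory_order_relaxed); // returns 0, counter is now 5
//   counter += 5;                                      // returns 10 (the new value)
//   int snapshot = counter.load(boost::memory_order_acquire);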

//! Implementation for bool
template< >
class base_atomic< bool, int >
{
private:
    typedef bool value_type;
    typedef atomics::detail::operations< 1u, false > operations;

protected:
    typedef value_type value_arg_type;

public:
    typedef operations::storage_type storage_type;

protected:
    operations::aligned_storage_type m_storage;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : m_storage(v) {}

    BOOST_FORCEINLINE void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_consume);
        BOOST_ASSERT(order != memory_order_acquire);
        BOOST_ASSERT(order != memory_order_acq_rel);

        operations::store(m_storage.value, static_cast< storage_type >(v), order);
    }

    BOOST_FORCEINLINE value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_release);
        BOOST_ASSERT(order != memory_order_acq_rel);

        return !!operations::load(m_storage.value, order);
    }

    BOOST_FORCEINLINE value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return !!operations::exchange(m_storage.value, static_cast< storage_type >(v), order);
    }

    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = static_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_strong(m_storage.value, old_value, static_cast< storage_type >(desired), success_order, failure_order);
        expected = !!old_value;
        return res;
    }

    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = static_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_weak(m_storage.value, old_value, static_cast< storage_type >(desired), success_order, failure_order);
        expected = !!old_value;
        return res;
    }

    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_weak(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    BOOST_FORCEINLINE bool is_lock_free() const volatile BOOST_NOEXCEPT
    {
        return operations::is_lock_free(m_storage.value);
    }

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
};


//! Implementation for user-defined types, such as structs and enums
template< typename T >
class base_atomic< T, void >
{
private:
    typedef T value_type;
    typedef atomics::detail::operations< storage_size_of< value_type >::value, false > operations;

protected:
    typedef value_type const& value_arg_type;

public:
    typedef typename operations::storage_type storage_type;

protected:
    typename operations::aligned_storage_type m_storage;

public:
    BOOST_FORCEINLINE explicit base_atomic(value_type const& v = value_type()) BOOST_NOEXCEPT : m_storage(atomics::detail::bitwise_cast< storage_type >(v))
    {
    }

    BOOST_FORCEINLINE void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_consume);
        BOOST_ASSERT(order != memory_order_acquire);
        BOOST_ASSERT(order != memory_order_acq_rel);

        operations::store(m_storage.value, atomics::detail::bitwise_cast< storage_type >(v), order);
    }

    BOOST_FORCEINLINE value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_release);
        BOOST_ASSERT(order != memory_order_acq_rel);

        return atomics::detail::bitwise_cast< value_type >(operations::load(m_storage.value, order));
    }

    BOOST_FORCEINLINE value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return atomics::detail::bitwise_cast< value_type >(operations::exchange(m_storage.value, atomics::detail::bitwise_cast< storage_type >(v), order));
    }

    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = atomics::detail::bitwise_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_strong(m_storage.value, old_value, atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order);
        expected = atomics::detail::bitwise_cast< value_type >(old_value);
        return res;
    }

    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = atomics::detail::bitwise_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_weak(m_storage.value, old_value, atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order);
        expected = atomics::detail::bitwise_cast< value_type >(old_value);
        return res;
    }

    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_weak(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    BOOST_FORCEINLINE bool is_lock_free() const volatile BOOST_NOEXCEPT
    {
        return operations::is_lock_free(m_storage.value);
    }

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
};
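// Usage sketch for the user-defined type specialization (illustrative; "color" is a
// hypothetical enum). Values are converted to and from storage_type with bitwise_cast,
// so compare_exchange_* compare the stored object representations rather than calling
// any user-provided operator==:
//
//   enum color { red, green, blue };
//   boost::atomics::atomic< color > c(red);
//   color expected = red;
//   c.compare_exchange_strong(expected, blue); // succeeds; c now holds blue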


//! Implementation for pointers
template< typename T >
class base_atomic< T*, void* >
{
private:
    typedef T* value_type;
    typedef std::ptrdiff_t difference_type;
    typedef atomics::detail::operations< storage_size_of< value_type >::value, false > operations;

protected:
    typedef value_type value_arg_type;

public:
    typedef typename operations::storage_type storage_type;

protected:
    typename operations::aligned_storage_type m_storage;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(), {})
    BOOST_FORCEINLINE explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : m_storage(atomics::detail::bitwise_cast< storage_type >(v))
    {
    }

    BOOST_FORCEINLINE void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_consume);
        BOOST_ASSERT(order != memory_order_acquire);
        BOOST_ASSERT(order != memory_order_acq_rel);

        operations::store(m_storage.value, atomics::detail::bitwise_cast< storage_type >(v), order);
    }

    BOOST_FORCEINLINE value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_release);
        BOOST_ASSERT(order != memory_order_acq_rel);

        return atomics::detail::bitwise_cast< value_type >(operations::load(m_storage.value, order));
    }

    BOOST_FORCEINLINE value_type fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return atomics::detail::bitwise_cast< value_type >(operations::fetch_add(m_storage.value, static_cast< storage_type >(v * sizeof(T)), order));
    }

    BOOST_FORCEINLINE value_type fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return atomics::detail::bitwise_cast< value_type >(operations::fetch_sub(m_storage.value, static_cast< storage_type >(v * sizeof(T)), order));
    }

    BOOST_FORCEINLINE value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return atomics::detail::bitwise_cast< value_type >(operations::exchange(m_storage.value, atomics::detail::bitwise_cast< storage_type >(v), order));
    }

    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = atomics::detail::bitwise_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_strong(m_storage.value, old_value, atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order);
        expected = atomics::detail::bitwise_cast< value_type >(old_value);
        return res;
    }

    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = atomics::detail::bitwise_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_weak(m_storage.value, old_value, atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order);
        expected = atomics::detail::bitwise_cast< value_type >(old_value);
        return res;
    }

    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_weak(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    BOOST_FORCEINLINE bool is_lock_free() const volatile BOOST_NOEXCEPT
    {
        return operations::is_lock_free(m_storage.value);
    }

    BOOST_FORCEINLINE value_type operator++(int) volatile BOOST_NOEXCEPT
    {
        return fetch_add(1);
    }

    BOOST_FORCEINLINE value_type operator++() volatile BOOST_NOEXCEPT
    {
        return fetch_add(1) + 1;
    }

    BOOST_FORCEINLINE value_type operator--(int) volatile BOOST_NOEXCEPT
    {
        return fetch_sub(1);
    }

    BOOST_FORCEINLINE value_type operator--() volatile BOOST_NOEXCEPT
    {
        return fetch_sub(1) - 1;
    }

    BOOST_FORCEINLINE value_type operator+=(difference_type v) volatile BOOST_NOEXCEPT
    {
        return fetch_add(v) + v;
    }

    BOOST_FORCEINLINE value_type operator-=(difference_type v) volatile BOOST_NOEXCEPT
    {
        return fetch_sub(v) - v;
    }

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
};
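// Usage sketch for the object pointer specialization (illustrative; "buffer" is a
// hypothetical array). Pointer arithmetic is scaled by sizeof(T), which is why
// fetch_add/fetch_sub above multiply the difference by sizeof(T) before handing it to
// the underlying integer operations:
//
//   int buffer[16] = {};
//   boost::atomics::atomic< int* > p(buffer);
//   p.fetch_add(4);    // returns buffer, p now points to buffer + 4
//   int* q = (p -= 2); // q == buffer + 2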


//! Implementation for void pointers
template< >
class base_atomic< void*, void* >
{
private:
    typedef void* value_type;
    typedef std::ptrdiff_t difference_type;
    typedef atomics::detail::operations< storage_size_of< value_type >::value, false > operations;

protected:
    typedef value_type value_arg_type;

public:
    typedef operations::storage_type storage_type;

protected:
    operations::aligned_storage_type m_storage;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(), {})
    BOOST_FORCEINLINE explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : m_storage(atomics::detail::bitwise_cast< storage_type >(v))
    {
    }

    BOOST_FORCEINLINE void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_consume);
        BOOST_ASSERT(order != memory_order_acquire);
        BOOST_ASSERT(order != memory_order_acq_rel);

        operations::store(m_storage.value, atomics::detail::bitwise_cast< storage_type >(v), order);
    }

    BOOST_FORCEINLINE value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(order != memory_order_release);
        BOOST_ASSERT(order != memory_order_acq_rel);

        return atomics::detail::bitwise_cast< value_type >(operations::load(m_storage.value, order));
    }

    BOOST_FORCEINLINE value_type fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return atomics::detail::bitwise_cast< value_type >(operations::fetch_add(m_storage.value, static_cast< storage_type >(v), order));
    }

    BOOST_FORCEINLINE value_type fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return atomics::detail::bitwise_cast< value_type >(operations::fetch_sub(m_storage.value, static_cast< storage_type >(v), order));
    }

    BOOST_FORCEINLINE value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return atomics::detail::bitwise_cast< value_type >(operations::exchange(m_storage.value, atomics::detail::bitwise_cast< storage_type >(v), order));
    }

    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = atomics::detail::bitwise_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_strong(m_storage.value, old_value, atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order);
        expected = atomics::detail::bitwise_cast< value_type >(old_value);
        return res;
    }

    BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        BOOST_ASSERT(failure_order != memory_order_release);
        BOOST_ASSERT(failure_order != memory_order_acq_rel);
        BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order));

        storage_type old_value = atomics::detail::bitwise_cast< storage_type >(expected);
        const bool res = operations::compare_exchange_weak(m_storage.value, old_value, atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order);
        expected = atomics::detail::bitwise_cast< value_type >(old_value);
        return res;
    }

    BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_weak(expected, desired, order, atomics::detail::deduce_failure_order(order));
    }

    BOOST_FORCEINLINE bool is_lock_free() const volatile BOOST_NOEXCEPT
    {
        return operations::is_lock_free(m_storage.value);
    }

    BOOST_FORCEINLINE value_type operator++(int) volatile BOOST_NOEXCEPT
    {
        return fetch_add(1);
    }

    BOOST_FORCEINLINE value_type operator++() volatile BOOST_NOEXCEPT
    {
        return (char*)fetch_add(1) + 1;
    }

    BOOST_FORCEINLINE value_type operator--(int) volatile BOOST_NOEXCEPT
    {
        return fetch_sub(1);
    }

    BOOST_FORCEINLINE value_type operator--() volatile BOOST_NOEXCEPT
    {
        return (char*)fetch_sub(1) - 1;
    }

    BOOST_FORCEINLINE value_type operator+=(difference_type v) volatile BOOST_NOEXCEPT
    {
        return (char*)fetch_add(v) + v;
    }

    BOOST_FORCEINLINE value_type operator-=(difference_type v) volatile BOOST_NOEXCEPT
    {
        return (char*)fetch_sub(v) - v;
    }

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
};
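// Illustrative note: for atomic< void* > the difference is interpreted as a byte offset
// (no sizeof scaling is possible for void), which is why the operators above cast the
// fetched pointer to char* before applying the offset:
//
//   char blob[8] = {};
//   boost::atomics::atomic< void* > p(blob);
//   p.fetch_add(3); // returns blob, p now points 3 bytes past blob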

} // namespace detail

template< typename T >
class atomic :
    public atomics::detail::base_atomic< T, typename atomics::detail::classify< T >::type >
{
private:
    typedef T value_type;
    typedef atomics::detail::base_atomic< T, typename atomics::detail::classify< T >::type > base_type;
    typedef typename base_type::value_arg_type value_arg_type;

public:
    typedef typename base_type::storage_type storage_type;

public:
    BOOST_DEFAULTED_FUNCTION(atomic(), BOOST_NOEXCEPT {})

    // NOTE: The constructor is made explicit because gcc 4.7 complains that
    //       operator=(value_arg_type) is considered ambiguous with operator=(atomic const&)
    //       in assignment expressions, even though conversion to atomic<> is less preferred
    //       than conversion to value_arg_type.
    BOOST_FORCEINLINE explicit BOOST_CONSTEXPR atomic(value_arg_type v) BOOST_NOEXCEPT : base_type(v) {}

    BOOST_FORCEINLINE value_type operator= (value_arg_type v) volatile BOOST_NOEXCEPT
    {
        this->store(v);
        return v;
    }

    BOOST_FORCEINLINE operator value_type() volatile const BOOST_NOEXCEPT
    {
        return this->load();
    }

    BOOST_FORCEINLINE storage_type& storage() BOOST_NOEXCEPT { return this->m_storage.value; }
    BOOST_FORCEINLINE storage_type volatile& storage() volatile BOOST_NOEXCEPT { return this->m_storage.value; }
    BOOST_FORCEINLINE storage_type const& storage() const BOOST_NOEXCEPT { return this->m_storage.value; }
    BOOST_FORCEINLINE storage_type const volatile& storage() const volatile BOOST_NOEXCEPT { return this->m_storage.value; }

    BOOST_DELETED_FUNCTION(atomic(atomic const&))
    BOOST_DELETED_FUNCTION(atomic& operator= (atomic const&))
    BOOST_DELETED_FUNCTION(atomic& operator= (atomic const&) volatile)
};
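// Usage sketch for the atomic<> front end (illustrative only):
//
//   boost::atomics::atomic< unsigned int > flags(0u);
//   flags |= 0x4u;          // seq_cst read-modify-write via the integer base class
//   unsigned int v = flags; // the conversion operator calls load()
//   flags = 0u;             // operator= calls store() and returns the stored value
//
// The storage() accessors expose the underlying storage_type representation; they are an
// extension over the std::atomic<> interface.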

typedef atomic< char > atomic_char;
typedef atomic< unsigned char > atomic_uchar;
typedef atomic< signed char > atomic_schar;
typedef atomic< uint8_t > atomic_uint8_t;
typedef atomic< int8_t > atomic_int8_t;
typedef atomic< unsigned short > atomic_ushort;
typedef atomic< short > atomic_short;
typedef atomic< uint16_t > atomic_uint16_t;
typedef atomic< int16_t > atomic_int16_t;
typedef atomic< unsigned int > atomic_uint;
typedef atomic< int > atomic_int;
typedef atomic< uint32_t > atomic_uint32_t;
typedef atomic< int32_t > atomic_int32_t;
typedef atomic< unsigned long > atomic_ulong;
typedef atomic< long > atomic_long;
typedef atomic< uint64_t > atomic_uint64_t;
typedef atomic< int64_t > atomic_int64_t;
#ifdef BOOST_HAS_LONG_LONG
typedef atomic< boost::ulong_long_type > atomic_ullong;
typedef atomic< boost::long_long_type > atomic_llong;
#endif
typedef atomic< void* > atomic_address;
typedef atomic< bool > atomic_bool;
typedef atomic< wchar_t > atomic_wchar_t;
#if !defined(BOOST_NO_CXX11_CHAR16_T)
typedef atomic< char16_t > atomic_char16_t;
#endif
#if !defined(BOOST_NO_CXX11_CHAR32_T)
typedef atomic< char32_t > atomic_char32_t;
#endif

typedef atomic< int_least8_t > atomic_int_least8_t;
typedef atomic< uint_least8_t > atomic_uint_least8_t;
typedef atomic< int_least16_t > atomic_int_least16_t;
typedef atomic< uint_least16_t > atomic_uint_least16_t;
typedef atomic< int_least32_t > atomic_int_least32_t;
typedef atomic< uint_least32_t > atomic_uint_least32_t;
typedef atomic< int_least64_t > atomic_int_least64_t;
typedef atomic< uint_least64_t > atomic_uint_least64_t;
typedef atomic< int_fast8_t > atomic_int_fast8_t;
typedef atomic< uint_fast8_t > atomic_uint_fast8_t;
typedef atomic< int_fast16_t > atomic_int_fast16_t;
typedef atomic< uint_fast16_t > atomic_uint_fast16_t;
typedef atomic< int_fast32_t > atomic_int_fast32_t;
typedef atomic< uint_fast32_t > atomic_uint_fast32_t;
typedef atomic< int_fast64_t > atomic_int_fast64_t;
typedef atomic< uint_fast64_t > atomic_uint_fast64_t;
typedef atomic< intmax_t > atomic_intmax_t;
typedef atomic< uintmax_t > atomic_uintmax_t;

typedef atomic< std::size_t > atomic_size_t;
typedef atomic< std::ptrdiff_t > atomic_ptrdiff_t;

#if defined(BOOST_HAS_INTPTR_T)
typedef atomic< intptr_t > atomic_intptr_t;
typedef atomic< uintptr_t > atomic_uintptr_t;
#endif

} // namespace atomics
} // namespace boost

#if defined(BOOST_MSVC)
#pragma warning(pop)
#endif

#endif // BOOST_ATOMIC_DETAIL_ATOMIC_TEMPLATE_HPP_INCLUDED_