/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2009 Helge Bahmann
 * Copyright (c) 2012 Tim Blechmann
 * Copyright (c) 2014 Andrey Semashev
 */
/*!
 * \file   atomic/detail/core_arch_ops_msvc_arm.hpp
 *
 * This header contains an implementation of the \c core_arch_operations template.
 */

#ifndef BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_MSVC_ARM_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_MSVC_ARM_HPP_INCLUDED_

#include <cstddef>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/interlocked.hpp>
#include <boost/atomic/detail/storage_traits.hpp>
#include <boost/atomic/detail/core_arch_operations_fwd.hpp>
#include <boost/atomic/detail/type_traits/make_signed.hpp>
#include <boost/atomic/detail/ops_msvc_common.hpp>
#include <boost/atomic/detail/fence_arch_operations.hpp>
#include <boost/atomic/detail/header.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

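// The __iso_volatile_* intrinsics perform plain (ISO C++ semantics) volatile
// loads and stores, without the implicit acquire/release ordering that MSVC
// attaches to volatile accesses on ARM under /volatile:ms. Declaring them
// here makes them available without pulling in <intrin.h>.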
extern "C" {
__int8 __iso_volatile_load8(const volatile __int8*);
__int16 __iso_volatile_load16(const volatile __int16*);
__int32 __iso_volatile_load32(const volatile __int32*);
__int64 __iso_volatile_load64(const volatile __int64*);
void __iso_volatile_store8(volatile __int8*, __int8);
void __iso_volatile_store16(volatile __int16*, __int16);
void __iso_volatile_store32(volatile __int32*, __int32);
void __iso_volatile_store64(volatile __int64*, __int64);
}
#if defined(BOOST_MSVC)
#pragma intrinsic(__iso_volatile_load8)
#pragma intrinsic(__iso_volatile_load16)
#pragma intrinsic(__iso_volatile_load32)
#pragma intrinsic(__iso_volatile_load64)
#pragma intrinsic(__iso_volatile_store8)
#pragma intrinsic(__iso_volatile_store16)
#pragma intrinsic(__iso_volatile_store32)
#pragma intrinsic(__iso_volatile_store64)
#endif

#define BOOST_ATOMIC_DETAIL_ARM_LOAD8(p) __iso_volatile_load8((const volatile __int8*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD16(p) __iso_volatile_load16((const volatile __int16*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD32(p) __iso_volatile_load32((const volatile __int32*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD64(p) __iso_volatile_load64((const volatile __int64*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_STORE8(p, v) __iso_volatile_store8((volatile __int8*)(p), (__int8)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE16(p, v) __iso_volatile_store16((volatile __int16*)(p), (__int16)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE32(p, v) __iso_volatile_store32((volatile __int32*)(p), (__int32)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE64(p, v) __iso_volatile_store64((volatile __int64*)(p), (__int64)(v))

namespace boost {
namespace atomics {
namespace detail {

// A note about memory_order_consume. Technically, this architecture allows one to avoid
// the memory barrier after a consume load, since it supports data dependency ordering.
// However, some compiler optimizations may break seemingly valid code that relies on data
// dependency tracking by injecting bogus branches to aid out-of-order execution.
// This may happen not only in Boost.Atomic code but also in the user's code, over which we
// have no control. See this thread: http://lists.boost.org/Archives/boost/2014/06/213890.php.
// For this reason we promote memory_order_consume to memory_order_acquire.
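// A note on the bitwise order tests below: boost/memory_order.hpp encodes the
// memory orders as bit flags (relaxed = 0, consume = 1, acquire = 2,
// release = 4, acq_rel = acquire | release, seq_cst = acq_rel | 8), which is
// what makes the mask checks in the fence helpers and the flag arithmetic in
// cas_common_order valid.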

struct core_arch_operations_msvc_arm_base
{
    static BOOST_CONSTEXPR_OR_CONST bool full_cas_based = false;
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;

    static BOOST_FORCEINLINE void fence_before_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if ((static_cast< unsigned int >(order) & static_cast< unsigned int >(memory_order_release)) != 0u)
            fence_arch_operations::hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if (order == memory_order_seq_cst)
            fence_arch_operations::hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_load(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
            fence_arch_operations::hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE BOOST_CONSTEXPR memory_order cas_common_order(memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        // Combine the order flags and promote memory_order_consume to memory_order_acquire.
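        // Worked example: success_order = memory_order_release (4) with
        // failure_order = memory_order_consume (1) yields
        // (5 & ~1) | ((5 & 1) << 1) == 4 | 2 == memory_order_acq_rel,
        // i.e. consume is promoted to acquire.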
        return static_cast< memory_order >(((static_cast< unsigned int >(failure_order) | static_cast< unsigned int >(success_order)) & ~static_cast< unsigned int >(memory_order_consume))
            | (((static_cast< unsigned int >(failure_order) | static_cast< unsigned int >(success_order)) & static_cast< unsigned int >(memory_order_consume)) << 1u));
    }
};

template< std::size_t Size, bool Signed, bool Interprocess, typename Derived >
struct core_arch_operations_msvc_arm :
    public core_arch_operations_msvc_arm_base
{
    typedef typename storage_traits< Size >::type storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = Size;
    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = storage_traits< Size >::alignment;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
    static BOOST_CONSTEXPR_OR_CONST bool is_interprocess = Interprocess;

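    // Subtraction is delegated to the derived class' fetch_add with the
    // negated operand; the negation is performed on a signed type to avoid
    // the compiler warning for unary minus applied to an unsigned type.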
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        typedef typename boost::atomics::detail::make_signed< storage_type >::type signed_storage_type;
        return Derived::fetch_add(storage, static_cast< storage_type >(-static_cast< signed_storage_type >(v)), order);
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!Derived::exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        Derived::store(storage, (storage_type)0, order);
    }
};

template< bool Signed, bool Interprocess >
struct core_arch_operations< 1u, Signed, Interprocess > :
    public core_arch_operations_msvc_arm< 1u, Signed, Interprocess, core_arch_operations< 1u, Signed, Interprocess > >
{
    typedef core_arch_operations_msvc_arm< 1u, Signed, Interprocess, core_arch_operations< 1u, Signed, Interprocess > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE8(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD8(&storage);
        base_type::fence_after_load(order);
        return v;
    }

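    // Each read-modify-write operation below (and its counterparts in the
    // larger-size specializations) selects the interlocked intrinsic variant
    // matching the requested order: on ARM, the _RELAXED/_ACQUIRE/_RELEASE
    // macro variants correspond to MSVC's _nf ("no fence"), _acq and _rel
    // intrinsic forms, so the weakest sufficient hardware ordering is used.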
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&storage, v));
            break;
        }
        return v;
    }

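    // Note the MSVC interlocked CAS argument order: (destination, desired, expected).
    // The intrinsic returns the previous value of the destination, so success is
    // detected by comparing that value with the expected one.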
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed, bool Interprocess >
struct core_arch_operations< 2u, Signed, Interprocess > :
    public core_arch_operations_msvc_arm< 2u, Signed, Interprocess, core_arch_operations< 2u, Signed, Interprocess > >
{
    typedef core_arch_operations_msvc_arm< 2u, Signed, Interprocess, core_arch_operations< 2u, Signed, Interprocess > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE16(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD16(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed, bool Interprocess >
struct core_arch_operations< 4u, Signed, Interprocess > :
    public core_arch_operations_msvc_arm< 4u, Signed, Interprocess, core_arch_operations< 4u, Signed, Interprocess > >
{
    typedef core_arch_operations_msvc_arm< 4u, Signed, Interprocess, core_arch_operations< 4u, Signed, Interprocess > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE32(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD32(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed, bool Interprocess >
struct core_arch_operations< 8u, Signed, Interprocess > :
    public core_arch_operations_msvc_arm< 8u, Signed, Interprocess, core_arch_operations< 8u, Signed, Interprocess > >
{
    typedef core_arch_operations_msvc_arm< 8u, Signed, Interprocess, core_arch_operations< 8u, Signed, Interprocess > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE64(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD64(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&storage, v));
            break;
        }
        return v;
    }
};
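
// A minimal usage sketch (illustrative only; the boost::atomic<> front end is
// the normal caller of these operations):
//
//   typedef core_arch_operations< 4u, false, false > ops;
//   ops::storage_type s = 0u;
//   ops::store(s, 42u, memory_order_release);
//   ops::storage_type old = ops::fetch_add(s, 1u, memory_order_acq_rel);
//   // old == 42u, s == 43u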

} // namespace detail
} // namespace atomics
} // namespace boost

#undef BOOST_ATOMIC_DETAIL_ARM_LOAD8
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD16
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD32
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD64
#undef BOOST_ATOMIC_DETAIL_ARM_STORE8
#undef BOOST_ATOMIC_DETAIL_ARM_STORE16
#undef BOOST_ATOMIC_DETAIL_ARM_STORE32
#undef BOOST_ATOMIC_DETAIL_ARM_STORE64

#include <boost/atomic/detail/footer.hpp>

#endif // BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_MSVC_ARM_HPP_INCLUDED_