/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2009 Helge Bahmann
 * Copyright (c) 2012 Tim Blechmann
 * Copyright (c) 2014 Andrey Semashev
 */
/*!
 * \file   atomic/detail/ops_msvc_arm.hpp
 *
 * This header contains the implementation of the \c operations template.
 */

#ifndef BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_

#include <intrin.h>
#include <boost/memory_order.hpp>
#include <boost/type_traits/make_signed.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/interlocked.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/detail/operations_fwd.hpp>
#include <boost/atomic/capabilities.hpp>
#include <boost/atomic/detail/ops_msvc_common.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

#define BOOST_ATOMIC_DETAIL_ARM_LOAD8(p) __iso_volatile_load8((const volatile __int8*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD16(p) __iso_volatile_load16((const volatile __int16*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD32(p) __iso_volatile_load32((const volatile __int32*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD64(p) __iso_volatile_load64((const volatile __int64*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_STORE8(p, v) __iso_volatile_store8((volatile __int8*)(p), (__int8)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE16(p, v) __iso_volatile_store16((volatile __int16*)(p), (__int16)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE32(p, v) __iso_volatile_store32((volatile __int32*)(p), (__int32)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE64(p, v) __iso_volatile_store64((volatile __int64*)(p), (__int64)(v))

namespace boost {
namespace atomics {
namespace detail {

// A note about memory_order_consume. Technically, this architecture allows one to avoid
// the memory barrier after a consume load, since it supports data dependency ordering.
// However, some compiler optimizations may break seemingly valid code that relies on data
// dependency tracking by injecting bogus branches to aid out-of-order execution.
// This may happen not only in Boost.Atomic code but also in the user's code, which we
// have no control over. See this thread: http://lists.boost.org/Archives/boost/2014/06/213890.php.
// For this reason we promote memory_order_consume to memory_order_acquire.

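// The helpers below rely on the bitmask encoding of boost::memory_order (see
// boost/memory_order.hpp): memory_order_acquire and memory_order_release are individual
// bits, memory_order_acq_rel and memory_order_seq_cst contain both of them, and
// memory_order_acquire == (memory_order_consume << 1).
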
struct msvc_arm_operations_base
{
    static BOOST_FORCEINLINE void hardware_full_fence() BOOST_NOEXCEPT
    {
        __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
    }

    static BOOST_FORCEINLINE void fence_before_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

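        // The release bit is set in release, acq_rel and seq_cst orders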
        if ((order & memory_order_release) != 0)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

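        // Only seq_cst requires a full fence after the store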
        if (order == memory_order_seq_cst)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_load(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

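        // The consume or acquire bit is set in consume, acquire, acq_rel and seq_cst orders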
        if ((order & (memory_order_consume | memory_order_acquire)) != 0)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE BOOST_CONSTEXPR memory_order cas_common_order(memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        // Combine order flags together and promote memory_order_consume to memory_order_acquire
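        // (the left shift maps the consume bit onto the acquire bit: memory_order_consume << 1u == memory_order_acquire)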
        return static_cast< memory_order >(((failure_order | success_order) & ~memory_order_consume) | (((failure_order | success_order) & memory_order_consume) << 1u));
    }
};

template< typename T, typename Derived >
struct msvc_arm_operations :
    public msvc_arm_operations_base
{
    typedef T storage_type;

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        typedef typename make_signed< storage_type >::type signed_storage_type;
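        // Subtraction is implemented as addition of the negated operand; the negation is performed on the signed type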
        return Derived::fetch_add(storage, static_cast< storage_type >(-static_cast< signed_storage_type >(v)), order);
    }

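    // The interlocked compare-exchange never fails spuriously, so the weak form can forward to the strong one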
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!Derived::exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        Derived::store(storage, (storage_type)0, order);
    }

    static BOOST_FORCEINLINE bool is_lock_free(storage_type const volatile&) BOOST_NOEXCEPT
    {
        return true;
    }
};

template< bool Signed >
struct operations< 1u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE8(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD8(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
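        // The _RELAXED/_ACQUIRE/_RELEASE macro variants select the matching interlocked intrinsic variants
        // available on ARM; the unsuffixed variant implies a full barrier and serves acq_rel and seq_cst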
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 2u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE16(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD16(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 4u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE32(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD32(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 8u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE64(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD64(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&storage, v));
            break;
        }
        return v;
    }
};


BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
{
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    if (order != memory_order_relaxed)
        msvc_arm_operations_base::hardware_full_fence();
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}

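// Signal fences only need to prevent compiler reordering; no hardware barrier is required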
BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
{
    if (order != memory_order_relaxed)
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}

} // namespace detail
} // namespace atomics
} // namespace boost

#undef BOOST_ATOMIC_DETAIL_ARM_LOAD8
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD16
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD32
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD64
#undef BOOST_ATOMIC_DETAIL_ARM_STORE8
#undef BOOST_ATOMIC_DETAIL_ARM_STORE16
#undef BOOST_ATOMIC_DETAIL_ARM_STORE32
#undef BOOST_ATOMIC_DETAIL_ARM_STORE64

#endif // BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_