1 /*
2  * Distributed under the Boost Software License, Version 1.0.
3  * (See accompanying file LICENSE_1_0.txt or copy at
4  * http://www.boost.org/LICENSE_1_0.txt)
5  *
6  * Copyright (c) 2020 Andrey Semashev
7  */
8 /*!
9  * \file   atomic/detail/core_arch_ops_gcc_aarch64.hpp
10  *
11  * This header contains implementation of the \c core_arch_operations template.
12  */
13 
14 #ifndef BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_GCC_AARCH64_HPP_INCLUDED_
15 #define BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_GCC_AARCH64_HPP_INCLUDED_
16 
17 #include <cstddef>
18 #include <boost/cstdint.hpp>
19 #include <boost/memory_order.hpp>
20 #include <boost/atomic/detail/config.hpp>
21 #include <boost/atomic/detail/storage_traits.hpp>
22 #include <boost/atomic/detail/core_arch_operations_fwd.hpp>
23 #include <boost/atomic/detail/capabilities.hpp>
24 #include <boost/atomic/detail/ops_gcc_aarch64_common.hpp>
25 #include <boost/atomic/detail/header.hpp>
26 
27 #ifdef BOOST_HAS_PRAGMA_ONCE
28 #pragma once
29 #endif
30 
31 namespace boost {
32 namespace atomics {
33 namespace detail {
34 
// Common base class for the size-specific implementations below. AArch64 implements
// atomics directly with LL/SC (ldxr/stxr) or LSE instructions, so the operations are
// not emulated through a compare-and-swap loop over the full storage
// (full_cas_based == false) and are always lock-free.
struct core_arch_operations_gcc_aarch64_base
{
    static BOOST_CONSTEXPR_OR_CONST bool full_cas_based = false;
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;
};
40 
41 // Due to bug https://gcc.gnu.org/bugzilla/show_bug.cgi?id=63359 we have to explicitly specify size of the registers
42 // to use in the asm blocks below. Use %w prefix for the 32-bit registers and %x for 64-bit ones.
43 
44 // A note about compare_exchange implementations. Since failure_order must never include release semantics and
45 // must not be stronger than success_order, we can always use success_order to select instructions. Thus, when
46 // CAS fails, only the acquire semantics of success_order is applied, which may be stronger than failure_order.
47 
template< bool Signed, bool Interprocess >
struct core_arch_operations< 1u, Signed, Interprocess > :
    public core_arch_operations_gcc_aarch64_base
{
    typedef typename storage_traits< 1u >::type storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 1u;
    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = 1u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
    static BOOST_CONSTEXPR_OR_CONST bool is_interprocess = Interprocess;

    // Atomic store. Orders that include release semantics (release, acq_rel, seq_cst
    // all have the memory_order_release bit set) use stlrb; all other orders compile
    // to a plain byte store.
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        if ((static_cast< unsigned int >(order) & static_cast< unsigned int >(memory_order_release)) != 0u)
        {
            __asm__ __volatile__
            (
                "stlrb %w[value], %[storage]\n\t"
                : [storage] "=Q" (storage)
                : [value] "r" (v)
                : "memory"
            );
        }
        else
        {
            storage = v;
        }
    }

    // Atomic load. When the CPU supports RCpc (ldapr*), consume/acquire loads use the
    // weaker ldaprb; seq_cst loads (and consume/acquire without RCpc) use ldarb.
    // Relaxed loads are plain loads.
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v;
        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
        {
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_RCPC)
            if (order == memory_order_consume || order == memory_order_acquire)
            {
                __asm__ __volatile__
                (
                    "ldaprb %w[value], %[storage]\n\t"
                    : [value] "=r" (v)
                    : [storage] "Q" (storage)
                    : "memory"
                );
            }
            else
#endif
            {
                __asm__ __volatile__
                (
                    "ldarb %w[value], %[storage]\n\t"
                    : [value] "=r" (v)
                    : [storage] "Q" (storage)
                    : "memory"
                );
            }
        }
        else
        {
            v = storage;
        }

        return v;
    }

    // Atomic exchange. With LSE this is a single swpb; otherwise an ldxrb/stxrb retry
    // loop. BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH instantiates the MO_INSN macro with
    // the acquire/release mnemonic infixes (ld_mo, st_mo) that match the requested
    // memory order.
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "swp" ld_mo st_mo "b %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[value], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Weak CAS. As noted in the file-level comment, failure_order never needs separate
    // instruction selection: it cannot be stronger than success_order, so success_order
    // alone picks the instructions. With LSE this is a single casb; without it, a single
    // ldxrb/stxrb attempt that may fail spuriously (stxrb failure leaves success == 0).
    // The uxtb zero-extends expected so the 32-bit register compare is not affected by
    // whatever the upper bits of the register happen to contain.
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        original = expected;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "cas" ld_mo st_mo "b %w[original], %w[desired], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "+r" (original)\
            : [desired] "r" (desired)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
        bool success = original == expected;
#else
        bool success;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "uxtb %w[expected], %w[expected]\n\t"\
            "mov %w[success], #0\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "cmp %w[original], %w[expected]\n\t"\
            "b.ne 1f\n\t"\
            "st" st_mo "xrb %w[success], %w[desired], %[storage]\n\t"\
            "eor %w[success], %w[success], #1\n\t"\
            "1:\n\t"\
            : [success] "=&r" (success), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [desired] "r" (desired), [expected] "r" (expected)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
#endif
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        expected = original;
        return success;
    }

    // Strong CAS. Same scheme as the weak version, but the LL/SC fallback retries the
    // stxrb until it either succeeds or the loaded value no longer equals expected;
    // cset then derives success from the comparison flags.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        original = expected;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "cas" ld_mo st_mo "b %w[original], %w[desired], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "+r" (original)\
            : [desired] "r" (desired)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
        bool success = original == expected;
#else
        bool success;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "uxtb %w[expected], %w[expected]\n\t"\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "cmp %w[original], %w[expected]\n\t"\
            "b.ne 2f\n\t"\
            "st" st_mo "xrb %w[success], %w[desired], %[storage]\n\t"\
            "cbnz %w[success], 1b\n\t"\
            "2:\n\t"\
            "cset %w[success], eq\n\t"\
            : [success] "=&r" (success), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [desired] "r" (desired), [expected] "r" (expected)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
#endif
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        expected = original;
        return success;
    }

    // Fetch-add, returning the pre-operation value. With LSE this is a single ldaddb;
    // otherwise an LL/SC loop. The "Ir" constraint lets the compiler encode v as an
    // add immediate when possible.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldadd" ld_mo st_mo "b %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "add %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Ir" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Fetch-sub, returning the pre-operation value. LSE has no atomic subtract, so the
    // LSE path negates v and uses ldaddb; the LL/SC fallback subtracts directly.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        v = -v;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldadd" ld_mo st_mo "b %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );

#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "sub %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Ir" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Fetch-and, returning the pre-operation value. LSE provides bit-clear (ldclr,
    // i.e. AND with the complement), so the LSE path passes ~v; the LL/SC fallback
    // uses a plain "and". The "Kr" constraint allows a 32-bit logical immediate.
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        v = ~v;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldclr" ld_mo st_mo "b %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "and %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Fetch-or, returning the pre-operation value. LSE bit-set (ldset) or an LL/SC
    // loop with "orr".
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldset" ld_mo st_mo "b %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "orr %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Fetch-xor, returning the pre-operation value. LSE exclusive-or (ldeor) or an
    // LL/SC loop with "eor".
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldeor" ld_mo st_mo "b %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "eor %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Sets the value to 1 and returns true if it was previously nonzero.
    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    // Resets the value to 0 with the given memory order.
    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, (storage_type)0, order);
    }
};
433 
// 16-bit specialization. Mirrors the 8-bit implementation above, using the halfword
// instruction forms ("h" suffix, uxth) instead of the byte forms.
template< bool Signed, bool Interprocess >
struct core_arch_operations< 2u, Signed, Interprocess > :
    public core_arch_operations_gcc_aarch64_base
{
    typedef typename storage_traits< 2u >::type storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 2u;
    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = 2u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
    static BOOST_CONSTEXPR_OR_CONST bool is_interprocess = Interprocess;

    // Atomic store: stlrh for orders containing release semantics, plain store otherwise.
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        if ((static_cast< unsigned int >(order) & static_cast< unsigned int >(memory_order_release)) != 0u)
        {
            __asm__ __volatile__
            (
                "stlrh %w[value], %[storage]\n\t"
                : [storage] "=Q" (storage)
                : [value] "r" (v)
                : "memory"
            );
        }
        else
        {
            storage = v;
        }
    }

    // Atomic load: ldaprh (RCpc) for consume/acquire when available, ldarh otherwise
    // for ordered loads, plain load for relaxed.
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v;
        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
        {
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_RCPC)
            if (order == memory_order_consume || order == memory_order_acquire)
            {
                __asm__ __volatile__
                (
                    "ldaprh %w[value], %[storage]\n\t"
                    : [value] "=r" (v)
                    : [storage] "Q" (storage)
                    : "memory"
                );
            }
            else
#endif
            {
                __asm__ __volatile__
                (
                    "ldarh %w[value], %[storage]\n\t"
                    : [value] "=r" (v)
                    : [storage] "Q" (storage)
                    : "memory"
                );
            }
        }
        else
        {
            v = storage;
        }

        return v;
    }

    // Atomic exchange: single LSE swph, or an ldxrh/stxrh retry loop.
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "swp" ld_mo st_mo "h %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[value], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Weak CAS: single LSE cash, or a single ldxrh/stxrh attempt that may fail
    // spuriously. Instructions are selected from success_order only (see file-level
    // comment); uxth zero-extends expected for the 32-bit register compare.
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        original = expected;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "cas" ld_mo st_mo "h %w[original], %w[desired], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "+r" (original)\
            : [desired] "r" (desired)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
        bool success = original == expected;
#else
        bool success;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "uxth %w[expected], %w[expected]\n\t"\
            "mov %w[success], #0\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "cmp %w[original], %w[expected]\n\t"\
            "b.ne 1f\n\t"\
            "st" st_mo "xrh %w[success], %w[desired], %[storage]\n\t"\
            "eor %w[success], %w[success], #1\n\t"\
            "1:\n\t"\
            : [success] "=&r" (success), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [desired] "r" (desired), [expected] "r" (expected)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
#endif
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        expected = original;
        return success;
    }

    // Strong CAS: like the weak version, but the LL/SC fallback retries stxrh until it
    // succeeds or the loaded value differs; cset derives success from the flags.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        original = expected;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "cas" ld_mo st_mo "h %w[original], %w[desired], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "+r" (original)\
            : [desired] "r" (desired)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
        bool success = original == expected;
#else
        bool success;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "uxth %w[expected], %w[expected]\n\t"\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "cmp %w[original], %w[expected]\n\t"\
            "b.ne 2f\n\t"\
            "st" st_mo "xrh %w[success], %w[desired], %[storage]\n\t"\
            "cbnz %w[success], 1b\n\t"\
            "2:\n\t"\
            "cset %w[success], eq\n\t"\
            : [success] "=&r" (success), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [desired] "r" (desired), [expected] "r" (expected)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
#endif
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        expected = original;
        return success;
    }

    // Fetch-add, returning the pre-operation value: LSE ldaddh or an LL/SC loop.
    // "Ir" allows v to be encoded as an add immediate.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldadd" ld_mo st_mo "h %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "add %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Ir" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Fetch-sub, returning the pre-operation value. LSE has no atomic subtract, so the
    // LSE path negates v and uses ldaddh; the LL/SC fallback subtracts directly.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        v = -v;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldadd" ld_mo st_mo "h %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );

#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "sub %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Ir" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Fetch-and, returning the pre-operation value. LSE bit-clear (ldclr, AND with the
    // complement) takes ~v; the LL/SC fallback uses a plain "and". "Kr" allows a
    // 32-bit logical immediate.
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        v = ~v;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldclr" ld_mo st_mo "h %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "and %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Fetch-or, returning the pre-operation value: LSE ldset or an LL/SC loop with "orr".
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldset" ld_mo st_mo "h %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "orr %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Fetch-xor, returning the pre-operation value: LSE ldeor or an LL/SC loop with "eor".
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldeor" ld_mo st_mo "h %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "eor %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Sets the value to 1 and returns true if it was previously nonzero.
    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    // Resets the value to 0 with the given memory order.
    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, (storage_type)0, order);
    }
};
819 
// Atomic operations for 32-bit storage. The asm blocks below use the %w (32-bit)
// views of the general purpose registers; see the note near the top of the file.
template< bool Signed, bool Interprocess >
struct core_arch_operations< 4u, Signed, Interprocess > :
    public core_arch_operations_gcc_aarch64_base
{
    typedef typename storage_traits< 4u >::type storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 4u;
    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = 4u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
    static BOOST_CONSTEXPR_OR_CONST bool is_interprocess = Interprocess;

    // Atomically stores v into storage, applying the given memory order.
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        // Orders containing release semantics (release, acq_rel, seq_cst) require
        // a store-release (stlr); other orders are satisfied by a plain volatile store.
        if ((static_cast< unsigned int >(order) & static_cast< unsigned int >(memory_order_release)) != 0u)
        {
            __asm__ __volatile__
            (
                "stlr %w[value], %[storage]\n\t"
                : [storage] "=Q" (storage)
                : [value] "r" (v)
                : "memory"
            );
        }
        else
        {
            storage = v;
        }
    }

    // Atomically loads and returns the value of storage, applying the given memory order.
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v;
        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
        {
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_RCPC)
            // With RCPC available, consume and acquire loads can use the weaker
            // ldapr instruction; seq_cst loads still require ldar below.
            if (order == memory_order_consume || order == memory_order_acquire)
            {
                __asm__ __volatile__
                (
                    "ldapr %w[value], %[storage]\n\t"
                    : [value] "=r" (v)
                    : [storage] "Q" (storage)
                    : "memory"
                );
            }
            else
#endif
            {
                __asm__ __volatile__
                (
                    "ldar %w[value], %[storage]\n\t"
                    : [value] "=r" (v)
                    : [storage] "Q" (storage)
                    : "memory"
                );
            }
        }
        else
        {
            // Relaxed load: a plain volatile read is sufficient.
            v = storage;
        }

        return v;
    }

    // Atomically replaces the value of storage with v and returns the previous value.
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
// Single-instruction atomic swap (LSE). BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH
// below instantiates this macro with the acquire/release mnemonic suffixes
// matching "order".
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "swp" ld_mo st_mo " %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        uint32_t tmp;

// LL/SC loop: load-exclusive the old value, attempt to store-exclusive the new
// one, and retry while the exclusive store fails (tmp != 0).
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "st" st_mo "xr %w[tmp], %w[value], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Weak CAS: may fail spuriously. On return, "expected" holds the value
    // observed in storage. As explained in the note at the top of the file,
    // only success_order is used to select instructions; failure_order is
    // intentionally unused.
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        original = expected;

// Single-instruction CAS (LSE): "original" receives the value that was in storage.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "cas" ld_mo st_mo " %w[original], %w[desired], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "+r" (original)\
            : [desired] "r" (desired)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
        bool success = original == expected;
#else
        bool success;

// LL/SC CAS without a retry loop (weak form): stxr writes 0 on a successful
// store and 1 otherwise, so the final eor flips it into a "stored" flag.
// A failed store-exclusive surfaces as a spurious failure, which is permitted here.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "mov %w[success], #0\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "cmp %w[original], %w[expected]\n\t"\
            "b.ne 1f\n\t"\
            "st" st_mo "xr %w[success], %w[desired], %[storage]\n\t"\
            "eor %w[success], %w[success], #1\n\t"\
            "1:\n\t"\
            : [success] "=&r" (success), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [desired] "r" (desired), [expected] "Ir" (expected)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
#endif
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        expected = original;
        return success;
    }

    // Strong CAS: only fails if the stored value differs from "expected".
    // On return, "expected" holds the value observed in storage. failure_order
    // is intentionally unused (see the note at the top of the file).
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        original = expected;

// Single-instruction CAS (LSE): "original" receives the value that was in storage.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "cas" ld_mo st_mo " %w[original], %w[desired], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "+r" (original)\
            : [desired] "r" (desired)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
        bool success = original == expected;
#else
        bool success;

// LL/SC CAS with a retry loop (strong form): a failed store-exclusive restarts
// the loop instead of reporting failure; cset materializes the final comparison
// result as the success flag.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "cmp %w[original], %w[expected]\n\t"\
            "b.ne 2f\n\t"\
            "st" st_mo "xr %w[success], %w[desired], %[storage]\n\t"\
            "cbnz %w[success], 1b\n\t"\
            "2:\n\t"\
            "cset %w[success], eq\n\t"\
            : [success] "=&r" (success), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [desired] "r" (desired), [expected] "Ir" (expected)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
#endif
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        expected = original;
        return success;
    }

    // Atomically adds v to storage and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
// Single-instruction atomic add (LSE ldadd).
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldadd" ld_mo st_mo " %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

// LL/SC loop; "Ir" allows an arithmetic immediate or a register for the addend.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "add %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Ir" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Atomically subtracts v from storage and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        // LSE path: subtraction is performed as addition of the negated value.
        v = -v;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldadd" ld_mo st_mo " %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );

#else
        storage_type result;
        uint32_t tmp;

// LL/SC loop with an explicit sub instruction.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "sub %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Ir" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Atomically ANDs v into storage and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        // LSE path: ldclr clears the bits set in its operand, so AND with v is
        // expressed as a bit-clear with the complement of v.
        v = ~v;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldclr" ld_mo st_mo " %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

// LL/SC loop; "Kr" allows a 32-bit logical immediate or a register.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "and %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Atomically ORs v into storage and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
// Single-instruction atomic OR (LSE ldset: sets the bits set in its operand).
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldset" ld_mo st_mo " %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

// LL/SC loop; "Kr" allows a 32-bit logical immediate or a register.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "orr %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Atomically XORs v into storage and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
// Single-instruction atomic XOR (LSE ldeor).
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldeor" ld_mo st_mo " %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

// LL/SC loop; "Kr" allows a 32-bit logical immediate or a register.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "eor %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Atomically sets storage to 1 and returns whether it was non-zero before.
    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    // Atomically resets storage to 0, applying the given memory order.
    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, (storage_type)0, order);
    }
};
1203 
// Atomic operations for 64-bit storage. The asm blocks below use the %x (64-bit)
// views of the general purpose registers; see the note near the top of the file.
template< bool Signed, bool Interprocess >
struct core_arch_operations< 8u, Signed, Interprocess > :
    public core_arch_operations_gcc_aarch64_base
{
    typedef typename storage_traits< 8u >::type storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 8u;
    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = 8u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
    static BOOST_CONSTEXPR_OR_CONST bool is_interprocess = Interprocess;

    // Atomically stores v into storage, applying the given memory order.
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        // Orders containing release semantics (release, acq_rel, seq_cst) require
        // a store-release (stlr); other orders are satisfied by a plain volatile store.
        if ((static_cast< unsigned int >(order) & static_cast< unsigned int >(memory_order_release)) != 0u)
        {
            __asm__ __volatile__
            (
                "stlr %x[value], %[storage]\n\t"
                : [storage] "=Q" (storage)
                : [value] "r" (v)
                : "memory"
            );
        }
        else
        {
            storage = v;
        }
    }

    // Atomically loads and returns the value of storage, applying the given memory order.
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v;
        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
        {
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_RCPC)
            // With RCPC available, consume and acquire loads can use the weaker
            // ldapr instruction; seq_cst loads still require ldar below.
            if (order == memory_order_consume || order == memory_order_acquire)
            {
                __asm__ __volatile__
                (
                    "ldapr %x[value], %[storage]\n\t"
                    : [value] "=r" (v)
                    : [storage] "Q" (storage)
                    : "memory"
                );
            }
            else
#endif
            {
                __asm__ __volatile__
                (
                    "ldar %x[value], %[storage]\n\t"
                    : [value] "=r" (v)
                    : [storage] "Q" (storage)
                    : "memory"
                );
            }
        }
        else
        {
            // Relaxed load: a plain volatile read is sufficient.
            v = storage;
        }

        return v;
    }

    // Atomically replaces the value of storage with v and returns the previous value.
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
// Single-instruction atomic swap (LSE). BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH
// below instantiates this macro with the acquire/release mnemonic suffixes
// matching "order".
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "swp" ld_mo st_mo " %x[value], %x[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        uint32_t tmp;

// LL/SC loop: load-exclusive the old value, attempt to store-exclusive the new
// one, and retry while the exclusive store fails (tmp != 0).
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[original], %[storage]\n\t"\
            "st" st_mo "xr %w[tmp], %x[value], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Weak CAS: may fail spuriously. On return, "expected" holds the value
    // observed in storage. As explained in the note at the top of the file,
    // only success_order is used to select instructions; failure_order is
    // intentionally unused.
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        original = expected;

// Single-instruction CAS (LSE): "original" receives the value that was in storage.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "cas" ld_mo st_mo " %x[original], %x[desired], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "+r" (original)\
            : [desired] "r" (desired)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
        bool success = original == expected;
#else
        bool success;

// LL/SC CAS without a retry loop (weak form): stxr writes 0 on a successful
// store and 1 otherwise, so the final eor flips it into a "stored" flag.
// A failed store-exclusive surfaces as a spurious failure, which is permitted here.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "mov %w[success], #0\n\t"\
            "ld" ld_mo "xr %x[original], %[storage]\n\t"\
            "cmp %x[original], %x[expected]\n\t"\
            "b.ne 1f\n\t"\
            "st" st_mo "xr %w[success], %x[desired], %[storage]\n\t"\
            "eor %w[success], %w[success], #1\n\t"\
            "1:\n\t"\
            : [success] "=&r" (success), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [desired] "r" (desired), [expected] "Ir" (expected)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
#endif
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        expected = original;
        return success;
    }

    // Strong CAS: only fails if the stored value differs from "expected".
    // On return, "expected" holds the value observed in storage. failure_order
    // is intentionally unused (see the note at the top of the file).
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        original = expected;

// Single-instruction CAS (LSE): "original" receives the value that was in storage.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "cas" ld_mo st_mo " %x[original], %x[desired], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "+r" (original)\
            : [desired] "r" (desired)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
        bool success = original == expected;
#else
        bool success;

// LL/SC CAS with a retry loop (strong form): a failed store-exclusive restarts
// the loop instead of reporting failure; cset materializes the final comparison
// result as the success flag.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[original], %[storage]\n\t"\
            "cmp %x[original], %x[expected]\n\t"\
            "b.ne 2f\n\t"\
            "st" st_mo "xr %w[success], %x[desired], %[storage]\n\t"\
            "cbnz %w[success], 1b\n\t"\
            "2:\n\t"\
            "cset %w[success], eq\n\t"\
            : [success] "=&r" (success), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [desired] "r" (desired), [expected] "Ir" (expected)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
#endif
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        expected = original;
        return success;
    }

    // Atomically adds v to storage and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
// Single-instruction atomic add (LSE ldadd).
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldadd" ld_mo st_mo " %x[value], %x[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

// LL/SC loop; "Ir" allows an arithmetic immediate or a register for the addend.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[original], %[storage]\n\t"\
            "add %x[result], %x[original], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Ir" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Atomically subtracts v from storage and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        // LSE path: subtraction is performed as addition of the negated value.
        v = -v;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldadd" ld_mo st_mo " %x[value], %x[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );

#else
        storage_type result;
        uint32_t tmp;

// LL/SC loop with an explicit sub instruction.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[original], %[storage]\n\t"\
            "sub %x[result], %x[original], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Ir" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Atomically ANDs v into storage and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        // LSE path: ldclr clears the bits set in its operand, so AND with v is
        // expressed as a bit-clear with the complement of v.
        v = ~v;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldclr" ld_mo st_mo " %x[value], %x[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

// LL/SC loop; "Lr" allows a 64-bit logical immediate or a register.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[original], %[storage]\n\t"\
            "and %x[result], %x[original], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Lr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Atomically ORs v into storage and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
// Single-instruction atomic OR (LSE ldset: sets the bits set in its operand).
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldset" ld_mo st_mo " %x[value], %x[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

// LL/SC loop; "Lr" allows a 64-bit logical immediate or a register.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[original], %[storage]\n\t"\
            "orr %x[result], %x[original], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Lr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Atomically XORs v into storage and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
// Single-instruction atomic XOR (LSE ldeor).
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldeor" ld_mo st_mo " %x[value], %x[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

// LL/SC loop; "Lr" allows a 64-bit logical immediate or a register.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[original], %[storage]\n\t"\
            "eor %x[result], %x[original], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Lr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    // Atomically sets storage to 1 and returns whether it was non-zero before.
    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    // Atomically resets storage to 0, applying the given memory order.
    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, (storage_type)0, order);
    }
};
1587 
// For 128-bit atomic operations we always have to use ldxp+stxp (optionally, with acquire/release semantics), even in load and store operations.
// ARM Architecture Reference Manual Armv8, for Armv8-A architecture profile, Section B2.2.1 "Requirements for single-copy atomicity"
// specifies that ldxp does not guarantee an atomic load, and we have to perform an ldxp+stxp loop to ensure that the loaded value
// is consistent with a previous atomic store.
//
// The ldxp and stxp instructions operate on pairs of registers, meaning that each load loads two integers from memory in
// successive address order, to the first and second registers in the pair, respectively, and store similarly stores two integers.
// The order of these integers does not depend on the active endianness mode (although the byte order in the integers themselves
// obviously does depend on endianness). This means we need to account for the current endianness mode ourselves, where it matters.
//
// Unlike AArch32/A32 or ARMv7, ldxp/stxp do not require adjacent even+odd registers in the pair and accept any two different
// registers. Still, it may be preferable to select adjacent registers, as 128-bit objects are represented by two adjacent
// registers in the ABI. Unfortunately, clang 10 and probably older doesn't seem to support allocating register pairs in asm blocks,
// like in ARMv7. For now we use a union to convert between a pair of 64-bit elements and 128-bit storage.
1602 
// 128-bit operations. Every operation, including load and store, is built on an
// ldxp/stxp (or ldaxp/stlxp) exclusive-pair loop, for the reasons described in
// the comment preceding this specialization.
template< bool Signed, bool Interprocess >
struct core_arch_operations< 16u, Signed, Interprocess > :
    public core_arch_operations_gcc_aarch64_base
{
    typedef typename storage_traits< 16u >::type storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 16u;
    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = 16u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
    static BOOST_CONSTEXPR_OR_CONST bool is_interprocess = Interprocess;

    // Union to convert between two 64-bit registers and a 128-bit storage
    union storage_union
    {
        storage_type as_storage;
        uint64_t as_uint64[2u];
    };

    // Atomic store. Implemented via exchange (the previous value is discarded),
    // since a plain store cannot provide a single-copy atomic 128-bit write.
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        exchange(storage, v, order);
    }

    // Atomic load. The stxp writing back the just-loaded value is required to
    // guarantee the ldxp observed a consistent 128-bit value; on stxp failure the
    // load is retried. The first branch uses ldaxp when order requires acquire
    // (or consume) semantics.
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_union v;
        uint32_t tmp;
        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
        {
            __asm__ __volatile__
            (
                "1:\n\t"
                "ldaxp %x[value_0], %x[value_1], %[storage]\n\t"
                "stxp %w[tmp], %x[value_0], %x[value_1], %[storage]\n\t"
                "cbnz %w[tmp], 1b\n\t"
                : [tmp] "=&r" (tmp), [value_0] "=&r" (v.as_uint64[0u]), [value_1] "=&r" (v.as_uint64[1u])
                : [storage] "Q" (storage)
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "1:\n\t"
                "ldxp %x[value_0], %x[value_1], %[storage]\n\t"
                "stxp %w[tmp], %x[value_0], %x[value_1], %[storage]\n\t"
                "cbnz %w[tmp], 1b\n\t"
                : [tmp] "=&r" (tmp), [value_0] "=&r" (v.as_uint64[0u]), [value_1] "=&r" (v.as_uint64[1u])
                : [storage] "Q" (storage)
                : "memory"
            );
        }

        return v.as_storage;
    }

    // Atomically replaces storage with v and returns the previous value.
    // The ld_mo/st_mo suffixes are substituted by BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH
    // according to the requested memory order (same scheme in all operations below).
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_union original;
        storage_union value = { v };
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
            "st" st_mo "xp %w[tmp], %x[value_0], %x[value_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u])\
            : [value_0] "r" (value.as_uint64[0u]), [value_1] "r" (value.as_uint64[1u])\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original.as_storage;
    }

    // Weak CAS: a single ldxp/stxp attempt. Both 64-bit halves are compared with
    // cmp + ccmp; on mismatch the store is skipped and success stays 0. Otherwise
    // success receives the stxp status (0 = stored), which "eor ..., #1" inverts
    // into a boolean. A spurious stxp failure thus reports failure, as allowed
    // for the weak form. As noted earlier in this file, instructions are selected
    // from success_order; on failure only its acquire part applies.
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_union original;
        storage_union e = { expected };
        storage_union d = { desired };
        bool success;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "mov %w[success], #0\n\t"\
            "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
            "cmp %x[original_0], %x[expected_0]\n\t"\
            "ccmp %x[original_1], %x[expected_1], #0, eq\n\t"\
            "b.ne 1f\n\t"\
            "st" st_mo "xp %w[success], %x[desired_0], %x[desired_1], %[storage]\n\t"\
            "eor %w[success], %w[success], #1\n\t"\
            "1:\n\t"\
            : [success] "=&r" (success), [storage] "+Q" (storage), [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u])\
            : [desired_0] "r" (d.as_uint64[0u]), [desired_1] "r" (d.as_uint64[1u]), [expected_0] "r" (e.as_uint64[0u]), [expected_1] "r" (e.as_uint64[1u])\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        expected = original.as_storage;
        return success;
    }

    // Strong CAS: same comparison as the weak form, but the whole sequence is
    // retried (cbnz back to 1:) when stxp fails spuriously, so failure is only
    // reported on a genuine value mismatch. cset materializes the boolean from
    // the comparison flags, which are still valid at label 2.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_union original;
        storage_union e = { expected };
        storage_union d = { desired };
        bool success;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
            "cmp %x[original_0], %x[expected_0]\n\t"\
            "ccmp %x[original_1], %x[expected_1], #0, eq\n\t"\
            "b.ne 2f\n\t"\
            "st" st_mo "xp %w[success], %x[desired_0], %x[desired_1], %[storage]\n\t"\
            "cbnz %w[success], 1b\n\t"\
            "2:\n\t"\
            "cset %w[success], eq\n\t"\
            : [success] "=&r" (success), [storage] "+Q" (storage), [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u])\
            : [desired_0] "r" (d.as_uint64[0u]), [desired_1] "r" (d.as_uint64[1u]), [expected_0] "r" (e.as_uint64[0u]), [expected_1] "r" (e.as_uint64[1u])\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        expected = original.as_storage;
        return success;
    }

    // Atomically adds v to storage and returns the previous value. The 128-bit
    // addition is done as adds/adc over the two 64-bit halves; the ASM_ARG_LO/HI
    // macros map the logical low/high half to the correct array index for the
    // target endianness (see the endianness note preceding this struct).
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_union original;
        storage_union value = { v };
        storage_union result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
            "adds %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[original_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "]\n\t"\
            "adc %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[original_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u]),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "r" (value.as_uint64[0u]), [value_1] "r" (value.as_uint64[1u])\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original.as_storage;
    }

    // Atomically subtracts v from storage and returns the previous value.
    // Mirror of fetch_add, using subs/sbc to propagate the borrow from the
    // logical low half to the high half.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_union original;
        storage_union value = { v };
        storage_union result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
            "subs %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[original_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "]\n\t"\
            "sbc %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[original_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u]),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "r" (value.as_uint64[0u]), [value_1] "r" (value.as_uint64[1u])\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original.as_storage;
    }

    // Atomically ANDs v into storage and returns the previous value. Bitwise ops
    // act on the halves independently (no carry), so no lo/hi mapping is needed.
    // "Lr" allows each half to be a valid logical immediate instead of a register.
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_union original;
        storage_union value = { v };
        storage_union result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
            "and %x[result_0], %x[original_0], %x[value_0]\n\t"\
            "and %x[result_1], %x[original_1], %x[value_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u]),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "Lr" (value.as_uint64[0u]), [value_1] "Lr" (value.as_uint64[1u])\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original.as_storage;
    }

    // Atomically ORs v into storage and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_union original;
        storage_union value = { v };
        storage_union result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
            "orr %x[result_0], %x[original_0], %x[value_0]\n\t"\
            "orr %x[result_1], %x[original_1], %x[value_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u]),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "Lr" (value.as_uint64[0u]), [value_1] "Lr" (value.as_uint64[1u])\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original.as_storage;
    }

    // Atomically XORs v into storage and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_union original;
        storage_union value = { v };
        storage_union result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
            "eor %x[result_0], %x[original_0], %x[value_0]\n\t"\
            "eor %x[result_1], %x[original_1], %x[value_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u]),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "Lr" (value.as_uint64[0u]), [value_1] "Lr" (value.as_uint64[1u])\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original.as_storage;
    }

    // Atomically sets storage to 1 and returns whether the previous value was non-zero.
    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    // Atomically resets storage to zero with the given memory order.
    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, (storage_type)0, order);
    }
};
1902 
1903 } // namespace detail
1904 } // namespace atomics
1905 } // namespace boost
1906 
1907 #include <boost/atomic/detail/footer.hpp>
1908 
1909 #endif // BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_GCC_AARCH64_HPP_INCLUDED_
1910