1 /*
2  * Distributed under the Boost Software License, Version 1.0.
3  * (See accompanying file LICENSE_1_0.txt or copy at
4  * http://www.boost.org/LICENSE_1_0.txt)
5  *
6  * Copyright (c) 2017 - 2018 Andrey Semashev
7  */
8 /*!
9  * \file   atomic/detail/extra_ops_gcc_arm.hpp
10  *
11  * This header contains implementation of the extra atomic operations for ARM.
12  */
13 
14 #ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_ARM_HPP_INCLUDED_
15 #define BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_ARM_HPP_INCLUDED_
16 
17 #include <cstddef>
18 #include <boost/cstdint.hpp>
19 #include <boost/memory_order.hpp>
20 #include <boost/atomic/detail/config.hpp>
21 #include <boost/atomic/detail/platform.hpp>
22 #include <boost/atomic/detail/storage_traits.hpp>
23 #include <boost/atomic/detail/extra_operations_fwd.hpp>
24 #include <boost/atomic/detail/extra_ops_generic.hpp>
25 #include <boost/atomic/detail/ops_gcc_arm_common.hpp>
26 #include <boost/atomic/detail/gcc_arm_asm_common.hpp>
27 #include <boost/atomic/detail/capabilities.hpp>
28 #include <boost/atomic/detail/header.hpp>
29 
30 #ifdef BOOST_HAS_PRAGMA_ONCE
31 #pragma once
32 #endif
33 
34 namespace boost {
35 namespace atomics {
36 namespace detail {
38 template< typename Base >
39 struct extra_operations_gcc_arm_common :
40     public Base
41 {
42     typedef Base base_type;
43     typedef typename base_type::storage_type storage_type;
44 
45     static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
46     {
47         base_type::fetch_negate(storage, order);
48     }
49 
50     static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
51     {
52         base_type::fetch_complement(storage, order);
53     }
54 
55     static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
56     {
57         return !!base_type::negate(storage, order);
58     }
59 
60     static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
61     {
62         return !!base_type::add(storage, v, order);
63     }
64 
65     static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
66     {
67         return !!base_type::sub(storage, v, order);
68     }
69 
70     static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
71     {
72         return !!base_type::bitwise_and(storage, v, order);
73     }
74 
75     static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
76     {
77         return !!base_type::bitwise_or(storage, v, order);
78     }
79 
80     static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
81     {
82         return !!base_type::bitwise_xor(storage, v, order);
83     }
84 
85     static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
86     {
87         return !!base_type::bitwise_complement(storage, order);
88     }
89 };
90 
//! Primary template; specialized below for the operand sizes supported by the target's exclusive load/store instructions
template< typename Base, std::size_t Size, bool Signed >
struct extra_operations_gcc_arm;
93 
94 #if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXB_STREXB)
95 
/*!
 * Specialization for 8-bit operations, based on the ldrexb/strexb exclusive
 * load/store instructions. Operands are handled in 32-bit registers and are
 * zero-extended on load, hence the 4-byte extended_storage_type. Every
 * operation is a load-linked/store-conditional retry loop: strexb writes its
 * success flag to [tmp], and the loop restarts if the store failed. [tmp] is
 * also passed to the ASM_START/END macros (presumably used for scratch/state
 * management in Thumb mode - see gcc_arm_asm_common.hpp).
 */
template< typename Base, bool Signed >
struct extra_operations_gcc_arm< Base, 1u, Signed > :
    public extra_operations_generic< Base, 1u, Signed >
{
    typedef extra_operations_generic< Base, 1u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename storage_traits< 4u >::type extended_storage_type;

    //! Atomically replaces the value with its arithmetic negation and returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "rsb      %[result], %[original], #0\n\t"        // result = 0 - original
            "strexb   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    //! Atomically replaces the value with its arithmetic negation and returns the resulting value
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "rsb      %[result], %[original], #0\n\t"        // result = 0 - original
            "strexb   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically adds \a v to the value and returns the resulting value
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "add      %[result], %[original], %[value]\n\t"  // result = original + value
            "strexb   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically subtracts \a v from the value and returns the resulting value
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "sub      %[result], %[original], %[value]\n\t"  // result = original - value
            "strexb   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically ANDs \a v into the value and returns the resulting value
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "and      %[result], %[original], %[value]\n\t"  // result = original & value
            "strexb   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically ORs \a v into the value and returns the resulting value
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "orr      %[result], %[original], %[value]\n\t"  // result = original | value
            "strexb   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically XORs \a v into the value and returns the resulting value
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "eor      %[result], %[original], %[value]\n\t"  // result = original ^ value
            "strexb   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically replaces the value with its bitwise complement and returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "mvn      %[result], %[original]\n\t"            // result = NOT original
            "strexb   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    //! Atomically replaces the value with its bitwise complement and returns the resulting value
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "mvn      %[result], %[original]\n\t"            // result = NOT original
            "strexb   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }
};
338 
//! Publishes the 8-bit ARM implementation, layering the common *_and_test/opaque_* adapters on top
template< typename Base, bool Signed >
struct extra_operations< Base, 1u, Signed, true > :
    public extra_operations_gcc_arm_common< extra_operations_gcc_arm< Base, 1u, Signed > >
{
};
344 
345 #endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXB_STREXB)
346 
347 #if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXH_STREXH)
348 
/*!
 * Specialization for 16-bit operations, based on the ldrexh/strexh exclusive
 * load/store instructions. Operands are handled in 32-bit registers and are
 * zero-extended on load, hence the 4-byte extended_storage_type. Every
 * operation is a load-linked/store-conditional retry loop: strexh writes its
 * success flag to [tmp], and the loop restarts if the store failed. [tmp] is
 * also passed to the ASM_START/END macros (presumably used for scratch/state
 * management in Thumb mode - see gcc_arm_asm_common.hpp).
 */
template< typename Base, bool Signed >
struct extra_operations_gcc_arm< Base, 2u, Signed > :
    public extra_operations_generic< Base, 2u, Signed >
{
    typedef extra_operations_generic< Base, 2u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename storage_traits< 4u >::type extended_storage_type;

    //! Atomically replaces the value with its arithmetic negation and returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "rsb      %[result], %[original], #0\n\t"        // result = 0 - original
            "strexh   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    //! Atomically replaces the value with its arithmetic negation and returns the resulting value
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "rsb      %[result], %[original], #0\n\t"        // result = 0 - original
            "strexh   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically adds \a v to the value and returns the resulting value
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "add      %[result], %[original], %[value]\n\t"  // result = original + value
            "strexh   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically subtracts \a v from the value and returns the resulting value
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "sub      %[result], %[original], %[value]\n\t"  // result = original - value
            "strexh   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically ANDs \a v into the value and returns the resulting value
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "and      %[result], %[original], %[value]\n\t"  // result = original & value
            "strexh   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically ORs \a v into the value and returns the resulting value
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "orr      %[result], %[original], %[value]\n\t"  // result = original | value
            "strexh   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically XORs \a v into the value and returns the resulting value
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "eor      %[result], %[original], %[value]\n\t"  // result = original ^ value
            "strexh   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically replaces the value with its bitwise complement and returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "mvn      %[result], %[original]\n\t"            // result = NOT original
            "strexh   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    //! Atomically replaces the value with its bitwise complement and returns the resulting value
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh   %[original], %[storage]\n\t"           // original = zero_extend(*(&storage))
            "mvn      %[result], %[original]\n\t"            // result = NOT original
            "strexh   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }
};
591 
//! Publishes the 16-bit ARM implementation, layering the common *_and_test/opaque_* adapters on top
template< typename Base, bool Signed >
struct extra_operations< Base, 2u, Signed, true > :
    public extra_operations_gcc_arm_common< extra_operations_gcc_arm< Base, 2u, Signed > >
{
};
597 
598 #endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXH_STREXH)
599 
//! Specialization for 32-bit operations, based on the ldrex/strex instruction pair.
//! Unlike the 8/16-bit specializations, no extended storage type is needed since
//! the operand size matches the 32-bit register width.
template< typename Base, bool Signed >
struct extra_operations_gcc_arm< Base, 4u, Signed > :
    public extra_operations_generic< Base, 4u, Signed >
{
    typedef extra_operations_generic< Base, 4u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    //! Atomically replaces the value with its arithmetic negation and returns the previous value.
    //! Implemented as an ldrex/strex retry loop; [tmp] receives the strex success flag.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex    %[original], %[storage]\n\t"           // original = *(&storage)
            "rsb      %[result], %[original], #0\n\t"        // result = 0 - original
            "strex    %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
            "teq      %[tmp], #0\n\t"                        // flags = tmp==0
            "bne      1b\n\t"                                // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return original;
    }
632 
633     static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
634     {
635         core_arch_operations_gcc_arm_base::fence_before(order);
636         uint32_t tmp;
637         storage_type original, result;
638         __asm__ __volatile__
639         (
640             BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
641             "1:\n\t"
642             "ldrex    %[original], %[storage]\n\t"           // original = *(&storage)
643             "rsb      %[result], %[original], #0\n\t"        // result = 0 - original
644             "strex    %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
645             "teq      %[tmp], #0\n\t"                        // flags = tmp==0
646             "bne      1b\n\t"                                // if (!flags.equal) goto retry
647             BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
648             : [original] "=&r" (original),  // %0
649               [result] "=&r" (result),      // %1
650               [tmp] "=&l" (tmp),            // %2
651               [storage] "+Q" (storage)      // %3
652             :
653             : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
654         );
655         core_arch_operations_gcc_arm_base::fence_after(order);
656         return result;
657     }
658 
659     static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
660     {
661         core_arch_operations_gcc_arm_base::fence_before(order);
662         uint32_t tmp;
663         storage_type original, result;
664         __asm__ __volatile__
665         (
666             BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
667             "1:\n\t"
668             "ldrex   %[original], %[storage]\n\t"           // original = *(&storage)
669             "add     %[result], %[original], %[value]\n\t"  // result = original + value
670             "strex   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
671             "teq     %[tmp], #0\n\t"                        // flags = tmp==0
672             "bne     1b\n\t"                                // if (!flags.equal) goto retry
673             BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
674             : [original] "=&r" (original),  // %0
675               [result] "=&r" (result),      // %1
676               [tmp] "=&l" (tmp),            // %2
677               [storage] "+Q" (storage)      // %3
678             : [value] "Ir" (v)              // %4
679             : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
680         );
681         core_arch_operations_gcc_arm_base::fence_after(order);
682         return result;
683     }
684 
685     static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
686     {
687         core_arch_operations_gcc_arm_base::fence_before(order);
688         uint32_t tmp;
689         storage_type original, result;
690         __asm__ __volatile__
691         (
692             BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
693             "1:\n\t"
694             "ldrex   %[original], %[storage]\n\t"           // original = *(&storage)
695             "sub     %[result], %[original], %[value]\n\t"  // result = original - value
696             "strex   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
697             "teq     %[tmp], #0\n\t"                        // flags = tmp==0
698             "bne     1b\n\t"                                // if (!flags.equal) goto retry
699             BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
700             : [original] "=&r" (original),  // %0
701               [result] "=&r" (result),      // %1
702               [tmp] "=&l" (tmp),            // %2
703               [storage] "+Q" (storage)      // %3
704             : [value] "Ir" (v)              // %4
705             : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
706         );
707         core_arch_operations_gcc_arm_base::fence_after(order);
708         return result;
709     }
710 
711     static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
712     {
713         core_arch_operations_gcc_arm_base::fence_before(order);
714         uint32_t tmp;
715         storage_type original, result;
716         __asm__ __volatile__
717         (
718             BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
719             "1:\n\t"
720             "ldrex   %[original], %[storage]\n\t"           // original = *(&storage)
721             "and     %[result], %[original], %[value]\n\t"  // result = original & value
722             "strex   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
723             "teq     %[tmp], #0\n\t"                        // flags = tmp==0
724             "bne     1b\n\t"                                // if (!flags.equal) goto retry
725             BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
726             : [original] "=&r" (original),  // %0
727               [result] "=&r" (result),      // %1
728               [tmp] "=&l" (tmp),            // %2
729               [storage] "+Q" (storage)      // %3
730             : [value] "Ir" (v)              // %4
731             : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
732         );
733         core_arch_operations_gcc_arm_base::fence_after(order);
734         return result;
735     }
736 
737     static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
738     {
739         core_arch_operations_gcc_arm_base::fence_before(order);
740         uint32_t tmp;
741         storage_type original, result;
742         __asm__ __volatile__
743         (
744             BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
745             "1:\n\t"
746             "ldrex   %[original], %[storage]\n\t"           // original = *(&storage)
747             "orr     %[result], %[original], %[value]\n\t"  // result = original | value
748             "strex   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
749             "teq     %[tmp], #0\n\t"                        // flags = tmp==0
750             "bne     1b\n\t"                                // if (!flags.equal) goto retry
751             BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
752             : [original] "=&r" (original),  // %0
753               [result] "=&r" (result),      // %1
754               [tmp] "=&l" (tmp),            // %2
755               [storage] "+Q" (storage)      // %3
756             : [value] "Ir" (v)              // %4
757             : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
758         );
759         core_arch_operations_gcc_arm_base::fence_after(order);
760         return result;
761     }
762 
763     static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
764     {
765         core_arch_operations_gcc_arm_base::fence_before(order);
766         uint32_t tmp;
767         storage_type original, result;
768         __asm__ __volatile__
769         (
770             BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
771             "1:\n\t"
772             "ldrex   %[original], %[storage]\n\t"           // original = *(&storage)
773             "eor     %[result], %[original], %[value]\n\t"  // result = original ^ value
774             "strex   %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
775             "teq     %[tmp], #0\n\t"                        // flags = tmp==0
776             "bne     1b\n\t"                                // if (!flags.equal) goto retry
777             BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
778             : [original] "=&r" (original),  // %0
779               [result] "=&r" (result),      // %1
780               [tmp] "=&l" (tmp),            // %2
781               [storage] "+Q" (storage)      // %3
782             : [value] "Ir" (v)              // %4
783             : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
784         );
785         core_arch_operations_gcc_arm_base::fence_after(order);
786         return result;
787     }
788 
789     static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
790     {
791         core_arch_operations_gcc_arm_base::fence_before(order);
792         uint32_t tmp;
793         storage_type original, result;
794         __asm__ __volatile__
795         (
796             BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
797             "1:\n\t"
798             "ldrex    %[original], %[storage]\n\t"           // original = *(&storage)
799             "mvn      %[result], %[original]\n\t"            // result = NOT original
800             "strex    %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
801             "teq      %[tmp], #0\n\t"                        // flags = tmp==0
802             "bne      1b\n\t"                                // if (!flags.equal) goto retry
803             BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
804             : [original] "=&r" (original),  // %0
805               [result] "=&r" (result),      // %1
806               [tmp] "=&l" (tmp),            // %2
807               [storage] "+Q" (storage)      // %3
808             :
809             : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
810         );
811         core_arch_operations_gcc_arm_base::fence_after(order);
812         return original;
813     }
814 
815     static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
816     {
817         core_arch_operations_gcc_arm_base::fence_before(order);
818         uint32_t tmp;
819         storage_type original, result;
820         __asm__ __volatile__
821         (
822             BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
823             "1:\n\t"
824             "ldrex    %[original], %[storage]\n\t"           // original = *(&storage)
825             "mvn      %[result], %[original]\n\t"            // result = NOT original
826             "strex    %[tmp], %[result], %[storage]\n\t"     // *(&storage) = result, tmp = store failed
827             "teq      %[tmp], #0\n\t"                        // flags = tmp==0
828             "bne      1b\n\t"                                // if (!flags.equal) goto retry
829             BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
830             : [original] "=&r" (original),  // %0
831               [result] "=&r" (result),      // %1
832               [tmp] "=&l" (tmp),            // %2
833               [storage] "+Q" (storage)      // %3
834             :
835             : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
836         );
837         core_arch_operations_gcc_arm_base::fence_after(order);
838         return result;
839     }
840 };
841 
// Wires the 32-bit asm implementation into the common layer, which adds the
// opaque_* and *_and_test operations on top of the fetch-based primitives.
template< typename Base, bool Signed >
struct extra_operations< Base, 4u, Signed, true > :
    public extra_operations_gcc_arm_common< extra_operations_gcc_arm< Base, 4u, Signed > >
{
};
847 
848 #if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXD_STREXD)
849 
/*!
 * Extra atomic operations for 64-bit storage, implemented as ldrexd/strexd
 * retry loops over an even/odd register pair. The fetch_xxx operations return
 * the value that preceded the operation; the non-fetch operations return the
 * resulting value. Operations not defined here are inherited from the generic
 * fallback base.
 *
 * Notes common to all asm blocks below:
 *  - %n / %Hn address the low/high registers of a 64-bit operand pair; the
 *    BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO/HI macros are used where the
 *    arithmetically-low word must be named explicitly, presumably to account
 *    for endian-dependent register pair ordering — the macros are defined in
 *    gcc_arm_asm_common.hpp, confirm there.
 *  - %0 (tmp) receives the strexd success flag (0 = store succeeded).
 *  - Memory ordering is provided by fence_before/fence_after around the loop.
 */
template< typename Base, bool Signed >
struct extra_operations_gcc_arm< Base, 8u, Signed > :
    public extra_operations_generic< Base, 8u, Signed >
{
    typedef extra_operations_generic< Base, 8u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    //! Atomically replaces the value with its two's complement negation, computed as (~x) + 1; returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd  %1, %H1, %3\n\t"                 // original = *(&storage)
            "mvn     %2, %1\n\t"                      // result = NOT original
            "mvn     %H2, %H1\n\t"
            "adds   " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", #1\n\t" // result = result + 1
            "adc    " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", #0\n\t"
            "strexd  %0, %2, %H2, %3\n\t"             // *(&storage) = result, tmp = store failed
            "teq     %0, #0\n\t"                      // flags = tmp==0
            "bne     1b\n\t"                          // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result),    // %2
              "+Q" (storage)     // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return original;
    }

    //! Atomically replaces the value with its two's complement negation; returns the new (negated) value
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd  %1, %H1, %3\n\t"                 // original = *(&storage)
            "mvn     %2, %1\n\t"                      // result = NOT original
            "mvn     %H2, %H1\n\t"
            "adds   " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", #1\n\t" // result = result + 1
            "adc    " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", #0\n\t"
            "strexd  %0, %2, %H2, %3\n\t"             // *(&storage) = result, tmp = store failed
            "teq     %0, #0\n\t"                      // flags = tmp==0
            "bne     1b\n\t"                          // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result),    // %2
              "+Q" (storage)     // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }

    //! Atomically adds \a v to the value (adds/adc carry chain); returns the new value
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd  %1, %H1, %3\n\t"                 // original = *(&storage)
            "adds   " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(1) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(4) "\n\t" // result = original + value
            "adc    " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(1) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(4) "\n\t"
            "strexd  %0, %2, %H2, %3\n\t"             // *(&storage) = result, tmp = store failed
            "teq     %0, #0\n\t"                      // flags = tmp==0
            "bne     1b\n\t"                          // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result),    // %2
              "+Q" (storage)     // %3
            : "r" (v)            // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }

    //! Atomically subtracts \a v from the value (subs/sbc borrow chain); returns the new value
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd  %1, %H1, %3\n\t"                 // original = *(&storage)
            "subs   " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(1) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(4) "\n\t" // result = original - value
            "sbc    " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(1) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(4) "\n\t"
            "strexd  %0, %2, %H2, %3\n\t"             // *(&storage) = result, tmp = store failed
            "teq     %0, #0\n\t"                      // flags = tmp==0
            "bne     1b\n\t"                          // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result),    // %2
              "+Q" (storage)     // %3
            : "r" (v)            // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }

    //! Atomically ANDs \a v into the value, word by word; returns the new value
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd  %1, %H1, %3\n\t"                 // original = *(&storage)
            "and     %2, %1, %4\n\t"                  // result = original & value
            "and     %H2, %H1, %H4\n\t"
            "strexd  %0, %2, %H2, %3\n\t"             // *(&storage) = result, tmp = store failed
            "teq     %0, #0\n\t"                      // flags = tmp==0
            "bne     1b\n\t"                          // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result),    // %2
              "+Q" (storage)     // %3
            : "r" (v)            // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }

    //! Atomically ORs \a v into the value, word by word; returns the new value
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd  %1, %H1, %3\n\t"                 // original = *(&storage)
            "orr     %2, %1, %4\n\t"                  // result = original | value
            "orr     %H2, %H1, %H4\n\t"
            "strexd  %0, %2, %H2, %3\n\t"             // *(&storage) = result, tmp = store failed
            "teq     %0, #0\n\t"                      // flags = tmp==0
            "bne     1b\n\t"                          // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result),    // %2
              "+Q" (storage)     // %3
            : "r" (v)            // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }

    //! Atomically XORs \a v into the value, word by word; returns the new value
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd  %1, %H1, %3\n\t"                 // original = *(&storage)
            "eor     %2, %1, %4\n\t"                  // result = original ^ value
            "eor     %H2, %H1, %H4\n\t"
            "strexd  %0, %2, %H2, %3\n\t"             // *(&storage) = result, tmp = store failed
            "teq     %0, #0\n\t"                      // flags = tmp==0
            "bne     1b\n\t"                          // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result),    // %2
              "+Q" (storage)     // %3
            : "r" (v)            // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }

    //! Atomically replaces the value with its bitwise complement (mvn on both words); returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd  %1, %H1, %3\n\t"                 // original = *(&storage)
            "mvn     %2, %1\n\t"                      // result = NOT original
            "mvn     %H2, %H1\n\t"
            "strexd  %0, %2, %H2, %3\n\t"             // *(&storage) = result, tmp = store failed
            "teq     %0, #0\n\t"                      // flags = tmp==0
            "bne     1b\n\t"                          // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result),    // %2
              "+Q" (storage)     // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return original;
    }

    //! Atomically replaces the value with its bitwise complement; returns the new (complemented) value
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd  %1, %H1, %3\n\t"                 // original = *(&storage)
            "mvn     %2, %1\n\t"                      // result = NOT original
            "mvn     %H2, %H1\n\t"
            "strexd  %0, %2, %H2, %3\n\t"             // *(&storage) = result, tmp = store failed
            "teq     %0, #0\n\t"                      // flags = tmp==0
            "bne     1b\n\t"                          // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result),    // %2
              "+Q" (storage)     // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }
};
1104 
// Wires the 64-bit asm implementation into the common layer, which adds the
// opaque_* and *_and_test operations on top of the fetch-based primitives.
template< typename Base, bool Signed >
struct extra_operations< Base, 8u, Signed, true > :
    public extra_operations_gcc_arm_common< extra_operations_gcc_arm< Base, 8u, Signed > >
{
};
1110 
1111 #endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXD_STREXD)
1112 
1113 } // namespace detail
1114 } // namespace atomics
1115 } // namespace boost
1116 
1117 #include <boost/atomic/detail/footer.hpp>
1118 
1119 #endif // BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_ARM_HPP_INCLUDED_
1120