1 /*
2 * Distributed under the Boost Software License, Version 1.0.
3 * (See accompanying file LICENSE_1_0.txt or copy at
4 * http://www.boost.org/LICENSE_1_0.txt)
5 *
6 * Copyright (c) 2009 Helge Bahmann
7 * Copyright (c) 2013 Tim Blechmann
8 * Copyright (c) 2014 Andrey Semashev
9 */
10 /*!
11 * \file atomic/detail/ops_gcc_arm.hpp
12 *
13 * This header contains implementation of the \c operations template.
14 */
15
16 #ifndef BOOST_ATOMIC_DETAIL_OPS_GCC_ARM_HPP_INCLUDED_
17 #define BOOST_ATOMIC_DETAIL_OPS_GCC_ARM_HPP_INCLUDED_
18
19 #include <cstddef>
20 #include <boost/cstdint.hpp>
21 #include <boost/memory_order.hpp>
22 #include <boost/atomic/detail/config.hpp>
23 #include <boost/atomic/detail/storage_type.hpp>
24 #include <boost/atomic/detail/integral_extend.hpp>
25 #include <boost/atomic/detail/operations_fwd.hpp>
26 #include <boost/atomic/detail/ops_gcc_arm_common.hpp>
27 #include <boost/atomic/capabilities.hpp>
28
29 #ifdef BOOST_HAS_PRAGMA_ONCE
30 #pragma once
31 #endif
32
33 namespace boost {
34 namespace atomics {
35 namespace detail {
36
37 // From the ARM Architecture Reference Manual for architecture v6:
38 //
39 // LDREX{<cond>} <Rd>, [<Rn>]
// <Rd> Specifies the destination register for the memory word addressed by <Rn>
41 // <Rn> Specifies the register containing the address.
42 //
43 // STREX{<cond>} <Rd>, <Rm>, [<Rn>]
44 // <Rd> Specifies the destination register for the returned status value.
45 // 0 if the operation updates memory
46 // 1 if the operation fails to update memory
47 // <Rm> Specifies the register containing the word to be stored to memory.
48 // <Rn> Specifies the register containing the address.
49 // Rd must not be the same register as Rm or Rn.
50 //
// ARM v7 is like ARM v6 plus:
// There are half-word and byte versions of the LDREX and STREX instructions,
// LDREXH, LDREXB, STREXH and STREXB.
// There are also double-word versions, LDREXD and STREXD.
// (Actually it looks like these are available from version 6k onwards.)
// The byte and half-word versions are used below when the corresponding
// BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXB_STREXB / BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXH_STREXH
// macros are defined; the double-word LDREXD/STREXD are not used in this header.
// I think you can supply an immediate offset to the address.
58
// Atomic operations for 4-byte (word) storage, implemented with the ARMv6
// LDREX/STREX exclusive-access instruction pair. Memory ordering is enforced
// with the fence_before/fence_after/fence_after_store helpers inherited from
// gcc_arm_operations_base. The %[tmp] operand handed to
// BOOST_ATOMIC_DETAIL_ARM_ASM_START/END is a scratch register consumed by
// those macros (defined in ops_gcc_arm_common.hpp) — presumably for the
// Thumb-1 ARM-mode switch; confirm against that header.
template< bool Signed >
struct operations< 4u, Signed > :
    public gcc_arm_operations_base
{
    typedef typename make_storage_type< 4u >::type storage_type;
    typedef typename make_storage_type< 4u >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 4u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    // Plain volatile store bracketed by the ordering fences; no exclusive
    // access is needed for a simple store of the native word size.
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        storage = v;
        fence_after_store(order);
    }

    // Plain volatile load followed by the ordering fence.
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = storage;
        fence_after(order);
        return v;
    }

    // Atomically replaces the stored value with v and returns the previous
    // value. The LDREX/STREX pair is retried until the exclusive store
    // succeeds (STREX writes 0 to tmp on success).
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
        fence_before(order);
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"          // load the original value
            "strex %[tmp], %[value], %[storage]\n"     // store the replacement, tmp = store failed
            "teq %[tmp], #0\n"                         // check if store succeeded
            "bne 1b\n"
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [tmp] "=&l" (tmp), [original] "=&r" (original), [storage] "+Q" (storage)
            : [value] "r" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Weak CAS: a single LDREX/STREX attempt with no retry loop, so it may
    // fail spuriously (STREX can fail even when original == expected).
    // On return, expected always receives the value that was loaded.
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        fence_before(success_order);
        uint32_t success;
        uint32_t tmp;
        storage_type original;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "mov %[success], #0\n"                      // success = 0
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "cmp %[original], %[expected]\n"            // flags = original==expected
            "itt eq\n"                                  // [hint that the following 2 instructions are conditional on flags.equal; required for Thumb-2]
            "strexeq %[success], %[desired], %[storage]\n" // if (flags.equal) *(&storage) = desired, success = store failed
            "eoreq %[success], %[success], #1\n"        // if (flags.equal) success ^= 1 (i.e. make it 1 if store succeeded)
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [success] "=&r" (success),    // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [expected] "Ir" (expected),   // %4
              [desired] "r" (desired)       // %5
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        expected = original;
        return !!success;
    }

    // Strong CAS: retries the LDREX/STREX loop as long as the values compare
    // equal but the exclusive store fails, so it only returns false when the
    // stored value genuinely differed from expected.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        fence_before(success_order);
        uint32_t success;
        uint32_t tmp;
        storage_type original;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "mov %[success], #0\n"                      // success = 0
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "cmp %[original], %[expected]\n"            // flags = original==expected
            "bne 2f\n"                                  // if (!flags.equal) goto end
            "strex %[success], %[desired], %[storage]\n" // *(&storage) = desired, success = store failed
            "eors %[success], %[success], #1\n"         // success ^= 1 (i.e. make it 1 if store succeeded); flags.equal = success == 0
            "beq 1b\n"                                  // if (flags.equal) goto retry
            "2:\n"
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [success] "=&r" (success),    // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [expected] "Ir" (expected),   // %4
              [desired] "r" (desired)       // %5
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        expected = original;
        return !!success;
    }

    // Atomically adds v to the stored value; returns the value before the add.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "add %[result], %[original], %[value]\n"    // result = original + value
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomically subtracts v from the stored value; returns the value before
    // the subtraction.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "sub %[result], %[original], %[value]\n"    // result = original - value
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomically ANDs v into the stored value; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "and %[result], %[original], %[value]\n"    // result = original & value
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomically ORs v into the stored value; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "orr %[result], %[original], %[value]\n"    // result = original | value
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomically XORs v into the stored value; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "eor %[result], %[original], %[value]\n"    // result = original ^ value
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Sets the flag to 1 atomically; returns true if it was already nonzero.
    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    // Resets the flag to 0 with the requested ordering.
    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, 0, order);
    }
};
314
315 #if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXB_STREXB)
316
// Atomic operations for 1-byte storage on targets that provide the byte-wide
// exclusive-access instructions LDREXB/STREXB. LDREXB zero-extends the loaded
// byte to 32 bits, so all intermediate values are kept in a 32-bit
// extended_storage_type and narrowed back to storage_type only on return.
template< bool Signed >
struct operations< 1u, Signed > :
    public gcc_arm_operations_base
{
    typedef typename make_storage_type< 1u >::type storage_type;
    typedef typename make_storage_type< 1u >::aligned aligned_storage_type;
    typedef typename make_storage_type< 4u >::type extended_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 1u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    // Plain volatile store bracketed by the ordering fences; a byte store
    // needs no exclusive access.
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        storage = v;
        fence_after_store(order);
    }

    // Plain volatile load followed by the ordering fence.
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = storage;
        fence_after(order);
        return v;
    }

    // Atomically replaces the stored byte with v; returns the previous value.
    // Retries the LDREXB/STREXB pair until the exclusive store succeeds.
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        extended_storage_type original;
        fence_before(order);
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n"          // load the original value and zero-extend to 32 bits
            "strexb %[tmp], %[value], %[storage]\n"     // store the replacement, tmp = store failed
            "teq %[tmp], #0\n"                          // check if store succeeded
            "bne 1b\n"
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [tmp] "=&l" (tmp), [original] "=&r" (original), [storage] "+Q" (storage)
            : [value] "r" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return static_cast< storage_type >(original);
    }

    // Weak CAS: single attempt, may fail spuriously. Because LDREXB
    // zero-extends, expected is zero-extended too so the 32-bit cmp compares
    // like with like.
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        fence_before(success_order);
        uint32_t success;
        uint32_t tmp;
        extended_storage_type original;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "mov %[success], #0\n"                      // success = 0
            "ldrexb %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "cmp %[original], %[expected]\n"            // flags = original==expected
            "itt eq\n"                                  // [hint that the following 2 instructions are conditional on flags.equal]
            "strexbeq %[success], %[desired], %[storage]\n" // if (flags.equal) *(&storage) = desired, success = store failed
            "eoreq %[success], %[success], #1\n"        // if (flags.equal) success ^= 1 (i.e. make it 1 if store succeeded)
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [success] "=&r" (success),    // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [expected] "Ir" (atomics::detail::zero_extend< extended_storage_type >(expected)),   // %4
              [desired] "r" (desired)       // %5
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        expected = static_cast< storage_type >(original);
        return !!success;
    }

    // Strong CAS: retries while the values compare equal but STREXB fails,
    // so false is returned only on a genuine value mismatch.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        fence_before(success_order);
        uint32_t success;
        uint32_t tmp;
        extended_storage_type original;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "mov %[success], #0\n"                      // success = 0
            "1:\n"
            "ldrexb %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "cmp %[original], %[expected]\n"            // flags = original==expected
            "bne 2f\n"                                  // if (!flags.equal) goto end
            "strexb %[success], %[desired], %[storage]\n" // *(&storage) = desired, success = store failed
            "eors %[success], %[success], #1\n"         // success ^= 1 (i.e. make it 1 if store succeeded); flags.equal = success == 0
            "beq 1b\n"                                  // if (flags.equal) goto retry
            "2:\n"
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [success] "=&r" (success),    // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [expected] "Ir" (atomics::detail::zero_extend< extended_storage_type >(expected)),   // %4
              [desired] "r" (desired)       // %5
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        expected = static_cast< storage_type >(original);
        return !!success;
    }

    // Atomically adds v to the stored byte; returns the value before the add.
    // STREXB stores only the low byte of result, which truncates the sum.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "add %[result], %[original], %[value]\n"    // result = original + value
            "strexb %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return static_cast< storage_type >(original);
    }

    // Atomically subtracts v from the stored byte; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "sub %[result], %[original], %[value]\n"    // result = original - value
            "strexb %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return static_cast< storage_type >(original);
    }

    // Atomically ANDs v into the stored byte; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "and %[result], %[original], %[value]\n"    // result = original & value
            "strexb %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return static_cast< storage_type >(original);
    }

    // Atomically ORs v into the stored byte; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "orr %[result], %[original], %[value]\n"    // result = original | value
            "strexb %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return static_cast< storage_type >(original);
    }

    // Atomically XORs v into the stored byte; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "eor %[result], %[original], %[value]\n"    // result = original ^ value
            "strexb %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return static_cast< storage_type >(original);
    }

    // Sets the flag to 1 atomically; returns true if it was already nonzero.
    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    // Resets the flag to 0 with the requested ordering.
    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, 0, order);
    }
};
573
574 #else // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXB_STREXB)
575
// Fallback 1-byte (unsigned) operations for targets without LDREXB/STREXB:
// the value lives in 32-bit storage and all operations are inherited from
// operations< 4u, false >. Only fetch_add/fetch_sub are overridden, because
// the arithmetic result must be truncated back to an 8-bit value before it
// is stored; uxtb zero-extends the low byte, matching the unsigned type.
template< >
struct operations< 1u, false > :
    public operations< 4u, false >
{
    typedef operations< 4u, false > base_type;
    typedef base_type::storage_type storage_type;

    // Atomically adds v, truncating the sum to 8 bits (zero-extended) before
    // the store; returns the value before the add.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "add %[result], %[original], %[value]\n"    // result = original + value
            "uxtb %[result], %[result]\n"               // zero extend result from 8 to 32 bits
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomically subtracts v, truncating the difference to 8 bits
    // (zero-extended) before the store; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "sub %[result], %[original], %[value]\n"    // result = original - value
            "uxtb %[result], %[result]\n"               // zero extend result from 8 to 32 bits
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }
};
637
// Fallback 1-byte (signed) operations for targets without LDREXB/STREXB:
// same scheme as the unsigned specialization above, except the arithmetic
// result is sign-extended with sxtb so the 32-bit storage keeps a correct
// signed 8-bit value.
template< >
struct operations< 1u, true > :
    public operations< 4u, true >
{
    typedef operations< 4u, true > base_type;
    typedef base_type::storage_type storage_type;

    // Atomically adds v, truncating the sum to 8 bits (sign-extended) before
    // the store; returns the value before the add.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "add %[result], %[original], %[value]\n"    // result = original + value
            "sxtb %[result], %[result]\n"               // sign extend result from 8 to 32 bits
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomically subtracts v, truncating the difference to 8 bits
    // (sign-extended) before the store; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "sub %[result], %[original], %[value]\n"    // result = original - value
            "sxtb %[result], %[result]\n"               // sign extend result from 8 to 32 bits
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }
};
699
700 #endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXB_STREXB)
701
702 #if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXH_STREXH)
703
704 template< bool Signed >
705 struct operations< 2u, Signed > :
706 public gcc_arm_operations_base
707 {
708 typedef typename make_storage_type< 2u >::type storage_type;
709 typedef typename make_storage_type< 2u >::aligned aligned_storage_type;
710 typedef typename make_storage_type< 4u >::type extended_storage_type;
711
712 static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 2u;
713 static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
714
storeboost::atomics::detail::operations715 static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
716 {
717 fence_before(order);
718 storage = v;
719 fence_after_store(order);
720 }
721
loadboost::atomics::detail::operations722 static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
723 {
724 storage_type v = storage;
725 fence_after(order);
726 return v;
727 }
728
exchangeboost::atomics::detail::operations729 static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
730 {
731 extended_storage_type original;
732 fence_before(order);
733 uint32_t tmp;
734 __asm__ __volatile__
735 (
736 BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
737 "1:\n"
738 "ldrexh %[original], %[storage]\n" // load the original value and zero-extend to 32 bits
739 "strexh %[tmp], %[value], %[storage]\n" // store the replacement, tmp = store failed
740 "teq %[tmp], #0\n" // check if store succeeded
741 "bne 1b\n"
742 BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
743 : [tmp] "=&l" (tmp), [original] "=&r" (original), [storage] "+Q" (storage)
744 : [value] "r" (v)
745 : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
746 );
747 fence_after(order);
748 return static_cast< storage_type >(original);
749 }
750
compare_exchange_weakboost::atomics::detail::operations751 static BOOST_FORCEINLINE bool compare_exchange_weak(
752 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
753 {
754 fence_before(success_order);
755 uint32_t success;
756 uint32_t tmp;
757 extended_storage_type original;
758 __asm__ __volatile__
759 (
760 BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
761 "mov %[success], #0\n" // success = 0
762 "ldrexh %[original], %[storage]\n" // original = zero_extend(*(&storage))
763 "cmp %[original], %[expected]\n" // flags = original==expected
764 "itt eq\n" // [hint that the following 2 instructions are conditional on flags.equal]
765 "strexheq %[success], %[desired], %[storage]\n" // if (flags.equal) *(&storage) = desired, success = store failed
766 "eoreq %[success], %[success], #1\n" // if (flags.equal) success ^= 1 (i.e. make it 1 if store succeeded)
767 BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
768 : [original] "=&r" (original), // %0
769 [success] "=&r" (success), // %1
770 [tmp] "=&l" (tmp), // %2
771 [storage] "+Q" (storage) // %3
772 : [expected] "Ir" (atomics::detail::zero_extend< extended_storage_type >(expected)), // %4
773 [desired] "r" (desired) // %5
774 : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
775 );
776 if (success)
777 fence_after(success_order);
778 else
779 fence_after(failure_order);
780 expected = static_cast< storage_type >(original);
781 return !!success;
782 }
783
    // Strong compare-and-swap for 16-bit storage, implemented with ldrexh/strexh.
    // If *(&storage) == expected, atomically replaces it with desired and returns true;
    // otherwise writes the observed value into expected and returns false. A spurious
    // strexh failure restarts the loop, so this variant never fails spuriously.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        fence_before(success_order);
        uint32_t success;
        uint32_t tmp;
        extended_storage_type original;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "mov %[success], #0\n" // success = 0
            "1:\n"
            "ldrexh %[original], %[storage]\n" // original = zero_extend(*(&storage))
            "cmp %[original], %[expected]\n" // flags = original==expected
            "bne 2f\n" // if (!flags.equal) goto end
            "strexh %[success], %[desired], %[storage]\n" // *(&storage) = desired, success = store failed
            "eors %[success], %[success], #1\n" // success ^= 1 (i.e. make it 1 if store succeeded); flags.equal = success == 0
            "beq 1b\n" // if (flags.equal) goto retry
            "2:\n"
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [success] "=&r" (success), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [expected] "Ir" (atomics::detail::zero_extend< extended_storage_type >(expected)), // %4
              [desired] "r" (desired) // %5
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        // Issue the fence that matches the ordering of the path actually taken
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        expected = static_cast< storage_type >(original);
        return !!success;
    }
819
    // Atomically performs *(&storage) += v in a ldrexh/strexh retry loop and
    // returns the value that was stored before the addition.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexh %[original], %[storage]\n" // original = zero_extend(*(&storage))
            "add %[result], %[original], %[value]\n" // result = original + value
            "strexh %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return static_cast< storage_type >(original);
    }
845
    // Atomically performs *(&storage) -= v in a ldrexh/strexh retry loop and
    // returns the value that was stored before the subtraction.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexh %[original], %[storage]\n" // original = zero_extend(*(&storage))
            "sub %[result], %[original], %[value]\n" // result = original - value
            "strexh %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return static_cast< storage_type >(original);
    }
871
    // Atomically performs *(&storage) &= v in a ldrexh/strexh retry loop and
    // returns the value that was stored before the operation.
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexh %[original], %[storage]\n" // original = zero_extend(*(&storage))
            "and %[result], %[original], %[value]\n" // result = original & value
            "strexh %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return static_cast< storage_type >(original);
    }
897
    // Atomically performs *(&storage) |= v in a ldrexh/strexh retry loop and
    // returns the value that was stored before the operation.
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexh %[original], %[storage]\n" // original = zero_extend(*(&storage))
            "orr %[result], %[original], %[value]\n" // result = original | value
            "strexh %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return static_cast< storage_type >(original);
    }
923
    // Atomically performs *(&storage) ^= v in a ldrexh/strexh retry loop and
    // returns the value that was stored before the operation.
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexh %[original], %[storage]\n" // original = zero_extend(*(&storage))
            "eor %[result], %[original], %[value]\n" // result = original ^ value
            "strexh %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return static_cast< storage_type >(original);
    }
949
test_and_setboost::atomics::detail::operations950 static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
951 {
952 return !!exchange(storage, (storage_type)1, order);
953 }
954
clearboost::atomics::detail::operations955 static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
956 {
957 store(storage, 0, order);
958 }
959 };
960
961 #else // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXH_STREXH)
962
// Fallback for targets without ldrexh/strexh (pre-ARMv6K): 16-bit unsigned
// atomics reuse the 32-bit word-sized implementation, overriding only the
// arithmetic operations, whose results must be truncated back to 16 bits
// (via uxth) before being stored.
template< >
struct operations< 2u, false > :
    public operations< 4u, false >
{
    typedef operations< 4u, false > base_type;
    typedef base_type::storage_type storage_type;

    // Atomically performs *(&storage) += v (mod 2^16) and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n" // original = *(&storage)
            "add %[result], %[original], %[value]\n" // result = original + value
            "uxth %[result], %[result]\n" // zero extend result from 16 to 32 bits
            "strex %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomically performs *(&storage) -= v (mod 2^16) and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n" // original = *(&storage)
            "sub %[result], %[original], %[value]\n" // result = original - value
            "uxth %[result], %[result]\n" // zero extend result from 16 to 32 bits
            "strex %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }
};
1024
// Fallback for targets without ldrexh/strexh (pre-ARMv6K): 16-bit signed
// atomics reuse the 32-bit word-sized implementation, overriding only the
// arithmetic operations, whose results must be sign-extended from 16 bits
// (via sxth) before being stored.
template< >
struct operations< 2u, true > :
    public operations< 4u, true >
{
    typedef operations< 4u, true > base_type;
    typedef base_type::storage_type storage_type;

    // Atomically performs *(&storage) += v (with 16-bit wraparound) and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n" // original = *(&storage)
            "add %[result], %[original], %[value]\n" // result = original + value
            "sxth %[result], %[result]\n" // sign extend result from 16 to 32 bits
            "strex %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomically performs *(&storage) -= v (with 16-bit wraparound) and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n" // original = *(&storage)
            "sub %[result], %[original], %[value]\n" // result = original - value
            "sxth %[result], %[result]\n" // sign extend result from 16 to 32 bits
            "strex %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }
};
1086
1087 #endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXH_STREXH)
1088
1089 #if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXD_STREXD)
1090
1091 // Unlike 32-bit operations, for 64-bit loads and stores we must use ldrexd/strexd.
1092 // Any other instructions result in a non-atomic sequence of 32-bit accesses.
1093 // See "ARM Architecture Reference Manual ARMv7-A and ARMv7-R edition",
1094 // Section A3.5.3 "Atomicity in the ARM architecture".
1095
1096 // In the asm blocks below we have to use 32-bit register pairs to compose 64-bit values.
1097 // In order to pass the 64-bit operands to/from asm blocks, we use undocumented gcc feature:
1098 // the lower half (Rt) of the operand is accessible normally, via the numbered placeholder (e.g. %0),
1099 // and the upper half (Rt2) - via the same placeholder with an 'H' after the '%' sign (e.g. %H0).
1100 // See: http://hardwarebug.org/2010/07/06/arm-inline-asm-secrets/
1101
// 64-bit atomic operations built on ldrexd/strexd. The 64-bit operands are
// passed through asm operands as register pairs: %N names the low word (Rt)
// and %HN the high word (Rt2).
template< bool Signed >
struct operations< 8u, Signed > :
    public gcc_arm_operations_base
{
    typedef typename make_storage_type< 8u >::type storage_type;
    typedef typename make_storage_type< 8u >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 8u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    // A plain pair of 32-bit stores would not be atomic, so a 64-bit store is
    // implemented as an exchange whose result is discarded.
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        exchange(storage, v, order);
    }

    // Loads the 64-bit value with a single ldrexd; a pair of 32-bit loads
    // would not be atomic.
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "ldrexd %1, %H1, [%2]\n"
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original) // %1
            : "r" (&storage) // %2
        );
        fence_after(order);
        return original;
    }

    // Atomically replaces the stored value with v and returns the previous value.
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
        fence_before(order);
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n" // load the original value
            "strexd %0, %2, %H2, [%3]\n" // store the replacement, tmp = store failed
            "teq %0, #0\n" // check if store succeeded
            "bne 1b\n"
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original) // %1
            : "r" (v), // %2
              "r" (&storage) // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        fence_after(order);
        return original;
    }

    // Weak CAS: may fail spuriously if strexd loses the exclusive reservation.
    // On exit, old_val.lo doubles as the success flag (1 on success, 0 on failure).
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        fence_before(success_order);
        uint32_t tmp;
        storage_type original, old_val = expected;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "ldrexd %1, %H1, [%3]\n" // original = *(&storage)
            "cmp %1, %2\n" // flags = original.lo==old_val.lo
            "ittt eq\n" // [hint that the following 3 instructions are conditional on flags.equal]
            "cmpeq %H1, %H2\n" // if (flags.equal) flags = original.hi==old_val.hi
            "strexdeq %0, %4, %H4, [%3]\n" // if (flags.equal) *(&storage) = desired, tmp = store failed
            "teqeq %0, #0\n" // if (flags.equal) flags = tmp==0
            "ite eq\n" // [hint that the following 2 instructions are conditional on flags.equal]
            "moveq %2, #1\n" // if (flags.equal) old_val.lo = 1
            "movne %2, #0\n" // if (!flags.equal) old_val.lo = 0
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "+r" (old_val) // %2
            : "r" (&storage), // %3
              "r" (desired) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        const uint32_t success = (uint32_t)old_val;
        // Issue the fence that matches the ordering of the path actually taken
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        expected = original;
        return !!success;
    }

    // Strong CAS: spurious strexd failures restart the loop, so the operation
    // only fails when the stored value differs from expected.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        fence_before(success_order);
        uint32_t tmp;
        storage_type original, old_val = expected;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n" // original = *(&storage)
            "cmp %1, %2\n" // flags = original.lo==old_val.lo
            "it eq\n" // [hint that the following instruction is conditional on flags.equal]
            "cmpeq %H1, %H2\n" // if (flags.equal) flags = original.hi==old_val.hi
            "bne 2f\n" // if (!flags.equal) goto end
            "strexd %0, %4, %H4, [%3]\n" // *(&storage) = desired, tmp = store failed
            "teq %0, #0\n" // flags.equal = tmp == 0
            "bne 1b\n" // if (!flags.equal) goto retry
            "2:\n"
            "ite eq\n" // [hint that the following 2 instructions are conditional on flags.equal]
            "moveq %2, #1\n" // if (flags.equal) old_val.lo = 1
            "movne %2, #0\n" // if (!flags.equal) old_val.lo = 0
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "+r" (old_val) // %2
            : "r" (&storage), // %3
              "r" (desired) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        const uint32_t success = (uint32_t)old_val;
        // Issue the fence that matches the ordering of the path actually taken
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        expected = original;
        return !!success;
    }

    // Atomically performs *(&storage) += v using a 32-bit add/adc pair and
    // returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n" // original = *(&storage)
            "adds %2, %1, %4\n" // result = original + value
            "adc %H2, %H1, %H4\n" // high words add with the carry from the low words
            "strexd %0, %2, %H2, [%3]\n" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result) // %2
            : "r" (&storage), // %3
              "r" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        fence_after(order);
        return original;
    }

    // Atomically performs *(&storage) -= v using a 32-bit subs/sbc pair and
    // returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n" // original = *(&storage)
            "subs %2, %1, %4\n" // result = original - value
            "sbc %H2, %H1, %H4\n" // high words subtract with the borrow from the low words
            "strexd %0, %2, %H2, [%3]\n" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result) // %2
            : "r" (&storage), // %3
              "r" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        fence_after(order);
        return original;
    }

    // Atomically performs *(&storage) &= v and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n" // original = *(&storage)
            "and %2, %1, %4\n" // result = original & value
            "and %H2, %H1, %H4\n"
            "strexd %0, %2, %H2, [%3]\n" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result) // %2
            : "r" (&storage), // %3
              "r" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        fence_after(order);
        return original;
    }

    // Atomically performs *(&storage) |= v and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n" // original = *(&storage)
            "orr %2, %1, %4\n" // result = original | value
            "orr %H2, %H1, %H4\n"
            "strexd %0, %2, %H2, [%3]\n" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result) // %2
            : "r" (&storage), // %3
              "r" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        fence_after(order);
        return original;
    }

    // Atomically performs *(&storage) ^= v and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n" // original = *(&storage)
            "eor %2, %1, %4\n" // result = original ^ value
            "eor %H2, %H1, %H4\n"
            "strexd %0, %2, %H2, [%3]\n" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result) // %2
            : "r" (&storage), // %3
              "r" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        fence_after(order);
        return original;
    }

    // Atomically sets the storage to 1 and returns whether it was previously non-zero.
    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    // Atomically resets the storage to zero.
    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, 0, order);
    }
};
1377
1378 #endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXD_STREXD)
1379
1380
thread_fence(memory_order order)1381 BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
1382 {
1383 if (order != memory_order_relaxed)
1384 gcc_arm_operations_base::hardware_full_fence();
1385 }
1386
signal_fence(memory_order order)1387 BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
1388 {
1389 if (order != memory_order_relaxed)
1390 __asm__ __volatile__ ("" ::: "memory");
1391 }
1392
1393 } // namespace detail
1394 } // namespace atomics
1395 } // namespace boost
1396
1397 #endif // BOOST_ATOMIC_DETAIL_OPS_GCC_ARM_HPP_INCLUDED_
1398