1 /*-
2 * pmatomic.h - Poor Man's atomics
3 *
4 * Borrowed from FreeBSD (original copyright follows).
5 *
6 * Standard atomic facilities in stdatomic.h are great, unless you are
7 * stuck with an old compiler, or you attempt to compile code using
8 * stdatomic.h in C++ mode [gcc 4.9], or if you were desperate enough to
9 * enable OpenMP in C mode [gcc 4.9].
10 *
11 * There are several discrepancies between gcc and clang, namely clang
12 * refuses to apply atomic operations to non-atomic types while gcc is
13 * more tolerant.
14 *
15 * For these reasons we provide a custom implementation of operations on
16 * atomic types:
17 *
18 * A. same names/semantics as in stdatomic.h;
19 * B. all names prefixed with 'pm_' to avoid name collisions;
20 * C. applicable to non-atomic types.
21 *
22 * Ex:
23 * int i;
24 * pm_atomic_fetch_add_explicit(&i, 1, pm_memory_order_relaxed);
25 *
26 * Note: do NOT use _Atomic keyword (see gcc issues above).
27 */
28
29 /*-
30 * Migration strategy
31 *
32 * Switching to <stdatomic.h> will be relatively easy. A
33 * straightforward text replace on the codebase removes 'pm_' prefix
34 * in names. Compiling with clang reveals missing _Atomic qualifiers.
35 */
36
37 /*-
38 * Logistics
39 *
40 * In order to make it possible to merge with the updated upstream we
41 * restrict modifications in this file to the bare minimum. For this
42 * reason we comment unused code regions with #if 0 instead of removing
43 * them.
44 *
45 * Renames are carried out by a script generating the final header.
46 */
47
48 /*-
49 * Copyright (c) 2011 Ed Schouten <ed@FreeBSD.org>
50 * David Chisnall <theraven@FreeBSD.org>
51 * All rights reserved.
52 *
53 * Redistribution and use in source and binary forms, with or without
54 * modification, are permitted provided that the following conditions
55 * are met:
56 * 1. Redistributions of source code must retain the above copyright
57 * notice, this list of conditions and the following disclaimer.
58 * 2. Redistributions in binary form must reproduce the above copyright
59 * notice, this list of conditions and the following disclaimer in the
60 * documentation and/or other materials provided with the distribution.
61 *
62 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
63 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
64 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
65 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
66 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
67 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
68 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
69 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
70 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
71 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
72 * SUCH DAMAGE.
73 *
74 * $FreeBSD: releng/10.1/sys/sys/stdatomic.h 264496 2014-04-15 09:41:52Z tijl $
75 */
76
77 #ifndef PMATOMIC_H__
78 #define PMATOMIC_H__
79
80 /* Compiler-fu */
/* Polyfills for compilers lacking clang-style feature-detection macros. */
#if !defined(__has_feature)
#define __has_feature(x) 0
#endif
#if !defined(__has_builtin)
#define __has_builtin(x) __has_feature(x)
#endif
/* True iff the compiler is gcc (or claims to be) of at least version maj.min. */
#if !defined(__GNUC_PREREQ__)
#if defined(__GNUC__) && defined(__GNUC_MINOR__)
#define __GNUC_PREREQ__(maj, min) \
	((__GNUC__ << 16) + __GNUC_MINOR__ >= ((maj) << 16) + (min))
#else
#define __GNUC_PREREQ__(maj, min) 0
#endif
#endif
95
96 #include <stddef.h>
97 #include <stdint.h>
98 #include <stdbool.h>
99
100 /*
101 * Removed __PM_CLANG_ATOMICS clause, this is because
102 * 1) clang understands gcc intrinsics as well;
 * 2) clang intrinsics require _Atomic qualified types while gcc ones
104 * don't.
105 */
#if __GNUC_PREREQ__(4, 7)
#define __PM_GNUC_ATOMICS	/* gcc >= 4.7: use the __atomic_* builtins */
#elif defined(__GNUC__)
#define __PM_SYNC_ATOMICS	/* older gcc: fall back to legacy __sync_* builtins */
#else
#error "pmatomic.h does not support your compiler"
#endif
113
114 /*
115 * 7.17.1 Atomic lock-free macros.
116 */
117 #if 0
118
119 #ifdef __GCC_ATOMIC_BOOL_LOCK_FREE
120 #define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
121 #endif
122 #ifdef __GCC_ATOMIC_CHAR_LOCK_FREE
123 #define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
124 #endif
125 #ifdef __GCC_ATOMIC_CHAR16_T_LOCK_FREE
126 #define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
127 #endif
128 #ifdef __GCC_ATOMIC_CHAR32_T_LOCK_FREE
129 #define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
130 #endif
131 #ifdef __GCC_ATOMIC_WCHAR_T_LOCK_FREE
132 #define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
133 #endif
134 #ifdef __GCC_ATOMIC_SHORT_LOCK_FREE
135 #define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
136 #endif
137 #ifdef __GCC_ATOMIC_INT_LOCK_FREE
138 #define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
139 #endif
140 #ifdef __GCC_ATOMIC_LONG_LOCK_FREE
141 #define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
142 #endif
143 #ifdef __GCC_ATOMIC_LLONG_LOCK_FREE
144 #define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
145 #endif
146 #ifdef __GCC_ATOMIC_POINTER_LOCK_FREE
147 #define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
148 #endif
149
150 #endif
151
152 /*
153 * 7.17.2 Initialization.
154 */
155 #if 0
156
157 #if defined(__PM_CLANG_ATOMICS)
158 #define ATOMIC_VAR_INIT(value) (value)
159 #define atomic_init(obj, value) __c11_atomic_init(obj, value)
160 #else
161 #define ATOMIC_VAR_INIT(value) { .__val = (value) }
162 #define atomic_init(obj, value) ((void)((obj)->__val = (value)))
163 #endif
164
165 #endif
166
167 /*
168 * Clang and recent GCC both provide predefined macros for the memory
169 * orderings. If we are using a compiler that doesn't define them, use the
170 * clang values - these will be ignored in the fallback path.
171 */
172
/*
 * These fallback definitions only matter on the __PM_SYNC_ATOMICS path,
 * where the order argument is discarded anyway (see L170 above).
 */
#ifndef __ATOMIC_RELAXED
#define __ATOMIC_RELAXED		0
#endif
#ifndef __ATOMIC_CONSUME
#define __ATOMIC_CONSUME		1
#endif
#ifndef __ATOMIC_ACQUIRE
#define __ATOMIC_ACQUIRE		2
#endif
#ifndef __ATOMIC_RELEASE
#define __ATOMIC_RELEASE		3
#endif
#ifndef __ATOMIC_ACQ_REL
#define __ATOMIC_ACQ_REL		4
#endif
#ifndef __ATOMIC_SEQ_CST
#define __ATOMIC_SEQ_CST		5
#endif
191
192 /*
193 * 7.17.3 Order and consistency.
194 *
195 * The pm_memory_order_* constants that denote the barrier behaviour of the
196 * atomic operations.
197 */
198
typedef enum {
	pm_memory_order_relaxed = __ATOMIC_RELAXED,	/* no ordering constraints */
	pm_memory_order_consume = __ATOMIC_CONSUME,	/* data-dependency ordering */
	pm_memory_order_acquire = __ATOMIC_ACQUIRE,	/* acquire barrier */
	pm_memory_order_release = __ATOMIC_RELEASE,	/* release barrier */
	pm_memory_order_acq_rel = __ATOMIC_ACQ_REL,	/* acquire + release */
	pm_memory_order_seq_cst = __ATOMIC_SEQ_CST	/* sequentially consistent */
} pm_memory_order;
207
208 /*
209 * 7.17.4 Fences.
210 */
211
212 static __inline void
pm_atomic_thread_fence(pm_memory_order __order)213 pm_atomic_thread_fence(pm_memory_order __order __attribute__((__unused__)))
214 {
215
216 #ifdef __PM_CLANG_ATOMICS
217 __c11_atomic_thread_fence(__order);
218 #elif defined(__PM_GNUC_ATOMICS)
219 __atomic_thread_fence(__order);
220 #else
221 __sync_synchronize();
222 #endif
223 }
224
225 static __inline void
pm_atomic_signal_fence(pm_memory_order __order)226 pm_atomic_signal_fence(pm_memory_order __order __attribute__((__unused__)))
227 {
228
229 #ifdef __PM_CLANG_ATOMICS
230 __c11_atomic_signal_fence(__order);
231 #elif defined(__PM_GNUC_ATOMICS)
232 __atomic_signal_fence(__order);
233 #else
234 __asm volatile ("" ::: "memory");
235 #endif
236 }
237
238 /*
239 * 7.17.5 Lock-free property.
240 */
241 #if 0
242
243 #if defined(_KERNEL)
244 /* Atomics in kernelspace are always lock-free. */
245 #define atomic_is_lock_free(obj) \
246 ((void)(obj), (bool)1)
247 #elif defined(__PM_CLANG_ATOMICS)
248 #define atomic_is_lock_free(obj) \
249 __atomic_is_lock_free(sizeof(*(obj)), obj)
250 #elif defined(__PM_GNUC_ATOMICS)
251 #define atomic_is_lock_free(obj) \
252 __atomic_is_lock_free(sizeof((obj)->__val), &(obj)->__val)
253 #else
254 #define atomic_is_lock_free(obj) \
255 ((void)(obj), sizeof((obj)->__val) <= sizeof(void *))
256 #endif
257
258 #endif
259
260 /*
261 * 7.17.6 Atomic integer types.
262 */
263 #if 0
264
265 typedef _Atomic(bool) atomic_bool;
266 typedef _Atomic(char) atomic_char;
267 typedef _Atomic(signed char) atomic_schar;
268 typedef _Atomic(unsigned char) atomic_uchar;
269 typedef _Atomic(short) atomic_short;
270 typedef _Atomic(unsigned short) atomic_ushort;
271 typedef _Atomic(int) atomic_int;
272 typedef _Atomic(unsigned int) atomic_uint;
273 typedef _Atomic(long) atomic_long;
274 typedef _Atomic(unsigned long) atomic_ulong;
275 typedef _Atomic(long long) atomic_llong;
276 typedef _Atomic(unsigned long long) atomic_ullong;
277 typedef _Atomic(__char16_t) atomic_char16_t;
278 typedef _Atomic(__char32_t) atomic_char32_t;
279 typedef _Atomic(___wchar_t) atomic_wchar_t;
280 typedef _Atomic(__int_least8_t) atomic_int_least8_t;
281 typedef _Atomic(__uint_least8_t) atomic_uint_least8_t;
282 typedef _Atomic(__int_least16_t) atomic_int_least16_t;
283 typedef _Atomic(__uint_least16_t) atomic_uint_least16_t;
284 typedef _Atomic(__int_least32_t) atomic_int_least32_t;
285 typedef _Atomic(__uint_least32_t) atomic_uint_least32_t;
286 typedef _Atomic(__int_least64_t) atomic_int_least64_t;
287 typedef _Atomic(__uint_least64_t) atomic_uint_least64_t;
288 typedef _Atomic(__int_fast8_t) atomic_int_fast8_t;
289 typedef _Atomic(__uint_fast8_t) atomic_uint_fast8_t;
290 typedef _Atomic(__int_fast16_t) atomic_int_fast16_t;
291 typedef _Atomic(__uint_fast16_t) atomic_uint_fast16_t;
292 typedef _Atomic(__int_fast32_t) atomic_int_fast32_t;
293 typedef _Atomic(__uint_fast32_t) atomic_uint_fast32_t;
294 typedef _Atomic(__int_fast64_t) atomic_int_fast64_t;
295 typedef _Atomic(__uint_fast64_t) atomic_uint_fast64_t;
296 typedef _Atomic(__intptr_t) atomic_intptr_t;
297 typedef _Atomic(__uintptr_t) atomic_uintptr_t;
298 typedef _Atomic(__size_t) atomic_size_t;
299 typedef _Atomic(__ptrdiff_t) atomic_ptrdiff_t;
300 typedef _Atomic(__intmax_t) atomic_intmax_t;
301 typedef _Atomic(__uintmax_t) atomic_uintmax_t;
302
303 #endif
304
305 /*
306 * 7.17.7 Operations on atomic types.
307 */
308
309 /*
310 * Compiler-specific operations.
311 */
312
/*
 * NOTE(review): __PM_CLANG_ATOMICS is never defined in this header (see
 * the comment above the backend selection); the branch is preserved
 * verbatim to ease merging with upstream FreeBSD stdatomic.h.
 */
#if defined(__PM_CLANG_ATOMICS)
#define pm_atomic_compare_exchange_strong_explicit(object, expected,	\
    desired, success, failure)						\
	__c11_atomic_compare_exchange_strong(object, expected, desired,	\
	    success, failure)
#define pm_atomic_compare_exchange_weak_explicit(object, expected,	\
    desired, success, failure)						\
	__c11_atomic_compare_exchange_weak(object, expected, desired,	\
	    success, failure)
#define pm_atomic_exchange_explicit(object, desired, order)		\
	__c11_atomic_exchange(object, desired, order)
#define pm_atomic_fetch_add_explicit(object, operand, order)		\
	__c11_atomic_fetch_add(object, operand, order)
#define pm_atomic_fetch_and_explicit(object, operand, order)		\
	__c11_atomic_fetch_and(object, operand, order)
#define pm_atomic_fetch_or_explicit(object, operand, order)		\
	__c11_atomic_fetch_or(object, operand, order)
#define pm_atomic_fetch_sub_explicit(object, operand, order)		\
	__c11_atomic_fetch_sub(object, operand, order)
#define pm_atomic_fetch_xor_explicit(object, operand, order)		\
	__c11_atomic_fetch_xor(object, operand, order)
#define pm_atomic_load_explicit(object, order)				\
	__c11_atomic_load(object, order)
#define pm_atomic_store_explicit(object, desired, order)		\
	__c11_atomic_store(object, desired, order)
#elif defined(__PM_GNUC_ATOMICS)
/*
 * gcc >= 4.7: map directly onto the __atomic_* builtins.  The fourth
 * argument of __atomic_compare_exchange_n selects weak (1) vs strong
 * (0) compare-and-swap.
 */
#define pm_atomic_compare_exchange_strong_explicit(object, expected,	\
    desired, success, failure)						\
	__atomic_compare_exchange_n(object, expected,			\
	    desired, 0, success, failure)
#define pm_atomic_compare_exchange_weak_explicit(object, expected,	\
    desired, success, failure)						\
	__atomic_compare_exchange_n(object, expected,			\
	    desired, 1, success, failure)
#define pm_atomic_exchange_explicit(object, desired, order)		\
	__atomic_exchange_n(object, desired, order)
#define pm_atomic_fetch_add_explicit(object, operand, order)		\
	__atomic_fetch_add(object, operand, order)
#define pm_atomic_fetch_and_explicit(object, operand, order)		\
	__atomic_fetch_and(object, operand, order)
#define pm_atomic_fetch_or_explicit(object, operand, order)		\
	__atomic_fetch_or(object, operand, order)
#define pm_atomic_fetch_sub_explicit(object, operand, order)		\
	__atomic_fetch_sub(object, operand, order)
#define pm_atomic_fetch_xor_explicit(object, operand, order)		\
	__atomic_fetch_xor(object, operand, order)
#define pm_atomic_load_explicit(object, order)				\
	__atomic_load_n(object, order)
#define pm_atomic_store_explicit(object, desired, order)		\
	__atomic_store_n(object, desired, order)
#else
/*
 * Legacy __sync_* fallback.  All __sync builtins are full barriers, so
 * the order arguments are evaluated for side effects and discarded.
 *
 * __pm_atomic_apply_stride promotes 'operand' to the type of *object;
 * presumably this applies the element stride for pointer targets, as
 * in upstream FreeBSD stdatomic.h -- verify against upstream.
 */
#define __pm_atomic_apply_stride(object, operand) \
	(((__typeof__(*(object)))0) + (operand))
/*
 * CAS: success/failure orders are ignored; *expected is updated with
 * the value observed, and the result is whether the swap happened.
 */
#define pm_atomic_compare_exchange_strong_explicit(object, expected,	\
    desired, success, failure)	__extension__ ({			\
	__typeof__(expected) __ep = (expected);				\
	__typeof__(*__ep) __e = *__ep;					\
	(void)(success); (void)(failure);				\
	(bool)((*__ep = __sync_val_compare_and_swap(object,		\
	    __e, desired)) == __e);					\
})
#define pm_atomic_compare_exchange_weak_explicit(object, expected,	\
    desired, success, failure)						\
	pm_atomic_compare_exchange_strong_explicit(object, expected,	\
		desired, success, failure)
#if __has_builtin(__sync_swap)
/* Clang provides a full-barrier atomic exchange - use it if available. */
#define pm_atomic_exchange_explicit(object, desired, order)		\
	((void)(order), __sync_swap(object, desired))
#else
/*
 * __sync_lock_test_and_set() is only an acquire barrier in theory (although in
 * practice it is usually a full barrier) so we need an explicit barrier before
 * it.
 */
#define pm_atomic_exchange_explicit(object, desired, order)		\
__extension__ ({							\
	__typeof__(object) __o = (object);				\
	__typeof__(desired) __d = (desired);				\
	(void)(order);							\
	__sync_synchronize();						\
	__sync_lock_test_and_set(__o, __d);				\
})
#endif
#define pm_atomic_fetch_add_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_add(object,			\
	    __pm_atomic_apply_stride(object, operand)))
#define pm_atomic_fetch_and_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_and(object, operand))
#define pm_atomic_fetch_or_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_or(object, operand))
#define pm_atomic_fetch_sub_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_sub(object,			\
	    __pm_atomic_apply_stride(object, operand)))
#define pm_atomic_fetch_xor_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_xor(object, operand))
/* Atomic load implemented as an atomic add of zero. */
#define pm_atomic_load_explicit(object, order)				\
	((void)(order), __sync_fetch_and_add(object, 0))
/* Atomic store implemented as an exchange whose result is discarded. */
#define pm_atomic_store_explicit(object, desired, order)		\
	((void)pm_atomic_exchange_explicit(object, desired, order))
#endif
414
415 /*
416 * Convenience functions.
417 *
418 * Don't provide these in kernel space. In kernel space, we should be
419 * disciplined enough to always provide explicit barriers.
420 */
421
#ifndef _KERNEL
/* seq_cst shorthands for the *_explicit operations defined above. */
#define pm_atomic_compare_exchange_strong(object, expected, desired)	\
	pm_atomic_compare_exchange_strong_explicit(object, expected,	\
	    desired, pm_memory_order_seq_cst, pm_memory_order_seq_cst)
#define pm_atomic_compare_exchange_weak(object, expected, desired)	\
	pm_atomic_compare_exchange_weak_explicit(object, expected,	\
	    desired, pm_memory_order_seq_cst, pm_memory_order_seq_cst)
#define pm_atomic_exchange(object, desired)				\
	pm_atomic_exchange_explicit(object, desired, pm_memory_order_seq_cst)
#define pm_atomic_fetch_add(object, operand)				\
	pm_atomic_fetch_add_explicit(object, operand, pm_memory_order_seq_cst)
#define pm_atomic_fetch_and(object, operand)				\
	pm_atomic_fetch_and_explicit(object, operand, pm_memory_order_seq_cst)
#define pm_atomic_fetch_or(object, operand)				\
	pm_atomic_fetch_or_explicit(object, operand, pm_memory_order_seq_cst)
#define pm_atomic_fetch_sub(object, operand)				\
	pm_atomic_fetch_sub_explicit(object, operand, pm_memory_order_seq_cst)
#define pm_atomic_fetch_xor(object, operand)				\
	pm_atomic_fetch_xor_explicit(object, operand, pm_memory_order_seq_cst)
#define pm_atomic_load(object)						\
	pm_atomic_load_explicit(object, pm_memory_order_seq_cst)
#define pm_atomic_store(object, desired)				\
	pm_atomic_store_explicit(object, desired, pm_memory_order_seq_cst)
#endif /* !_KERNEL */
446
447 /*
448 * 7.17.8 Atomic flag type and operations.
449 *
450 * XXX: Assume atomic_bool can be used as an atomic_flag. Is there some
451 * kind of compiler built-in type we could use?
452 */
453 #if 0
454
455 typedef struct {
456 atomic_bool __flag;
457 } atomic_flag;
458
459 #define ATOMIC_FLAG_INIT { ATOMIC_VAR_INIT(0) }
460
461 static __inline bool
462 atomic_flag_test_and_set_explicit(volatile atomic_flag *__object,
463 pm_memory_order __order)
464 {
465 return (pm_atomic_exchange_explicit(&__object->__flag, 1, __order));
466 }
467
468 static __inline void
469 atomic_flag_clear_explicit(volatile atomic_flag *__object, pm_memory_order __order)
470 {
471
472 pm_atomic_store_explicit(&__object->__flag, 0, __order);
473 }
474
475 #ifndef _KERNEL
476 static __inline bool
477 atomic_flag_test_and_set(volatile atomic_flag *__object)
478 {
479
480 return (atomic_flag_test_and_set_explicit(__object,
481 pm_memory_order_seq_cst));
482 }
483
484 static __inline void
485 atomic_flag_clear(volatile atomic_flag *__object)
486 {
487
488 atomic_flag_clear_explicit(__object, pm_memory_order_seq_cst);
489 }
490 #endif /* !_KERNEL */
491
492 #endif
493
#endif /* PMATOMIC_H__ */
495