/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2017 The FreeBSD Foundation
 *
 * This software was developed by Konstantin Belousov <kib@FreeBSD.org>
 * under sponsorship from the FreeBSD Foundation.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */
#ifndef _SYS_ATOMIC_COMMON_H_
#define	_SYS_ATOMIC_COMMON_H_

/*
 * This header supplies only the common, MI portion of the atomic API;
 * the MD <machine/atomic.h> wrapper defines _CPU_ATOMIC_H_ and must be
 * the one that pulls us in.
 */
#ifndef _CPU_ATOMIC_H_
#error "Do not directly include this header, use <machine/atomic.h>."
#endif

#include <sys/types.h>

/*
 * Relaxed (unordered) atomic loads, implemented as plain volatile reads.
 * The volatile qualifier forces the compiler to issue exactly one memory
 * access and prevents it from caching or tearing the value; no CPU
 * ordering (acquire/release) is implied.  The caller must pass a pointer
 * that is naturally aligned for the access width — on supported
 * architectures an aligned load of these sizes is a single instruction
 * and therefore atomic.
 */
#define	__atomic_load_bool_relaxed(p)	(*(volatile _Bool *)(p))
#define	__atomic_load_char_relaxed(p)	(*(volatile u_char *)(p))
#define	__atomic_load_short_relaxed(p)	(*(volatile u_short *)(p))
#define	__atomic_load_int_relaxed(p)	(*(volatile u_int *)(p))
#define	__atomic_load_long_relaxed(p)	(*(volatile u_long *)(p))
#define	__atomic_load_8_relaxed(p)	(*(volatile uint8_t *)(p))
#define	__atomic_load_16_relaxed(p)	(*(volatile uint16_t *)(p))
#define	__atomic_load_32_relaxed(p)	(*(volatile uint32_t *)(p))
#define	__atomic_load_64_relaxed(p)	(*(volatile uint64_t *)(p))
48*d42a763bSAaron LI 
/*
 * Relaxed (unordered) atomic stores, the mirror image of the loads
 * above: a single volatile write through an appropriately-typed pointer.
 * The value is cast to the unsigned type of the target width, so callers
 * may pass any assignment-compatible expression.  Same alignment
 * requirement and same absence of ordering guarantees as the loads.
 */
#define	__atomic_store_bool_relaxed(p, v)	\
	(*(volatile _Bool *)(p) = (_Bool)(v))
#define	__atomic_store_char_relaxed(p, v)	\
	(*(volatile u_char *)(p) = (u_char)(v))
#define	__atomic_store_short_relaxed(p, v)	\
	(*(volatile u_short *)(p) = (u_short)(v))
#define	__atomic_store_int_relaxed(p, v)	\
	(*(volatile u_int *)(p) = (u_int)(v))
#define	__atomic_store_long_relaxed(p, v)	\
	(*(volatile u_long *)(p) = (u_long)(v))
#define	__atomic_store_8_relaxed(p, v)		\
	(*(volatile uint8_t *)(p) = (uint8_t)(v))
#define	__atomic_store_16_relaxed(p, v)		\
	(*(volatile uint16_t *)(p) = (uint16_t)(v))
#define	__atomic_store_32_relaxed(p, v)		\
	(*(volatile uint32_t *)(p) = (uint32_t)(v))
#define	__atomic_store_64_relaxed(p, v)		\
	(*(volatile uint64_t *)(p) = (uint64_t)(v))
67*d42a763bSAaron LI 
/*
 * When _Generic is available, try to provide some type checking.
 *
 * __atomic_{load,store}_generic(p, ..., t, ut, n) selects on the type of
 * *(p): 't' is the signed flavor, 'ut' the unsigned flavor, and 'n' is
 * the suffix used to paste the __atomic_*_<n>_relaxed macro name.  If
 * *(p) matches neither type, _Generic has no matching association and
 * compilation fails — that is the type check.  Without C11 (or Clang's
 * c_generic_selections extension) the wrappers degrade to the unchecked
 * relaxed macros, accepting any pointer.
 */
#if (defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L) || \
    __has_extension(c_generic_selections)
/* _Bool has no signed/unsigned pair, so it gets a one-association check. */
#define	atomic_load_bool(p)			\
	_Generic(*(p), _Bool: __atomic_load_bool_relaxed(p))
#define	atomic_store_bool(p, v)			\
	_Generic(*(p), _Bool: __atomic_store_bool_relaxed(p, v))

#define	__atomic_load_generic(p, t, ut, n)	\
	_Generic(*(p),				\
	    t: __atomic_load_ ## n ## _relaxed(p), \
	    ut: __atomic_load_ ## n ## _relaxed(p))
#define	__atomic_store_generic(p, v, t, ut, n)	\
	_Generic(*(p),				\
	    t: __atomic_store_ ## n ## _relaxed(p, v), \
	    ut: __atomic_store_ ## n ## _relaxed(p, v))
#else
/* No _Generic: same expansion, no type checking. */
#define	atomic_load_bool(p)			\
	__atomic_load_bool_relaxed(p)
#define	atomic_store_bool(p, v)			\
	__atomic_store_bool_relaxed(p, v)
#define	__atomic_load_generic(p, t, ut, n)	\
	__atomic_load_ ## n ## _relaxed(p)
#define	__atomic_store_generic(p, v, t, ut, n)	\
	__atomic_store_ ## n ## _relaxed(p, v)
#endif
96*d42a763bSAaron LI 
/*
 * Public relaxed load/store entry points for the standard integer
 * widths.  Each accepts a pointer to either the signed or the unsigned
 * variant of its type (enforced via __atomic_*_generic when _Generic is
 * available).  64-bit variants are defined separately below because they
 * are only provided on 64-bit platforms.
 */
#define	atomic_load_char(p)	__atomic_load_generic(p, char, u_char, char)
#define	atomic_load_short(p)	__atomic_load_generic(p, short, u_short, short)
#define	atomic_load_int(p)	__atomic_load_generic(p, int, u_int, int)
#define	atomic_load_long(p)	__atomic_load_generic(p, long, u_long, long)
#define	atomic_load_8(p)	__atomic_load_generic(p, int8_t, uint8_t, 8)
#define	atomic_load_16(p)	__atomic_load_generic(p, int16_t, uint16_t, 16)
#define	atomic_load_32(p)	__atomic_load_generic(p, int32_t, uint32_t, 32)

#define	atomic_store_char(p, v)			\
	__atomic_store_generic(p, v, char, u_char, char)
#define	atomic_store_short(p, v)		\
	__atomic_store_generic(p, v, short, u_short, short)
#define	atomic_store_int(p, v)			\
	__atomic_store_generic(p, v, int, u_int, int)
#define	atomic_store_long(p, v)			\
	__atomic_store_generic(p, v, long, u_long, long)
#define	atomic_store_8(p, v)			\
	__atomic_store_generic(p, v, int8_t, uint8_t, 8)
#define	atomic_store_16(p, v)			\
	__atomic_store_generic(p, v, int16_t, uint16_t, 16)
#define	atomic_store_32(p, v)			\
	__atomic_store_generic(p, v, int32_t, uint32_t, 32)
119*d42a763bSAaron LI 
/*
 * 64-bit load/store are only defined here for LP64 targets, where a
 * plain aligned 64-bit access is a single instruction.  NOTE(review):
 * 32-bit platforms presumably must supply atomic_load_64/atomic_store_64
 * themselves in <machine/atomic.h> if they support them at all — confirm
 * against the MD headers.
 */
#ifdef __LP64__
#define	atomic_load_64(p)	__atomic_load_generic(p, int64_t, uint64_t, 64)
#define	atomic_store_64(p, v)			\
	__atomic_store_generic(p, v, int64_t, uint64_t, 64)
#endif
125*d42a763bSAaron LI 
/*
 * Relaxed load/store of a pointer (or pointer-sized object).  __typeof
 * preserves the pointed-to type, so the result of atomic_load_ptr(p) has
 * the type of *(p) rather than an integer type.
 */
#define	atomic_load_ptr(p)	(*(const volatile __typeof(*(p)) *)(p))
/*
 * The store goes through a named temporary inside a GCC statement
 * expression rather than a direct cast-and-assign, to work around the
 * GCC '-Wcast-qual' warning.
 */
#define	atomic_store_ptr(p, v)	\
	({ volatile __typeof(*(p)) *__as_ptr = (p); *__as_ptr = (v); })

#endif /* !_SYS_ATOMIC_COMMON_H_ */