/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2005 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#ifndef	_SYS_ATOMIC_H
#define	_SYS_ATOMIC_H

#include <sys/types.h>
#include <sys/inttypes.h>

#ifdef	__cplusplus
extern "C" {
#endif

#if defined(__STDC__)
/*
 * Increment target.
 */
extern void atomic_inc_8(volatile uint8_t *);
extern void atomic_inc_uchar(volatile uchar_t *);
extern void atomic_inc_16(volatile uint16_t *);
extern void atomic_inc_ushort(volatile ushort_t *);
extern void atomic_inc_32(volatile uint32_t *);
extern void atomic_inc_uint(volatile uint_t *);
extern void atomic_inc_ulong(volatile ulong_t *);
#if defined(_INT64_TYPE)
extern void atomic_inc_64(volatile uint64_t *);
#endif

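/*
 * For example, a minimal sketch (illustrative only, not part of this
 * interface) of bumping a shared statistics counter without a lock; the
 * stat_rx_packets variable is hypothetical:
 *
 *	static volatile uint32_t stat_rx_packets;
 *
 *	void
 *	record_rx_packet(void)
 *	{
 *		atomic_inc_32(&stat_rx_packets);
 *	}
 */
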
/*
 * Decrement target.
 */
extern void atomic_dec_8(volatile uint8_t *);
extern void atomic_dec_uchar(volatile uchar_t *);
extern void atomic_dec_16(volatile uint16_t *);
extern void atomic_dec_ushort(volatile ushort_t *);
extern void atomic_dec_32(volatile uint32_t *);
extern void atomic_dec_uint(volatile uint_t *);
extern void atomic_dec_ulong(volatile ulong_t *);
#if defined(_INT64_TYPE)
extern void atomic_dec_64(volatile uint64_t *);
#endif

/*
 * Add delta to target.
 */
extern void atomic_add_8(volatile uint8_t *, int8_t);
extern void atomic_add_char(volatile uchar_t *, signed char);
extern void atomic_add_16(volatile uint16_t *, int16_t);
extern void atomic_add_short(volatile ushort_t *, short);
extern void atomic_add_32(volatile uint32_t *, int32_t);
extern void atomic_add_int(volatile uint_t *, int);
extern void atomic_add_ptr(volatile void *, ssize_t);
extern void atomic_add_long(volatile ulong_t *, long);
#if defined(_INT64_TYPE)
extern void atomic_add_64(volatile uint64_t *, int64_t);
#endif

/*
 * Subtract delta from target.
 */
extern void atomic_sub_8(volatile uint8_t *, int8_t);
extern void atomic_sub_char(volatile uchar_t *, signed char);
extern void atomic_sub_16(volatile uint16_t *, int16_t);
extern void atomic_sub_short(volatile ushort_t *, short);
extern void atomic_sub_32(volatile uint32_t *, int32_t);
extern void atomic_sub_int(volatile uint_t *, int);
extern void atomic_sub_ptr(volatile void *, ssize_t);
extern void atomic_sub_long(volatile ulong_t *, long);
#if defined(_INT64_TYPE)
extern void atomic_sub_64(volatile uint64_t *, int64_t);
#endif

/*
 * Logical OR bits with target.
 */
extern void atomic_or_8(volatile uint8_t *, uint8_t);
extern void atomic_or_uchar(volatile uchar_t *, uchar_t);
extern void atomic_or_16(volatile uint16_t *, uint16_t);
extern void atomic_or_ushort(volatile ushort_t *, ushort_t);
extern void atomic_or_32(volatile uint32_t *, uint32_t);
extern void atomic_or_uint(volatile uint_t *, uint_t);
extern void atomic_or_ulong(volatile ulong_t *, ulong_t);
#if defined(_INT64_TYPE)
extern void atomic_or_64(volatile uint64_t *, uint64_t);
#endif

/*
 * Logical AND bits with target.
 */
extern void atomic_and_8(volatile uint8_t *, uint8_t);
extern void atomic_and_uchar(volatile uchar_t *, uchar_t);
extern void atomic_and_16(volatile uint16_t *, uint16_t);
extern void atomic_and_ushort(volatile ushort_t *, ushort_t);
extern void atomic_and_32(volatile uint32_t *, uint32_t);
extern void atomic_and_uint(volatile uint_t *, uint_t);
extern void atomic_and_ulong(volatile ulong_t *, ulong_t);
#if defined(_INT64_TYPE)
extern void atomic_and_64(volatile uint64_t *, uint64_t);
#endif

/*
 * As above, but return the new value.  Note that these _nv() variants are
 * substantially more expensive on some platforms than the no-return-value
 * versions above, so don't use them unless you really need to know the
 * new value *atomically* (e.g. when decrementing a reference count and
 * checking whether it went to zero).
 */

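/*
 * For example, a minimal sketch (illustrative only, not part of this
 * interface) of the reference-count case mentioned above; the obj_t type,
 * its uint_t obj_refcnt field and obj_free() are hypothetical:
 *
 *	void
 *	obj_rele(obj_t *op)
 *	{
 *		if (atomic_dec_uint_nv(&op->obj_refcnt) == 0)
 *			obj_free(op);
 *	}
 */
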
/*
 * Increment target and return new value.
 */
extern uint8_t atomic_inc_8_nv(volatile uint8_t *);
extern uchar_t atomic_inc_uchar_nv(volatile uchar_t *);
extern uint16_t atomic_inc_16_nv(volatile uint16_t *);
extern ushort_t atomic_inc_ushort_nv(volatile ushort_t *);
extern uint32_t atomic_inc_32_nv(volatile uint32_t *);
extern uint_t atomic_inc_uint_nv(volatile uint_t *);
extern ulong_t atomic_inc_ulong_nv(volatile ulong_t *);
#if defined(_INT64_TYPE)
extern uint64_t atomic_inc_64_nv(volatile uint64_t *);
#endif

/*
 * Decrement target and return new value.
 */
extern uint8_t atomic_dec_8_nv(volatile uint8_t *);
extern uchar_t atomic_dec_uchar_nv(volatile uchar_t *);
extern uint16_t atomic_dec_16_nv(volatile uint16_t *);
extern ushort_t atomic_dec_ushort_nv(volatile ushort_t *);
extern uint32_t atomic_dec_32_nv(volatile uint32_t *);
extern uint_t atomic_dec_uint_nv(volatile uint_t *);
extern ulong_t atomic_dec_ulong_nv(volatile ulong_t *);
#if defined(_INT64_TYPE)
extern uint64_t atomic_dec_64_nv(volatile uint64_t *);
#endif

/*
 * Add delta to target and return new value.
 */
extern uint8_t atomic_add_8_nv(volatile uint8_t *, int8_t);
extern uchar_t atomic_add_char_nv(volatile uchar_t *, signed char);
extern uint16_t atomic_add_16_nv(volatile uint16_t *, int16_t);
extern ushort_t atomic_add_short_nv(volatile ushort_t *, short);
extern uint32_t atomic_add_32_nv(volatile uint32_t *, int32_t);
extern uint_t atomic_add_int_nv(volatile uint_t *, int);
extern void *atomic_add_ptr_nv(volatile void *, ssize_t);
extern ulong_t atomic_add_long_nv(volatile ulong_t *, long);
#if defined(_INT64_TYPE)
extern uint64_t atomic_add_64_nv(volatile uint64_t *, int64_t);
#endif

/*
 * Subtract delta from target and return new value.
 */
extern uint8_t atomic_sub_8_nv(volatile uint8_t *, int8_t);
extern uchar_t atomic_sub_char_nv(volatile uchar_t *, signed char);
extern uint16_t atomic_sub_16_nv(volatile uint16_t *, int16_t);
extern ushort_t atomic_sub_short_nv(volatile ushort_t *, short);
extern uint32_t atomic_sub_32_nv(volatile uint32_t *, int32_t);
extern uint_t atomic_sub_int_nv(volatile uint_t *, int);
extern void *atomic_sub_ptr_nv(volatile void *, ssize_t);
extern ulong_t atomic_sub_long_nv(volatile ulong_t *, long);
#if defined(_INT64_TYPE)
extern uint64_t atomic_sub_64_nv(volatile uint64_t *, int64_t);
#endif

/*
 * Logical OR bits with target and return new value.
 */
extern uint8_t atomic_or_8_nv(volatile uint8_t *, uint8_t);
extern uchar_t atomic_or_uchar_nv(volatile uchar_t *, uchar_t);
extern uint16_t atomic_or_16_nv(volatile uint16_t *, uint16_t);
extern ushort_t atomic_or_ushort_nv(volatile ushort_t *, ushort_t);
extern uint32_t atomic_or_32_nv(volatile uint32_t *, uint32_t);
extern uint_t atomic_or_uint_nv(volatile uint_t *, uint_t);
extern ulong_t atomic_or_ulong_nv(volatile ulong_t *, ulong_t);
#if defined(_INT64_TYPE)
extern uint64_t atomic_or_64_nv(volatile uint64_t *, uint64_t);
#endif

/*
 * Logical AND bits with target and return new value.
 */
extern uint8_t atomic_and_8_nv(volatile uint8_t *, uint8_t);
extern uchar_t atomic_and_uchar_nv(volatile uchar_t *, uchar_t);
extern uint16_t atomic_and_16_nv(volatile uint16_t *, uint16_t);
extern ushort_t atomic_and_ushort_nv(volatile ushort_t *, ushort_t);
extern uint32_t atomic_and_32_nv(volatile uint32_t *, uint32_t);
extern uint_t atomic_and_uint_nv(volatile uint_t *, uint_t);
extern ulong_t atomic_and_ulong_nv(volatile ulong_t *, ulong_t);
#if defined(_INT64_TYPE)
extern uint64_t atomic_and_64_nv(volatile uint64_t *, uint64_t);
#endif

/*
 * If *arg1 == arg2, set *arg1 = arg3; return old value.
 */
extern uint8_t atomic_cas_8(volatile uint8_t *, uint8_t, uint8_t);
extern uchar_t atomic_cas_uchar(volatile uchar_t *, uchar_t, uchar_t);
extern uint16_t atomic_cas_16(volatile uint16_t *, uint16_t, uint16_t);
extern ushort_t atomic_cas_ushort(volatile ushort_t *, ushort_t, ushort_t);
extern uint32_t atomic_cas_32(volatile uint32_t *, uint32_t, uint32_t);
extern uint_t atomic_cas_uint(volatile uint_t *, uint_t, uint_t);
extern void *atomic_cas_ptr(volatile void *, void *, void *);
extern ulong_t atomic_cas_ulong(volatile ulong_t *, ulong_t, ulong_t);
#if defined(_INT64_TYPE)
extern uint64_t atomic_cas_64(volatile uint64_t *, uint64_t, uint64_t);
#endif

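/*
 * For example, a minimal sketch (illustrative only, not part of this
 * interface) of the usual compare-and-swap retry loop, here recording a
 * new high-water mark; update_hwm() is hypothetical:
 *
 *	void
 *	update_hwm(volatile uint32_t *hwm, uint32_t val)
 *	{
 *		uint32_t old;
 *
 *		do {
 *			old = *hwm;
 *			if (val <= old)
 *				return;
 *		} while (atomic_cas_32(hwm, old, val) != old);
 *	}
 */
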
/*
 * Swap target and return old value.
 */
extern uint8_t atomic_swap_8(volatile uint8_t *, uint8_t);
extern uchar_t atomic_swap_uchar(volatile uchar_t *, uchar_t);
extern uint16_t atomic_swap_16(volatile uint16_t *, uint16_t);
extern ushort_t atomic_swap_ushort(volatile ushort_t *, ushort_t);
extern uint32_t atomic_swap_32(volatile uint32_t *, uint32_t);
extern uint_t atomic_swap_uint(volatile uint_t *, uint_t);
extern void *atomic_swap_ptr(volatile void *, void *);
extern ulong_t atomic_swap_ulong(volatile ulong_t *, ulong_t);
#if defined(_INT64_TYPE)
extern uint64_t atomic_swap_64(volatile uint64_t *, uint64_t);
#endif

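/*
 * For example, a minimal sketch (illustrative only, not part of this
 * interface) of atomically detaching a cached buffer pointer so that
 * exactly one thread frees it; cache_buf and buf_free() are hypothetical:
 *
 *	static void *volatile cache_buf;
 *
 *	void
 *	cache_teardown(void)
 *	{
 *		void *buf = atomic_swap_ptr(&cache_buf, NULL);
 *
 *		if (buf != NULL)
 *			buf_free(buf);
 *	}
 */
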
/*
 * Atomically read variable.
 */
#define	atomic_load_char(p)	(*(volatile uchar_t *)(p))
#define	atomic_load_short(p)	(*(volatile ushort_t *)(p))
#define	atomic_load_int(p)	(*(volatile uint_t *)(p))
#define	atomic_load_long(p)	(*(volatile ulong_t *)(p))
#define	atomic_load_ptr(p)	(*(volatile __typeof(*(p)) *)(p))
#define	atomic_load_8(p)	(*(volatile uint8_t *)(p))
#define	atomic_load_16(p)	(*(volatile uint16_t *)(p))
#define	atomic_load_32(p)	(*(volatile uint32_t *)(p))
#ifdef _LP64
#define	atomic_load_64(p)	(*(volatile uint64_t *)(p))
#elif defined(_INT64_TYPE)
extern uint64_t atomic_load_64(volatile uint64_t *);
#endif

/*
 * Atomically write variable.
 */
#define	atomic_store_char(p, v)		\
	(*(volatile uchar_t *)(p) = (uchar_t)(v))
#define	atomic_store_short(p, v)	\
	(*(volatile ushort_t *)(p) = (ushort_t)(v))
#define	atomic_store_int(p, v)		\
	(*(volatile uint_t *)(p) = (uint_t)(v))
#define	atomic_store_long(p, v)		\
	(*(volatile ulong_t *)(p) = (ulong_t)(v))
#define	atomic_store_ptr(p, v)		\
	(*(volatile __typeof(*(p)) *)(p) = (v))
#define	atomic_store_8(p, v)		\
	(*(volatile uint8_t *)(p) = (uint8_t)(v))
#define	atomic_store_16(p, v)		\
	(*(volatile uint16_t *)(p) = (uint16_t)(v))
#define	atomic_store_32(p, v)		\
	(*(volatile uint32_t *)(p) = (uint32_t)(v))
#ifdef _LP64
#define	atomic_store_64(p, v)		\
	(*(volatile uint64_t *)(p) = (uint64_t)(v))
#elif defined(_INT64_TYPE)
extern void atomic_store_64(volatile uint64_t *, uint64_t);
#endif

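/*
 * For example, a minimal sketch (illustrative only, not part of this
 * interface) of publishing and reading a 64-bit generation number without
 * tearing, even on 32-bit platforms; cfg_gen is hypothetical:
 *
 *	static volatile uint64_t cfg_gen;
 *
 *	void
 *	cfg_set_gen(uint64_t gen)
 *	{
 *		atomic_store_64(&cfg_gen, gen);
 *	}
 *
 *	uint64_t
 *	cfg_get_gen(void)
 *	{
 *		return (atomic_load_64(&cfg_gen));
 *	}
 */
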
/*
 * Perform an exclusive atomic bit set/clear on a target.
 * Returns 0 if bit was successfully set/cleared, or -1
 * if the bit was already set/cleared.
 */
extern int atomic_set_long_excl(volatile ulong_t *, uint_t);
extern int atomic_clear_long_excl(volatile ulong_t *, uint_t);

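/*
 * For example, a minimal sketch (illustrative only, not part of this
 * interface) of claiming and releasing a slot in a small in-use bitmap;
 * slot_map, slot_alloc() and slot_free() are hypothetical:
 *
 *	static volatile ulong_t slot_map;
 *
 *	int
 *	slot_alloc(void)
 *	{
 *		uint_t bit;
 *
 *		for (bit = 0; bit < 8 * sizeof (ulong_t); bit++) {
 *			if (atomic_set_long_excl(&slot_map, bit) == 0)
 *				return ((int)bit);
 *		}
 *		return (-1);
 *	}
 *
 *	void
 *	slot_free(int bit)
 *	{
 *		(void) atomic_clear_long_excl(&slot_map, (uint_t)bit);
 *	}
 */
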
/*
 * Generic memory barrier used during lock entry, placed after the
 * memory operation that acquires the lock to guarantee that the lock
 * protects its data.  No stores from after the memory barrier will
 * reach visibility, and no loads from after the barrier will be
 * resolved, before the lock acquisition reaches global visibility.
 */
extern void membar_enter(void);

/*
 * Generic memory barrier used during lock exit, placed before the
 * memory operation that releases the lock to guarantee that the lock
 * protects its data.  All loads and stores issued before the barrier
 * will be resolved before the subsequent lock update reaches visibility.
 */
extern void membar_exit(void);

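/*
 * For example, a minimal sketch (illustrative only, not part of this
 * interface) of a test-and-set spinlock showing the barrier placement:
 * membar_enter() immediately after the swap that acquires the lock,
 * membar_exit() immediately before the store that releases it.  The
 * slock variable and the busy-wait policy are hypothetical; real code
 * would normally use the kernel mutex primitives instead.
 *
 *	static volatile uint8_t slock;
 *
 *	void
 *	slock_enter(void)
 *	{
 *		while (atomic_swap_8(&slock, 1) != 0)
 *			continue;
 *		membar_enter();
 *	}
 *
 *	void
 *	slock_exit(void)
 *	{
 *		membar_exit();
 *		slock = 0;
 *	}
 */
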
/*
 * Arrange that all stores issued before this point in the code reach
 * global visibility before any stores that follow; useful in producer
 * modules that update a data item, then set a flag that it is available.
 * The memory barrier guarantees that the available flag is not visible
 * earlier than the updated data, i.e. it imposes store ordering.
 */
extern void membar_producer(void);

/*
 * Arrange that all loads issued before this point in the code are
 * completed before any subsequent loads; useful in consumer modules
 * that check to see if data is available and read the data.
 * The memory barrier guarantees that the data is not sampled until
 * after the available flag has been seen, i.e. it imposes load ordering.
 */
extern void membar_consumer(void);
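
/*
 * For example, a minimal sketch (illustrative only, not part of this
 * interface) of the producer/consumer pattern described above; the data
 * and data_ready variables are hypothetical:
 *
 *	static volatile uint32_t data;
 *	static volatile uint32_t data_ready;
 *
 *	void
 *	produce(uint32_t v)
 *	{
 *		data = v;
 *		membar_producer();
 *		data_ready = 1;
 *	}
 *
 *	uint32_t
 *	consume(void)
 *	{
 *		while (data_ready == 0)
 *			continue;
 *		membar_consumer();
 *		return (data);
 *	}
 */
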
#endif  /* __STDC__ */

#ifdef	__cplusplus
}
#endif

#endif	/* _SYS_ATOMIC_H */