/*-
 * Copyright (c) 2008 Marcel Moolenaar
 * Copyright (c) 2001 Benno Rice
 * Copyright (c) 2001 David E. O'Brien
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

#define	__ATOMIC_BARRIER					\
    __asm __volatile("sync" : : : "memory")

#define	mb()	__ATOMIC_BARRIER
#define	wmb()	mb()
#define	rmb()	mb()

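/*
 * All three barriers expand to a full "sync", so mb(), wmb() and rmb()
 * are equally strong on this implementation.  A minimal sketch of
 * typical use (hypothetical fields, not part of this header): a
 * producer fills in a descriptor and only then sets the flag that a
 * consumer polls:
 *
 *	desc->paddr = paddr;
 *	desc->len = len;
 *	wmb();
 *	desc->ready = 1;
 */
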
/*
 * atomic_add(p, v)
 * { *p += v; }
 */

#define	__ATOMIC_ADD_8(p, v, t)					\
    8-bit atomic_add not implemented

#define	__ATOMIC_ADD_16(p, v, t)				\
    16-bit atomic_add not implemented

#define	__ATOMIC_ADD_32(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __ATOMIC_ADD_32 */

#ifdef __powerpc64__
#define	__ATOMIC_ADD_64(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __ATOMIC_ADD_64 */
#else
#define	__ATOMIC_ADD_64(p, v, t)				\
    64-bit atomic_add not implemented
#endif

#define	_ATOMIC_ADD(width, suffix, type)			\
    static __inline void					\
    atomic_add_##suffix(volatile type *p, type v) {		\
	type t;							\
	__ATOMIC_ADD_##width(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_add_acq_##suffix(volatile type *p, type v) {		\
	type t;							\
	__ATOMIC_ADD_##width(p, v, t);				\
	__ATOMIC_BARRIER;					\
    }								\
								\
    static __inline void					\
    atomic_add_rel_##suffix(volatile type *p, type v) {		\
	type t;							\
	__ATOMIC_BARRIER;					\
	__ATOMIC_ADD_##width(p, v, t);				\
    }								\
    /* _ATOMIC_ADD */

#if 0
_ATOMIC_ADD(8, 8, uint8_t)
_ATOMIC_ADD(8, char, u_char)
_ATOMIC_ADD(16, 16, uint16_t)
_ATOMIC_ADD(16, short, u_short)
#endif
_ATOMIC_ADD(32, 32, uint32_t)
_ATOMIC_ADD(32, int, u_int)
#ifdef __powerpc64__
_ATOMIC_ADD(64, 64, uint64_t)
_ATOMIC_ADD(64, long, u_long)
_ATOMIC_ADD(64, ptr, uintptr_t)
#else
_ATOMIC_ADD(32, long, u_long)
_ATOMIC_ADD(32, ptr, uintptr_t)
#endif

#undef _ATOMIC_ADD
#undef __ATOMIC_ADD_64
#undef __ATOMIC_ADD_32
#undef __ATOMIC_ADD_16
#undef __ATOMIC_ADD_8

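/*
 * Example (illustrative only; the names are made up): bump a counter
 * shared between CPUs without a lock.  The plain variant imposes no
 * ordering; the _acq variant issues a "sync" after the add and the
 * _rel variant issues one before it.
 *
 *	static volatile u_int pkts_received;
 *	...
 *	atomic_add_int(&pkts_received, 1);
 */
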
/*
 * atomic_clear(p, v)
 * { *p &= ~v; }
 */

#define	__ATOMIC_CLEAR_8(p, v, t)				\
    8-bit atomic_clear not implemented

#define	__ATOMIC_CLEAR_16(p, v, t)				\
    16-bit atomic_clear not implemented

#define	__ATOMIC_CLEAR_32(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __ATOMIC_CLEAR_32 */

#ifdef __powerpc64__
#define	__ATOMIC_CLEAR_64(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __ATOMIC_CLEAR_64 */
#else
#define	__ATOMIC_CLEAR_64(p, v, t)				\
    64-bit atomic_clear not implemented
#endif

#define	_ATOMIC_CLEAR(width, suffix, type)			\
    static __inline void					\
    atomic_clear_##suffix(volatile type *p, type v) {		\
	type t;							\
	__ATOMIC_CLEAR_##width(p, v, t);			\
    }								\
								\
    static __inline void					\
    atomic_clear_acq_##suffix(volatile type *p, type v) {	\
	type t;							\
	__ATOMIC_CLEAR_##width(p, v, t);			\
	__ATOMIC_BARRIER;					\
    }								\
								\
    static __inline void					\
    atomic_clear_rel_##suffix(volatile type *p, type v) {	\
	type t;							\
	__ATOMIC_BARRIER;					\
	__ATOMIC_CLEAR_##width(p, v, t);			\
    }								\
    /* _ATOMIC_CLEAR */

#if 0
_ATOMIC_CLEAR(8, 8, uint8_t)
_ATOMIC_CLEAR(8, char, u_char)
_ATOMIC_CLEAR(16, 16, uint16_t)
_ATOMIC_CLEAR(16, short, u_short)
#endif
_ATOMIC_CLEAR(32, 32, uint32_t)
_ATOMIC_CLEAR(32, int, u_int)
#ifdef __powerpc64__
_ATOMIC_CLEAR(64, 64, uint64_t)
_ATOMIC_CLEAR(64, long, u_long)
_ATOMIC_CLEAR(64, ptr, uintptr_t)
#else
_ATOMIC_CLEAR(32, long, u_long)
_ATOMIC_CLEAR(32, ptr, uintptr_t)
#endif

#undef _ATOMIC_CLEAR
#undef __ATOMIC_CLEAR_64
#undef __ATOMIC_CLEAR_32
#undef __ATOMIC_CLEAR_16
#undef __ATOMIC_CLEAR_8

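/*
 * Example (illustrative only; the names are made up): atomically drop
 * a flag bit from a word that other CPUs update concurrently.  The
 * "andc" in the loops above computes *p & ~v.
 *
 *	atomic_clear_int(&sc->sc_flags, SC_BUSY);
 */
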
/*
 * atomic_cmpset(p, o, n)
 */
/* TODO -- see below */

/*
 * atomic_load_acq(p)
 */
/* TODO -- see below */

/*
 * atomic_readandclear(p)
 */
/* TODO -- see below */

/*
 * atomic_set(p, v)
 * { *p |= v; }
 */

#define	__ATOMIC_SET_8(p, v, t)					\
    8-bit atomic_set not implemented

#define	__ATOMIC_SET_16(p, v, t)				\
    16-bit atomic_set not implemented

#define	__ATOMIC_SET_32(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __ATOMIC_SET_32 */

#ifdef __powerpc64__
#define	__ATOMIC_SET_64(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __ATOMIC_SET_64 */
#else
#define	__ATOMIC_SET_64(p, v, t)				\
    64-bit atomic_set not implemented
#endif

#define	_ATOMIC_SET(width, suffix, type)			\
    static __inline void					\
    atomic_set_##suffix(volatile type *p, type v) {		\
	type t;							\
	__ATOMIC_SET_##width(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_set_acq_##suffix(volatile type *p, type v) {		\
	type t;							\
	__ATOMIC_SET_##width(p, v, t);				\
	__ATOMIC_BARRIER;					\
    }								\
								\
    static __inline void					\
    atomic_set_rel_##suffix(volatile type *p, type v) {		\
	type t;							\
	__ATOMIC_BARRIER;					\
	__ATOMIC_SET_##width(p, v, t);				\
    }								\
    /* _ATOMIC_SET */

#if 0
_ATOMIC_SET(8, 8, uint8_t)
_ATOMIC_SET(8, char, u_char)
_ATOMIC_SET(16, 16, uint16_t)
_ATOMIC_SET(16, short, u_short)
#endif
_ATOMIC_SET(32, 32, uint32_t)
_ATOMIC_SET(32, int, u_int)
#ifdef __powerpc64__
_ATOMIC_SET(64, 64, uint64_t)
_ATOMIC_SET(64, long, u_long)
_ATOMIC_SET(64, ptr, uintptr_t)
#else
_ATOMIC_SET(32, long, u_long)
_ATOMIC_SET(32, ptr, uintptr_t)
#endif

#undef _ATOMIC_SET
#undef __ATOMIC_SET_64
#undef __ATOMIC_SET_32
#undef __ATOMIC_SET_16
#undef __ATOMIC_SET_8

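/*
 * Example (illustrative only; the names are made up): paired with
 * atomic_clear above, this maintains a flags word without a lock, and
 * racing updates of different bits cannot lose each other:
 *
 *	atomic_set_int(&sc->sc_flags, SC_BUSY);
 *	... do the work ...
 *	atomic_clear_int(&sc->sc_flags, SC_BUSY);
 */
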
/*
 * atomic_subtract(p, v)
 * { *p -= v; }
 */

#define	__ATOMIC_SUBTRACT_8(p, v, t)				\
    8-bit atomic_subtract not implemented

#define	__ATOMIC_SUBTRACT_16(p, v, t)				\
    16-bit atomic_subtract not implemented

#define	__ATOMIC_SUBTRACT_32(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __ATOMIC_SUBTRACT_32 */

#ifdef __powerpc64__
#define	__ATOMIC_SUBTRACT_64(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __ATOMIC_SUBTRACT_64 */
#else
#define	__ATOMIC_SUBTRACT_64(p, v, t)				\
    64-bit atomic_subtract not implemented
#endif

#define	_ATOMIC_SUBTRACT(width, suffix, type)			\
    static __inline void					\
    atomic_subtract_##suffix(volatile type *p, type v) {	\
	type t;							\
	__ATOMIC_SUBTRACT_##width(p, v, t);			\
    }								\
								\
    static __inline void					\
    atomic_subtract_acq_##suffix(volatile type *p, type v) {	\
	type t;							\
	__ATOMIC_SUBTRACT_##width(p, v, t);			\
	__ATOMIC_BARRIER;					\
    }								\
								\
    static __inline void					\
    atomic_subtract_rel_##suffix(volatile type *p, type v) {	\
	type t;							\
	__ATOMIC_BARRIER;					\
	__ATOMIC_SUBTRACT_##width(p, v, t);			\
    }								\
    /* _ATOMIC_SUBTRACT */

#if 0
_ATOMIC_SUBTRACT(8, 8, uint8_t)
_ATOMIC_SUBTRACT(8, char, u_char)
_ATOMIC_SUBTRACT(16, 16, uint16_t)
_ATOMIC_SUBTRACT(16, short, u_short)
#endif
_ATOMIC_SUBTRACT(32, 32, uint32_t)
_ATOMIC_SUBTRACT(32, int, u_int)
#ifdef __powerpc64__
_ATOMIC_SUBTRACT(64, 64, uint64_t)
_ATOMIC_SUBTRACT(64, long, u_long)
_ATOMIC_SUBTRACT(64, ptr, uintptr_t)
#else
_ATOMIC_SUBTRACT(32, long, u_long)
_ATOMIC_SUBTRACT(32, ptr, uintptr_t)
#endif

#undef _ATOMIC_SUBTRACT
#undef __ATOMIC_SUBTRACT_64
#undef __ATOMIC_SUBTRACT_32
#undef __ATOMIC_SUBTRACT_16
#undef __ATOMIC_SUBTRACT_8

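/*
 * Example (illustrative only; the name is made up).  Note that these
 * functions return void, so they suit counters whose new value is not
 * examined; when the old value is needed, use atomic_fetchadd (below)
 * instead.
 *
 *	atomic_subtract_int(&sc_inflight, 1);
 */
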
/*
 * atomic_store_rel(p, v)
 */
/* TODO -- see below */

/*
 * Old/original implementations that still need revisiting.
 */

static __inline uint32_t
atomic_readandclear_32(volatile uint32_t *addr)
{
	uint32_t result, temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tlwarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stwcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cc", "memory");
#endif

	return (result);
}

#ifdef __powerpc64__
static __inline uint64_t
atomic_readandclear_64(volatile uint64_t *addr)
{
	uint64_t result, temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tldarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stdcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cc", "memory");
#endif

	return (result);
}
#endif

#define	atomic_readandclear_int		atomic_readandclear_32

#ifdef __powerpc64__
#define	atomic_readandclear_long	atomic_readandclear_64
#define	atomic_readandclear_ptr		atomic_readandclear_64
#else
#define	atomic_readandclear_long	atomic_readandclear_32
#define	atomic_readandclear_ptr		atomic_readandclear_32
#endif

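/*
 * Example (illustrative only; the names are made up): harvest a mask
 * of pending events that other contexts OR bits into.  The old word
 * is returned and zero is left behind in one atomic step, so no event
 * posted during the swap can be lost:
 *
 *	u_int pending;
 *
 *	pending = atomic_readandclear_int(&sc_events);
 *	if (pending & EV_RXDONE)
 *		... handle receive completion ...
 */
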
/*
 * We assume that plain assignment (a = b) performs the loads and
 * stores of the types below atomically.
 */
#define	ATOMIC_STORE_LOAD(TYPE, WIDTH)				\
static __inline u_##TYPE					\
atomic_load_acq_##WIDTH(volatile u_##TYPE *p)			\
{								\
	u_##TYPE v;						\
								\
	v = *p;							\
	__ATOMIC_BARRIER;					\
	return (v);						\
}								\
								\
static __inline void						\
atomic_store_rel_##WIDTH(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
	__ATOMIC_BARRIER;					\
	*p = v;							\
}								\
								\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE v;						\
								\
	v = *p;							\
	__ATOMIC_BARRIER;					\
	return (v);						\
}								\
								\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
	__ATOMIC_BARRIER;					\
	*p = v;							\
}

ATOMIC_STORE_LOAD(char,		8)
ATOMIC_STORE_LOAD(short,	16)
ATOMIC_STORE_LOAD(int,		32)
#ifdef __powerpc64__
ATOMIC_STORE_LOAD(long,		64)
#endif

#ifdef __powerpc64__
#define	atomic_load_acq_long	atomic_load_acq_64
#define	atomic_store_rel_long	atomic_store_rel_64
#define	atomic_load_acq_ptr	atomic_load_acq_64
#define	atomic_store_rel_ptr	atomic_store_rel_64
#else
#define	atomic_load_acq_long	atomic_load_acq_32
#define	atomic_store_rel_long	atomic_store_rel_32
#define	atomic_load_acq_ptr	atomic_load_acq_32
#define	atomic_store_rel_ptr	atomic_store_rel_32
#endif

#undef ATOMIC_STORE_LOAD

514  * Atomically compare the value stored at *p with cmpval and if the
515  * two values are equal, update the value of *p with newval. Returns
516  * zero if the compare failed, nonzero otherwise.
517  */
518 static __inline int
519 atomic_cmpset_32(volatile uint32_t* p, uint32_t cmpval, uint32_t newval)
520 {
521 	int	ret;
522 
523 #ifdef __GNUCLIKE_ASM
524 	__asm __volatile (
525 		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
526 		"cmplw %3, %0\n\t"		/* compare */
527 		"bne 2f\n\t"			/* exit if not equal */
528 		"stwcx. %4, 0, %2\n\t"      	/* attempt to store */
529 		"bne- 1b\n\t"			/* spin if failed */
530 		"li %0, 1\n\t"			/* success - retval = 1 */
531 		"b 3f\n\t"			/* we've succeeded */
532 		"2:\n\t"
533 		"stwcx. %0, 0, %2\n\t"       	/* clear reservation (74xx) */
534 		"li %0, 0\n\t"			/* failure - retval = 0 */
535 		"3:\n\t"
536 		: "=&r" (ret), "=m" (*p)
537 		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
538 		: "cc", "memory");
539 #endif
540 
541 	return (ret);
542 }
543 
544 static __inline int
545 atomic_cmpset_long(volatile u_long* p, u_long cmpval, u_long newval)
546 {
547 	int ret;
548 
549 #ifdef __GNUCLIKE_ASM
550 	__asm __volatile (
551 	    #ifdef __powerpc64__
552 		"1:\tldarx %0, 0, %2\n\t"	/* load old value */
553 		"cmpld %3, %0\n\t"		/* compare */
554 		"bne 2f\n\t"			/* exit if not equal */
555 		"stdcx. %4, 0, %2\n\t"      	/* attempt to store */
556 	    #else
557 		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
558 		"cmplw %3, %0\n\t"		/* compare */
559 		"bne 2f\n\t"			/* exit if not equal */
560 		"stwcx. %4, 0, %2\n\t"      	/* attempt to store */
561 	    #endif
562 		"bne- 1b\n\t"			/* spin if failed */
563 		"li %0, 1\n\t"			/* success - retval = 1 */
564 		"b 3f\n\t"			/* we've succeeded */
565 		"2:\n\t"
566 	    #ifdef __powerpc64__
567 		"stdcx. %0, 0, %2\n\t"       	/* clear reservation (74xx) */
568 	    #else
569 		"stwcx. %0, 0, %2\n\t"       	/* clear reservation (74xx) */
570 	    #endif
571 		"li %0, 0\n\t"			/* failure - retval = 0 */
572 		"3:\n\t"
573 		: "=&r" (ret), "=m" (*p)
574 		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
575 		: "cc", "memory");
576 #endif
577 
578 	return (ret);
579 }
580 
581 #define	atomic_cmpset_int	atomic_cmpset_32
582 
583 #ifdef __powerpc64__
584 #define	atomic_cmpset_ptr(dst, old, new)	\
585     atomic_cmpset_long((volatile u_long *)(dst), (u_long)(old), (u_long)(new))
586 #else
587 #define	atomic_cmpset_ptr(dst, old, new)	\
588     atomic_cmpset_32((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
589 #endif
590 
591 static __inline int
592 atomic_cmpset_acq_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
593 {
594 	int retval;
595 
596 	retval = atomic_cmpset_32(p, cmpval, newval);
597 	__ATOMIC_BARRIER;
598 	return (retval);
599 }
600 
601 static __inline int
602 atomic_cmpset_rel_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
603 {
604 	__ATOMIC_BARRIER;
605 	return (atomic_cmpset_32(p, cmpval, newval));
606 }
607 
608 static __inline int
609 atomic_cmpset_acq_long(volatile u_long *p, u_long cmpval, u_long newval)
610 {
611 	u_long retval;
612 
613 	retval = atomic_cmpset_long(p, cmpval, newval);
614 	__ATOMIC_BARRIER;
615 	return (retval);
616 }
617 
618 static __inline int
619 atomic_cmpset_rel_long(volatile u_long *p, u_long cmpval, u_long newval)
620 {
621 	__ATOMIC_BARRIER;
622 	return (atomic_cmpset_long(p, cmpval, newval));
623 }
624 
625 #define	atomic_cmpset_acq_int	atomic_cmpset_acq_32
626 #define	atomic_cmpset_rel_int	atomic_cmpset_rel_32
627 
628 #ifdef __powerpc64__
629 #define	atomic_cmpset_acq_ptr(dst, old, new)	\
630     atomic_cmpset_acq_long((volatile u_long *)(dst), (u_long)(old), (u_long)(new))
631 #define	atomic_cmpset_rel_ptr(dst, old, new)	\
632     atomic_cmpset_rel_long((volatile u_long *)(dst), (u_long)(old), (u_long)(new))
633 #else
634 #define	atomic_cmpset_acq_ptr(dst, old, new)	\
635     atomic_cmpset_acq_32((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
636 #define	atomic_cmpset_rel_ptr(dst, old, new)	\
637     atomic_cmpset_rel_32((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
638 #endif
639 
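/*
 * Example (illustrative only; the names are made up): the usual
 * compare-and-set loop.  The cmpset fails and the loop retries if
 * another CPU changed *p between the read and the store:
 *
 *	u_int old, new;
 *
 *	do {
 *		old = sc_state;
 *		new = old | SC_WANTED;
 *	} while (atomic_cmpset_int(&sc_state, old, new) == 0);
 *
 * atomic_fetchadd below is exactly this pattern, specialized for
 * addition.
 */
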
static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t value;

	do {
		value = *p;
	} while (!atomic_cmpset_32(p, value, value + v));
	return (value);
}

#define	atomic_fetchadd_int	atomic_fetchadd_32

#ifdef __powerpc64__
static __inline uint64_t
atomic_fetchadd_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t value;

	do {
		value = *p;
	} while (!atomic_cmpset_long((volatile u_long *)p, value, value + v));
	return (value);
}

#define	atomic_fetchadd_long	atomic_fetchadd_64
#else
#define	atomic_fetchadd_long(p, v)	\
    (u_long)atomic_fetchadd_32((volatile u_int *)(p), (u_int)(v))
#endif

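/*
 * Example (illustrative only; the names are made up): fetchadd
 * returns the value *before* the addition, which makes it suitable
 * for handing out unique tickets or indices:
 *
 *	my_ticket = atomic_fetchadd_int(&next_ticket, 1);
 */
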
#endif /* ! _MACHINE_ATOMIC_H_ */