xref: /freebsd/sys/amd64/include/atomic.h (revision 73bb5aea)
/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#ifdef __i386__
#include <i386/atomic.h>
#else /* !__i386__ */

#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

/*
 * To express interprocessor (as opposed to processor and device) memory
 * ordering constraints, use the atomic_*() functions with acquire and release
 * semantics rather than the *mb() functions.  An architecture's memory
 * ordering (or memory consistency) model governs the order in which a
 * program's accesses to different locations may be performed by an
 * implementation of that architecture.  In general, for memory regions
 * defined as writeback cacheable, the memory ordering implemented by amd64
 * processors preserves the program ordering of a load followed by a load, a
 * load followed by a store, and a store followed by a store.  Only a store
 * followed by a load to a different memory location may be reordered.
 * Therefore, except for special cases, like non-temporal memory accesses or
 * memory regions defined as write combining, the memory ordering effects
 * provided by the sfence instruction in the wmb() function and the lfence
 * instruction in the rmb() function are redundant.  In contrast, the
 * atomic_*() functions with acquire and release semantics do not perform
 * redundant instructions for ordinary cases of interprocessor memory
 * ordering on any architecture.
 */
#define	mb()	__asm __volatile("mfence;" : : : "memory")
#define	wmb()	__asm __volatile("sfence;" : : : "memory")
#define	rmb()	__asm __volatile("lfence;" : : : "memory")
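
/*
 * Illustrative sketch (not part of the original header): the intended use
 * of the acquire/release atomics in place of wmb()/rmb() for a simple
 * producer/consumer handoff.  "flag", "data", prepare(), and consume()
 * are hypothetical names.
 *
 *	// producer
 *	data = prepare();		// plain stores to the payload
 *	atomic_store_rel_int(&flag, 1);	// release: the payload is visible
 *					// before the flag is observed set
 *
 *	// consumer
 *	while (atomic_load_acq_int(&flag) == 0)
 *		;			// spin until published
 *	consume(data);			// acquire: sees the producer's data
 */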

#ifdef _KERNEL
/*
 * OFFSETOF_MONITORBUF == __pcpu_offset(pc_monitorbuf).
 *
 * The open-coded number is used instead of the symbolic expression to
 * avoid a dependency on sys/pcpu.h in machine/atomic.h consumers.
 * An assertion in amd64/vm_machdep.c ensures that the value is correct.
 */
#define	OFFSETOF_MONITORBUF	0x100
#endif
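
/*
 * A sketch of the kind of check meant above (illustrative only; the
 * actual assertion lives in amd64/vm_machdep.c and may be spelled
 * differently):
 *
 *	_Static_assert(OFFSETOF_MONITORBUF ==
 *	    offsetof(struct pcpu, pc_monitorbuf),
 *	    "OFFSETOF_MONITORBUF is out of sync with struct pcpu");
 */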

#if defined(SAN_NEEDS_INTERCEPTORS) && !defined(SAN_RUNTIME)
#include <sys/atomic_san.h>
#else
#include <sys/atomic_common.h>

/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_swap_int(P, V)	(return (*(u_int *)(P)); *(u_int *)(P) = (V);)
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_swap_long(P, V)	(return (*(u_long *)(P)); *(u_long *)(P) = (V);)
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */
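
/*
 * Illustrative use (not part of the original header): maintaining a flags
 * word with the set/clear operations; "flags" and FLAG_BUSY are
 * hypothetical.
 *
 *	atomic_set_int(&flags, FLAG_BUSY);	// flags |= FLAG_BUSY, atomically
 *	...
 *	atomic_clear_int(&flags, FLAG_BUSY);	// flags &= ~FLAG_BUSY, atomically
 */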

/*
 * Always use lock prefixes.  The result is slightly less optimal for
 * UP systems, but it matters less now, and sometimes UP is emulated
 * over SMP.
 *
 * The assembly is volatilized to avoid code chunk removal by the compiler.
 * GCC aggressively reorders operations and memory clobbering is necessary
 * in order to avoid that for memory barriers.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile("lock; " OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "cc");					\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile("lock; " OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "memory", "cc");				\
}							\
struct __hack
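
/*
 * For reference, ATOMIC_ASM(add, int, "addl %1,%0", "ir", v) expands to
 * roughly the following pair (sketch; the macro above is authoritative):
 *
 *	static __inline void
 *	atomic_add_int(volatile u_int *p, u_int v)
 *	{
 *		__asm __volatile("lock; addl %1,%0"
 *		    : "+m" (*p) : "ir" (v) : "cc");
 *	}
 *
 * plus an atomic_add_barr_int() variant that also clobbers "memory";
 * the acquire/release aliases defined later map onto the _barr_ form.
 */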

/*
 * Atomic compare and set, used by the mutex functions.
 *
 * cmpset:
 *	if (*dst == expect)
 *		*dst = src
 *
 * fcmpset:
 *	if (*dst == *expect)
 *		*dst = src
 *	else
 *		*expect = *dst
 *
 * Returns 0 on failure, non-zero on success.
 */
#define	ATOMIC_CMPSET(TYPE)				\
static __inline int					\
atomic_cmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	" lock; cmpxchg %3,%1 ;	"			\
	"# atomic_cmpset_" #TYPE "	"		\
	: "=@cce" (res),		/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (expect)			/* 2 */		\
	: "r" (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}							\
							\
static __inline int					\
atomic_fcmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE *expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	" lock; cmpxchg %3,%1 ;		"		\
	"# atomic_fcmpset_" #TYPE "	"		\
	: "=@cce" (res),		/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (*expect)		/* 2 */		\
	: "r" (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}

ATOMIC_CMPSET(char);
ATOMIC_CMPSET(short);
ATOMIC_CMPSET(int);
ATOMIC_CMPSET(long);
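
/*
 * Illustrative sketch (not part of the original header): the usual retry
 * loop around fcmpset, here a hypothetical bounded increment with "limit"
 * supplied by the caller.  On failure, fcmpset has already reloaded "old"
 * from *p, so the loop does not need to re-read *p itself.
 *
 *	u_int old, new;
 *
 *	old = *p;
 *	do {
 *		if (old == limit)
 *			break;		// saturated; give up
 *		new = old + 1;
 *	} while (atomic_fcmpset_int(p, &old, new) == 0);
 */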

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	" lock; xaddl	%0,%1 ;		"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "cc");
	return (v);
}
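
/*
 * Illustrative use (not part of the original header): because
 * atomic_fetchadd_int() returns the pre-increment value, it can hand out
 * unique tickets; "next_id" is a hypothetical counter.
 *
 *	static u_int next_id;
 *	...
 *	u_int id = atomic_fetchadd_int(&next_id, 1);	// unique per caller
 */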

/*
 * Atomically add the value of v to the long integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	__asm __volatile(
	" lock;	xaddq	%0,%1 ;		"
	"# atomic_fetchadd_long"
	: "+r" (v),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "cc");
	return (v);
}

static __inline int
atomic_testandset_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	" lock;	btsl	%2,%1 ;		"
	"# atomic_testandset_int"
	: "=@ccc" (res),		/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{
	u_char res;

	__asm __volatile(
	" lock;	btsq	%2,%1 ;		"
	"# atomic_testandset_long"
	: "=@ccc" (res),		/* 0 */
	  "+m" (*p)			/* 1 */
	: "Jr" ((u_long)(v & 0x3f))	/* 2 */
	: "cc");
	return (res);
}

static __inline int
atomic_testandclear_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	" lock;	btrl	%2,%1 ;		"
	"# atomic_testandclear_int"
	: "=@ccc" (res),		/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

static __inline int
atomic_testandclear_long(volatile u_long *p, u_int v)
{
	u_char res;

	__asm __volatile(
	" lock;	btrq	%2,%1 ;		"
	"# atomic_testandclear_long"
	: "=@ccc" (res),		/* 0 */
	  "+m" (*p)			/* 1 */
	: "Jr" ((u_long)(v & 0x3f))	/* 2 */
	: "cc");
	return (res);
}

/*
 * We assume that a = b will do atomic loads and stores.  Due to the
 * IA32 memory model, a simple store guarantees release semantics.
 *
 * However, a load may pass a store if they are performed on distinct
 * addresses, so we need a Store/Load barrier for sequentially
 * consistent fences in SMP kernels.  We use "lock addl $0,mem" for a
 * Store/Load barrier, as recommended by the AMD Software Optimization
 * Guide, and not mfence.  To avoid false data dependencies, we use a
 * special address for "mem".  In the kernel, we use a private per-cpu
 * cache line.  In user space, we use a word in the stack's red zone
 * (-8(%rsp)).
 */

static __inline void
__storeload_barrier(void)
{
#if defined(_KERNEL)
	__asm __volatile("lock; addl $0,%%gs:%0"
	    : "+m" (*(u_int *)OFFSETOF_MONITORBUF) : : "memory", "cc");
#else /* !_KERNEL */
	__asm __volatile("lock; addl $0,-8(%%rsp)" : : : "memory", "cc");
#endif /* _KERNEL */
}
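
/*
 * Illustrative sketch (not part of the original header) of why the
 * Store/Load barrier is needed: in a Dekker-style handshake, each CPU
 * stores its own flag and then loads the other CPU's flag.  Without the
 * barrier, x86 may satisfy the load before the store becomes globally
 * visible, so both CPUs can see 0 and proceed.  "me" and "other" are
 * hypothetical flag words.
 *
 *	me = 1;
 *	atomic_thread_fence_seq_cst();	// Store/Load barrier (defined below)
 *	if (other == 0) {
 *		// the other CPU cannot also be here
 *	}
 */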

#define	ATOMIC_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE res;						\
								\
	res = *p;						\
	__compiler_membar();					\
	return (res);						\
}								\
struct __hack

#define	ATOMIC_STORE(TYPE)					\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
								\
	__compiler_membar();					\
	*p = v;							\
}								\
struct __hack

static __inline void
atomic_thread_fence_acq(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__storeload_barrier();
}

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

ATOMIC_ASM(set,	     long,  "orq %1,%0",   "er",  v);
ATOMIC_ASM(clear,    long,  "andq %1,%0",  "er", ~v);
ATOMIC_ASM(add,	     long,  "addq %1,%0",  "er",  v);
ATOMIC_ASM(subtract, long,  "subq %1,%0",  "er",  v);

#define	ATOMIC_LOADSTORE(TYPE)					\
	ATOMIC_LOAD(TYPE);					\
	ATOMIC_STORE(TYPE)

ATOMIC_LOADSTORE(char);
ATOMIC_LOADSTORE(short);
ATOMIC_LOADSTORE(int);
ATOMIC_LOADSTORE(long);

#undef ATOMIC_ASM
#undef ATOMIC_LOAD
#undef ATOMIC_STORE
#undef ATOMIC_LOADSTORE
#ifndef WANT_FUNCTIONS

/* Read the current value and store a new value in the destination. */
static __inline u_int
atomic_swap_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_swap_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}

static __inline u_long
atomic_swap_long(volatile u_long *p, u_long v)
{

	__asm __volatile(
	"	xchgq	%1,%0 ;		"
	"# atomic_swap_long"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}

#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char
#define	atomic_cmpset_acq_char		atomic_cmpset_char
#define	atomic_cmpset_rel_char		atomic_cmpset_char
#define	atomic_fcmpset_acq_char		atomic_fcmpset_char
#define	atomic_fcmpset_rel_char		atomic_fcmpset_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short
#define	atomic_cmpset_acq_short		atomic_cmpset_short
#define	atomic_cmpset_rel_short		atomic_cmpset_short
#define	atomic_fcmpset_acq_short	atomic_fcmpset_short
#define	atomic_fcmpset_rel_short	atomic_fcmpset_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int
#define	atomic_fcmpset_acq_int		atomic_fcmpset_int
#define	atomic_fcmpset_rel_int		atomic_fcmpset_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long
#define	atomic_fcmpset_acq_long		atomic_fcmpset_long
#define	atomic_fcmpset_rel_long		atomic_fcmpset_long

#define	atomic_readandclear_int(p)	atomic_swap_int(p, 0)
#define	atomic_readandclear_long(p)	atomic_swap_long(p, 0)
#define	atomic_testandset_acq_long	atomic_testandset_long

/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char
#define	atomic_cmpset_8		atomic_cmpset_char
#define	atomic_cmpset_acq_8	atomic_cmpset_acq_char
#define	atomic_cmpset_rel_8	atomic_cmpset_rel_char
#define	atomic_fcmpset_8	atomic_fcmpset_char
#define	atomic_fcmpset_acq_8	atomic_fcmpset_acq_char
#define	atomic_fcmpset_rel_8	atomic_fcmpset_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short
#define	atomic_cmpset_16	atomic_cmpset_short
#define	atomic_cmpset_acq_16	atomic_cmpset_acq_short
#define	atomic_cmpset_rel_16	atomic_cmpset_rel_short
#define	atomic_fcmpset_16	atomic_fcmpset_short
#define	atomic_fcmpset_acq_16	atomic_fcmpset_acq_short
#define	atomic_fcmpset_rel_16	atomic_fcmpset_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_fcmpset_32	atomic_fcmpset_int
#define	atomic_fcmpset_acq_32	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_32	atomic_fcmpset_rel_int
#define	atomic_swap_32		atomic_swap_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_testandset_32	atomic_testandset_int
#define	atomic_testandclear_32	atomic_testandclear_int

/* Operations on 64-bit quad words. */
#define	atomic_set_64		atomic_set_long
#define	atomic_set_acq_64	atomic_set_acq_long
#define	atomic_set_rel_64	atomic_set_rel_long
#define	atomic_clear_64		atomic_clear_long
#define	atomic_clear_acq_64	atomic_clear_acq_long
#define	atomic_clear_rel_64	atomic_clear_rel_long
#define	atomic_add_64		atomic_add_long
#define	atomic_add_acq_64	atomic_add_acq_long
#define	atomic_add_rel_64	atomic_add_rel_long
#define	atomic_subtract_64	atomic_subtract_long
#define	atomic_subtract_acq_64	atomic_subtract_acq_long
#define	atomic_subtract_rel_64	atomic_subtract_rel_long
#define	atomic_load_acq_64	atomic_load_acq_long
#define	atomic_store_rel_64	atomic_store_rel_long
#define	atomic_cmpset_64	atomic_cmpset_long
#define	atomic_cmpset_acq_64	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_64	atomic_cmpset_rel_long
#define	atomic_fcmpset_64	atomic_fcmpset_long
#define	atomic_fcmpset_acq_64	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_64	atomic_fcmpset_rel_long
#define	atomic_swap_64		atomic_swap_long
#define	atomic_readandclear_64	atomic_readandclear_long
#define	atomic_fetchadd_64	atomic_fetchadd_long
#define	atomic_testandset_64	atomic_testandset_long
#define	atomic_testandclear_64	atomic_testandclear_long

/* Operations on pointers. */
#define	atomic_set_ptr		atomic_set_long
#define	atomic_set_acq_ptr	atomic_set_acq_long
#define	atomic_set_rel_ptr	atomic_set_rel_long
#define	atomic_clear_ptr	atomic_clear_long
#define	atomic_clear_acq_ptr	atomic_clear_acq_long
#define	atomic_clear_rel_ptr	atomic_clear_rel_long
#define	atomic_add_ptr		atomic_add_long
#define	atomic_add_acq_ptr	atomic_add_acq_long
#define	atomic_add_rel_ptr	atomic_add_rel_long
#define	atomic_subtract_ptr	atomic_subtract_long
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
#define	atomic_load_acq_ptr	atomic_load_acq_long
#define	atomic_store_rel_ptr	atomic_store_rel_long
#define	atomic_cmpset_ptr	atomic_cmpset_long
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
#define	atomic_fcmpset_ptr	atomic_fcmpset_long
#define	atomic_fcmpset_acq_ptr	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_ptr	atomic_fcmpset_rel_long
#define	atomic_swap_ptr		atomic_swap_long
#define	atomic_readandclear_ptr	atomic_readandclear_long

#endif /* !WANT_FUNCTIONS */

#endif /* !SAN_NEEDS_INTERCEPTORS || SAN_RUNTIME */

#endif /* !_MACHINE_ATOMIC_H_ */

#endif /* __i386__ */