/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

/*
 * To express interprocessor (as opposed to processor and device) memory
 * ordering constraints, use the atomic_*() functions with acquire and release
 * semantics rather than the *mb() functions.  An architecture's memory
 * ordering (or memory consistency) model governs the order in which a
 * program's accesses to different locations may be performed by an
 * implementation of that architecture.  In general, for memory regions
 * defined as writeback cacheable, the memory ordering implemented by amd64
 * processors preserves the program ordering of a load followed by a load, a
 * load followed by a store, and a store followed by a store.  Only a store
 * followed by a load to a different memory location may be reordered.
 * Therefore, except for special cases, like non-temporal memory accesses or
 * memory regions defined as write combining, the memory ordering effects
 * provided by the sfence instruction in the wmb() function and the lfence
 * instruction in the rmb() function are redundant.  In contrast, the
 * atomic_*() functions with acquire and release semantics do not perform
 * redundant instructions for ordinary cases of interprocessor memory
 * ordering on any architecture.
 */
#define	mb()	__asm __volatile("mfence;" : : : "memory")
#define	wmb()	__asm __volatile("sfence;" : : : "memory")
#define	rmb()	__asm __volatile("lfence;" : : : "memory")

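/*
 * Illustrative example (not part of this file): a producer/consumer
 * handoff expressed with acquire and release semantics instead of the
 * *mb() fences; "data", "ready", and the helpers are hypothetical.
 *
 *	// producer: the release store orders the data store before it
 *	data = compute();
 *	atomic_store_rel_int(&ready, 1);
 *
 *	// consumer: the acquire load orders the later loads after it
 *	while (atomic_load_acq_int(&ready) == 0)
 *		cpu_spinwait();
 *	consume(data);
 */
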
/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_swap_int(P, V)	(return (*(u_int *)(P)); *(u_int *)(P) = (V);)
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_swap_long(P, V)	(return (*(u_long *)(P)); *(u_long *)(P) = (V);)
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */

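/*
 * Illustrative example (not part of this file): atomically setting and
 * clearing a flag bit in a shared word; "flags" and F_BUSY are
 * hypothetical.
 *
 *	atomic_set_int(&flags, F_BUSY);		// flags |= F_BUSY
 *	atomic_clear_int(&flags, F_BUSY);	// flags &= ~F_BUSY
 */
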
/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM)
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);	\
void atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

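/*
 * Illustrative expansion (derived from the macro above): in this
 * branch, ATOMIC_ASM(set, char, ...) declares only prototypes,
 *
 *	void atomic_set_char(volatile u_char *p, u_char v);
 *	void atomic_set_barr_char(volatile u_char *p, u_char v);
 *
 * so kernel modules link against the functions compiled into the
 * kernel itself.
 */
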
int	atomic_cmpset_char(volatile u_char *dst, u_char expect, u_char src);
int	atomic_cmpset_short(volatile u_short *dst, u_short expect, u_short src);
int	atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src);
int	atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src);
int	atomic_fcmpset_char(volatile u_char *dst, u_char *expect, u_char src);
int	atomic_fcmpset_short(volatile u_short *dst, u_short *expect,
	    u_short src);
int	atomic_fcmpset_int(volatile u_int *dst, u_int *expect, u_int src);
int	atomic_fcmpset_long(volatile u_long *dst, u_long *expect, u_long src);
u_int	atomic_fetchadd_int(volatile u_int *p, u_int v);
u_long	atomic_fetchadd_long(volatile u_long *p, u_long v);
int	atomic_testandset_int(volatile u_int *p, u_int v);
int	atomic_testandset_long(volatile u_long *p, u_int v);
int	atomic_testandclear_int(volatile u_int *p, u_int v);
int	atomic_testandclear_long(volatile u_long *p, u_int v);
void	atomic_thread_fence_acq(void);
void	atomic_thread_fence_acq_rel(void);
void	atomic_thread_fence_rel(void);
void	atomic_thread_fence_seq_cst(void);

#define	ATOMIC_LOAD(TYPE)					\
u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p)
#define	ATOMIC_STORE(TYPE)					\
void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#else /* !KLD_MODULE && __GNUCLIKE_ASM */

/*
 * For userland, always use lock prefixes so that the binaries will run
 * on both SMP and !SMP systems.
 */
#if defined(SMP) || !defined(_KERNEL)
#define	MPLOCKED	"lock ; "
#else
#define	MPLOCKED
#endif

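/*
 * Illustrative expansion (assumption: SMP kernel or userland): with
 * MPLOCKED defined as "lock ; ", the ATOMIC_ASM() bodies below emit,
 * e.g., "lock ; addl %1,%0", making the read-modify-write atomic
 * across processors.  A UP kernel omits the prefix, since a single
 * instruction is already atomic with respect to interrupts on one CPU.
 */
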
/*
 * The assembly is volatilized to keep the compiler from discarding it
 * as dead code.  GCC aggressively reorders operations, so the barrier
 * variants also clobber memory to prevent accesses from being moved
 * across them.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "cc");					\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "memory", "cc");				\
}							\
struct __hack

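/*
 * Illustrative expansion (derived from the macro above): on SMP,
 * ATOMIC_ASM(add, int, "addl %1,%0", "ir", v) generates an inline
 * atomic_add_int() whose body is the single instruction
 *
 *	lock ; addl	%1,%0
 *
 * plus an atomic_add_barr_int() variant that additionally clobbers
 * memory so it can serve as an acquire/release barrier.
 */
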
/*
 * Atomic compare and set, used by the mutex functions.
 *
 * cmpset:
 *	if (*dst == expect)
 *		*dst = src
 *
 * fcmpset:
 *	if (*dst == *expect)
 *		*dst = src
 *	else
 *		*expect = *dst
 *
 * Returns 0 on failure, non-zero on success.
 */
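/*
 * Illustrative example (not part of this file): a lock-free increment
 * built on fcmpset.  On failure, *expect is refreshed with the current
 * value, so the loop need not reload it by hand; "counter" is
 * hypothetical.
 *
 *	u_int old, new;
 *
 *	old = *counter;
 *	do {
 *		new = old + 1;
 *	} while (atomic_fcmpset_int(counter, &old, new) == 0);
 */
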
#define	ATOMIC_CMPSET(TYPE)				\
static __inline int					\
atomic_cmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	" MPLOCKED "		"		\
	"	cmpxchg %3,%1 ;	"			\
	"	sete	%0 ;		"		\
	"# atomic_cmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (expect)			/* 2 */		\
	: "r" (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}							\
							\
static __inline int					\
atomic_fcmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE *expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	" MPLOCKED "		"		\
	"	cmpxchg %3,%1 ;		"		\
	"	sete	%0 ;		"		\
	"# atomic_fcmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (*expect)		/* 2 */		\
	: "r" (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}

ATOMIC_CMPSET(char);
ATOMIC_CMPSET(short);
ATOMIC_CMPSET(int);
ATOMIC_CMPSET(long);

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	" MPLOCKED "		"
	"	xaddl	%0,%1 ;		"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "cc");
	return (v);
}

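/*
 * Illustrative example (not part of this file): fetchadd returns the
 * value before the addition, so it can hand out unique, increasing
 * tickets; "tickets" is hypothetical.
 *
 *	my_ticket = atomic_fetchadd_int(&tickets, 1);
 */
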
/*
 * Atomically add the value of v to the long integer pointed to by p and
 * return the previous value of *p.
 */
static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	__asm __volatile(
	"	" MPLOCKED "		"
	"	xaddq	%0,%1 ;		"
	"# atomic_fetchadd_long"
	: "+r" (v),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "cc");
	return (v);
}

static __inline int
atomic_testandset_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btsl	%2,%1 ;		"
	"	setc	%0 ;		"
	"# atomic_testandset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btsq	%2,%1 ;		"
	"	setc	%0 ;		"
	"# atomic_testandset_long"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Jr" ((u_long)(v & 0x3f))	/* 2 */
	: "cc");
	return (res);
}

static __inline int
atomic_testandclear_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btrl	%2,%1 ;		"
	"	setc	%0 ;		"
	"# atomic_testandclear_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

static __inline int
atomic_testandclear_long(volatile u_long *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btrq	%2,%1 ;		"
	"	setc	%0 ;		"
	"# atomic_testandclear_long"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Jr" ((u_long)(v & 0x3f))	/* 2 */
	: "cc");
	return (res);
}

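/*
 * Illustrative example (not part of this file): the bit index is taken
 * modulo the operand width (masked with 0x1f or 0x3f above), so the
 * caller selects the word itself; "map" and "bit" are hypothetical.
 *
 *	if (atomic_testandset_long(&map[bit / 64], bit % 64) == 0)
 *		claimed = 1;	// the bit was clear; this caller set it
 */
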
/*
 * We assume that a = b will do atomic loads and stores.  Due to the
 * IA32 memory model, a simple store guarantees release semantics.
 *
 * However, a load may pass a store if they are performed on distinct
 * addresses, so we need a Store/Load barrier for sequentially
 * consistent fences in SMP kernels.  We use "lock addl $0,mem" for a
 * Store/Load barrier, as recommended by the AMD Software Optimization
 * Guide, and not mfence.  To avoid false data dependencies, we use a
 * special address for "mem".  In the kernel, we use a private per-cpu
 * cache line.  In user space, we use a word in the stack's red zone
 * (-8(%rsp)).
 *
 * For UP kernels, however, the memory of the single processor is
 * always consistent, so we only need to stop the compiler from
 * reordering accesses in a way that violates the semantics of acquire
 * and release.
 */

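/*
 * Illustrative example (assumed pattern, not from this file): the
 * classic store/load case that requires a sequentially consistent
 * fence.  Without it, each CPU's load may pass its own store and both
 * r0 and r1 can be 0; "a" and "b" are hypothetical.
 *
 *	// CPU 0			// CPU 1
 *	atomic_store_rel_int(&a, 1);	atomic_store_rel_int(&b, 1);
 *	atomic_thread_fence_seq_cst();	atomic_thread_fence_seq_cst();
 *	r0 = atomic_load_acq_int(&b);	r1 = atomic_load_acq_int(&a);
 */
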
#if defined(_KERNEL)

/*
 * OFFSETOF_MONITORBUF == __pcpu_offset(pc_monitorbuf).
 *
 * The open-coded number is used instead of the symbolic expression to
 * avoid a dependency on sys/pcpu.h in machine/atomic.h consumers.
 * An assertion in amd64/vm_machdep.c ensures that the value is correct.
 */
#define	OFFSETOF_MONITORBUF	0x180

#if defined(SMP)
static __inline void
__storeload_barrier(void)
{

	__asm __volatile("lock; addl $0,%%gs:%0"
	    : "+m" (*(u_int *)OFFSETOF_MONITORBUF) : : "memory", "cc");
}
#else /* _KERNEL && UP */
static __inline void
__storeload_barrier(void)
{

	__compiler_membar();
}
#endif /* SMP */
#else /* !_KERNEL */
static __inline void
__storeload_barrier(void)
{

	__asm __volatile("lock; addl $0,-8(%%rsp)" : : : "memory", "cc");
}
#endif /* _KERNEL */

#define	ATOMIC_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE res;						\
								\
	res = *p;						\
	__compiler_membar();					\
	return (res);						\
}								\
struct __hack

#define	ATOMIC_STORE(TYPE)					\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
								\
	__compiler_membar();					\
	*p = v;							\
}								\
struct __hack

static __inline void
atomic_thread_fence_acq(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__storeload_barrier();
}

#endif /* KLD_MODULE || !__GNUCLIKE_ASM */

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

ATOMIC_ASM(set,	     long,  "orq %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    long,  "andq %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addq %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, long,  "subq %1,%0",  "ir",  v);

#define	ATOMIC_LOADSTORE(TYPE)					\
	ATOMIC_LOAD(TYPE);					\
	ATOMIC_STORE(TYPE)

ATOMIC_LOADSTORE(char);
ATOMIC_LOADSTORE(short);
ATOMIC_LOADSTORE(int);
ATOMIC_LOADSTORE(long);

#undef ATOMIC_ASM
#undef ATOMIC_LOAD
#undef ATOMIC_STORE
#undef ATOMIC_LOADSTORE
#ifndef WANT_FUNCTIONS

/* Read the current value and store a new value in the destination. */
#ifdef __GNUCLIKE_ASM

static __inline u_int
atomic_swap_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_swap_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}

static __inline u_long
atomic_swap_long(volatile u_long *p, u_long v)
{

	__asm __volatile(
	"	xchgq	%1,%0 ;		"
	"# atomic_swap_long"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}

#else /* !__GNUCLIKE_ASM */

u_int	atomic_swap_int(volatile u_int *p, u_int v);
u_long	atomic_swap_long(volatile u_long *p, u_long v);

#endif /* __GNUCLIKE_ASM */

#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char
#define	atomic_cmpset_acq_char		atomic_cmpset_char
#define	atomic_cmpset_rel_char		atomic_cmpset_char
#define	atomic_fcmpset_acq_char		atomic_fcmpset_char
#define	atomic_fcmpset_rel_char		atomic_fcmpset_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short
#define	atomic_cmpset_acq_short		atomic_cmpset_short
#define	atomic_cmpset_rel_short		atomic_cmpset_short
#define	atomic_fcmpset_acq_short	atomic_fcmpset_short
#define	atomic_fcmpset_rel_short	atomic_fcmpset_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int
#define	atomic_fcmpset_acq_int		atomic_fcmpset_int
#define	atomic_fcmpset_rel_int		atomic_fcmpset_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long
#define	atomic_fcmpset_acq_long		atomic_fcmpset_long
#define	atomic_fcmpset_rel_long		atomic_fcmpset_long

#define	atomic_readandclear_int(p)	atomic_swap_int(p, 0)
#define	atomic_readandclear_long(p)	atomic_swap_long(p, 0)

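/*
 * Illustrative example (not part of this file): readandclear drains a
 * counter of pending events in one atomic step; "pending" is
 * hypothetical.
 *
 *	n = atomic_readandclear_int(&pending);
 *	while (n-- != 0)
 *		process_event();
 */
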
/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char
#define	atomic_cmpset_8		atomic_cmpset_char
#define	atomic_cmpset_acq_8	atomic_cmpset_acq_char
#define	atomic_cmpset_rel_8	atomic_cmpset_rel_char
#define	atomic_fcmpset_8	atomic_fcmpset_char
#define	atomic_fcmpset_acq_8	atomic_fcmpset_acq_char
#define	atomic_fcmpset_rel_8	atomic_fcmpset_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short
#define	atomic_cmpset_16	atomic_cmpset_short
#define	atomic_cmpset_acq_16	atomic_cmpset_acq_short
#define	atomic_cmpset_rel_16	atomic_cmpset_rel_short
#define	atomic_fcmpset_16	atomic_fcmpset_short
#define	atomic_fcmpset_acq_16	atomic_fcmpset_acq_short
#define	atomic_fcmpset_rel_16	atomic_fcmpset_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_fcmpset_32	atomic_fcmpset_int
#define	atomic_fcmpset_acq_32	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_32	atomic_fcmpset_rel_int
#define	atomic_swap_32		atomic_swap_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_testandset_32	atomic_testandset_int
#define	atomic_testandclear_32	atomic_testandclear_int

/* Operations on 64-bit quad words. */
#define	atomic_set_64		atomic_set_long
#define	atomic_set_acq_64	atomic_set_acq_long
#define	atomic_set_rel_64	atomic_set_rel_long
#define	atomic_clear_64		atomic_clear_long
#define	atomic_clear_acq_64	atomic_clear_acq_long
#define	atomic_clear_rel_64	atomic_clear_rel_long
#define	atomic_add_64		atomic_add_long
#define	atomic_add_acq_64	atomic_add_acq_long
#define	atomic_add_rel_64	atomic_add_rel_long
#define	atomic_subtract_64	atomic_subtract_long
#define	atomic_subtract_acq_64	atomic_subtract_acq_long
#define	atomic_subtract_rel_64	atomic_subtract_rel_long
#define	atomic_load_acq_64	atomic_load_acq_long
#define	atomic_store_rel_64	atomic_store_rel_long
#define	atomic_cmpset_64	atomic_cmpset_long
#define	atomic_cmpset_acq_64	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_64	atomic_cmpset_rel_long
#define	atomic_fcmpset_64	atomic_fcmpset_long
#define	atomic_fcmpset_acq_64	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_64	atomic_fcmpset_rel_long
#define	atomic_swap_64		atomic_swap_long
#define	atomic_readandclear_64	atomic_readandclear_long
#define	atomic_fetchadd_64	atomic_fetchadd_long
#define	atomic_testandset_64	atomic_testandset_long
#define	atomic_testandclear_64	atomic_testandclear_long

/* Operations on pointers. */
#define	atomic_set_ptr		atomic_set_long
#define	atomic_set_acq_ptr	atomic_set_acq_long
#define	atomic_set_rel_ptr	atomic_set_rel_long
#define	atomic_clear_ptr	atomic_clear_long
#define	atomic_clear_acq_ptr	atomic_clear_acq_long
#define	atomic_clear_rel_ptr	atomic_clear_rel_long
#define	atomic_add_ptr		atomic_add_long
#define	atomic_add_acq_ptr	atomic_add_acq_long
#define	atomic_add_rel_ptr	atomic_add_rel_long
#define	atomic_subtract_ptr	atomic_subtract_long
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
#define	atomic_load_acq_ptr	atomic_load_acq_long
#define	atomic_store_rel_ptr	atomic_store_rel_long
#define	atomic_cmpset_ptr	atomic_cmpset_long
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
#define	atomic_fcmpset_ptr	atomic_fcmpset_long
#define	atomic_fcmpset_acq_ptr	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_ptr	atomic_fcmpset_rel_long
#define	atomic_swap_ptr		atomic_swap_long
#define	atomic_readandclear_ptr	atomic_readandclear_long

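/*
 * Illustrative example (not part of this file): lock-free LIFO push
 * with the pointer variants; "head", "elem", and the casts are
 * hypothetical (the ptr operations map to the long operations here).
 *
 *	struct elem *old;
 *
 *	do {
 *		old = head;
 *		elem->next = old;
 *	} while (atomic_cmpset_ptr((volatile u_long *)&head,
 *	    (u_long)old, (u_long)elem) == 0);
 */
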
#endif /* !WANT_FUNCTIONS */

#endif /* !_MACHINE_ATOMIC_H_ */