/* $NetBSD: atomic_op_asm.h,v 1.1 2014/08/10 05:47:35 matt Exp $ */

/*-
 * Copyright (c) 2014 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _ATOMIC_OP_ASM_H_
#define	_ATOMIC_OP_ASM_H_

#include <machine/asm.h>

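/*
 * Each ATOMIC_OP<size>(OP, INSN) macro below emits a load-exclusive/
 * store-exclusive (LL/SC) retry loop implementing _atomic_<OP>_<size>():
 * INSN combines the caller's operand with the value at the given address,
 * the result is stored back, and the old value is returned.  The matching
 * ATOMIC_OP<size>_NV variants return the new value instead.
 */
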
#define	ATOMIC_OP8(OP, INSN)						\
ENTRY_NP(_atomic_##OP##_8)						;\
	mov	x4, x0							;\
1:	ldxrb	w0, [x4]		/* load old value */		;\
	INSN	w2, w1, w0		/* calculate new value */	;\
	stxrb	w3, w2, [x4]		/* try to store */		;\
	cbnz	w3, 1b			/*   succeed? no, try again */	;\
	dmb	st							;\
	ret				/* return old value */		;\
END(_atomic_##OP##_8)

#define	ATOMIC_OP8_NV(OP, INSN)						\
ENTRY_NP(_atomic_##OP##_8_nv)						;\
	mov	x4, x0			/* need x0 for return value */	;\
1:	ldxrb	w0, [x4]		/* load old value */		;\
	INSN	w0, w0, w1		/* calc new (return) value */	;\
	stxrb	w3, w0, [x4]		/* try to store */		;\
	cbnz	w3, 1b			/*   succeed? no, try again */	;\
	dmb	st							;\
	ret				/* return new value */		;\
END(_atomic_##OP##_8_nv)

#define	ATOMIC_OP16(OP, INSN)						\
ENTRY_NP(_atomic_##OP##_16)						;\
	mov	x4, x0							;\
1:	ldxrh	w0, [x4]		/* load old value */		;\
	INSN	w2, w1, w0		/* calculate new value */	;\
	stxrh	w3, w2, [x4]		/* try to store */		;\
	cbnz	w3, 1b			/*   succeed? no, try again */	;\
	dmb	st							;\
	ret				/* return old value */		;\
END(_atomic_##OP##_16)

#define	ATOMIC_OP16_NV(OP, INSN)					\
ENTRY_NP(_atomic_##OP##_16_nv)						;\
	mov	x4, x0			/* need x0 for return value */	;\
1:	ldxrh	w0, [x4]		/* load old value */		;\
	INSN	w0, w0, w1		/* calc new (return) value */	;\
	stxrh	w3, w0, [x4]		/* try to store */		;\
	cbnz	w3, 1b			/*   succeed? no, try again */	;\
	dmb	st							;\
	ret				/* return new value */		;\
END(_atomic_##OP##_16_nv)

#define	ATOMIC_OP32(OP, INSN)						\
ENTRY_NP(_atomic_##OP##_32)						;\
	mov	x4, x0							;\
1:	ldxr	w0, [x4]		/* load old value */		;\
	INSN	w2, w1, w0		/* calculate new value */	;\
	stxr	w3, w2, [x4]		/* try to store */		;\
	cbnz	w3, 1b			/*   succeed? no, try again */	;\
	dmb	st							;\
	ret				/* return old value */		;\
END(_atomic_##OP##_32)

#define	ATOMIC_OP32_NV(OP, INSN)					\
ENTRY_NP(_atomic_##OP##_32_nv)						;\
	mov	x4, x0			/* need x0 for return value */	;\
1:	ldxr	w0, [x4]		/* load old value */		;\
	INSN	w0, w0, w1		/* calc new (return) value */	;\
	stxr	w3, w0, [x4]		/* try to store */		;\
	cbnz	w3, 1b			/*   succeed? no, try again */	;\
	dmb	sy							;\
	ret				/* return new value */		;\
END(_atomic_##OP##_32_nv)

#define	ATOMIC_OP64(OP, INSN)						\
ENTRY_NP(_atomic_##OP##_64)						;\
	mov	x4, x0							;\
1:	ldxr	x0, [x4]		/* load old value */		;\
	INSN	x2, x1, x0		/* calculate new value */	;\
	stxr	w3, x2, [x4]		/* try to store */		;\
	cbnz	w3, 1b			/*   succeed? no, try again */	;\
	dmb	st							;\
	ret				/* return old value */		;\
END(_atomic_##OP##_64)

#define	ATOMIC_OP64_NV(OP, INSN)					\
ENTRY_NP(_atomic_##OP##_64_nv)						;\
	mov	x4, x0			/* need x0 for return value */	;\
1:	ldxr	x0, [x4]		/* load old value */		;\
	INSN	x0, x0, x1		/* calc new (return) value */	;\
	stxr	w3, x0, [x4]		/* try to store */		;\
	cbnz	w3, 1b			/*   succeed? no, try again */	;\
	dmb	sy							;\
	ret				/* return new value */		;\
END(_atomic_##OP##_64_nv)

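/*
 * ATOMIC_OP_ALIAS() publishes the public name for one of the _atomic_*
 * entry points defined above: as a strong alias in the kernel, and as a
 * weak alias in userland so the symbol can be overridden.
 *
 * Illustrative use only (actual consumer files may differ): a source file
 * such as atomic_add_32.S could instantiate
 *
 *	ATOMIC_OP32(add, add)
 *	ATOMIC_OP32_NV(add, add)
 *	ATOMIC_OP_ALIAS(atomic_add_32,_atomic_add_32)
 *	ATOMIC_OP_ALIAS(atomic_add_32_nv,_atomic_add_32_nv)
 *
 * to provide atomic_add_32() and atomic_add_32_nv().
 */
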
#if defined(_KERNEL)

#define	ATOMIC_OP_ALIAS(a,s)	STRONG_ALIAS(a,s)

#else /* _KERNEL */

#define	ATOMIC_OP_ALIAS(a,s)	WEAK_ALIAS(a,s)

#endif /* _KERNEL */

#endif /* _ATOMIC_OP_ASM_H_ */