/*	$OpenBSD: atomic.h,v 1.18 2017/07/31 11:52:49 kettenis Exp $	*/

/* Public Domain */

#ifndef _ARM_ATOMIC_H_
#define _ARM_ATOMIC_H_

/*
 * Compare and set:
 * ret = *ptr
 * if (ret == expect)
 * 	*ptr = new
 * return (ret)
 *
 * Implemented with the ARM exclusive monitor (ldrex/strex): the
 * store succeeds only while the reservation taken by the load is
 * still held, otherwise the sequence retries; clrex drops the
 * reservation on the mismatch path.
 */
#define _def_atomic_cas(_f, _t)					\
static inline _t						\
_f(volatile _t *p, _t e, _t n)					\
{								\
	_t ret, modified;					\
								\
	__asm volatile (					\
	    "1:	ldrex %0, [%4]		\n\t"			\
	    "	cmp %0, %3		\n\t"			\
	    "	bne 2f			\n\t"			\
	    "	strex %1, %2, [%4]	\n\t"			\
	    "	cmp %1, #0		\n\t"			\
	    "	bne 1b			\n\t"			\
	    "	b 3f			\n\t"			\
	    "2:	clrex			\n\t"			\
	    "3:				\n\t"			\
	    : "=&r" (ret), "=&r" (modified)			\
	    : "r" (n), "r" (e), "r" (p)				\
	    : "memory", "cc"					\
	);							\
	return (ret);						\
}
_def_atomic_cas(_atomic_cas_uint, unsigned int)
_def_atomic_cas(_atomic_cas_ulong, unsigned long)
#undef _def_atomic_cas

#define atomic_cas_uint(_p, _e, _n) _atomic_cas_uint((_p), (_e), (_n))
#define atomic_cas_ulong(_p, _e, _n) _atomic_cas_ulong((_p), (_e), (_n))
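
/*
 * Illustrative sketch (not part of this header): a saturating
 * increment built on atomic_cas_uint().  "counterp" and "max" are
 * hypothetical names; the loop retries whenever another CPU changed
 * the word between the read and the CAS.
 *
 *	unsigned int o;
 *	do {
 *		o = *counterp;
 *		if (o == max)
 *			break;
 *	} while (atomic_cas_uint(counterp, o, o + 1) != o);
 */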

static inline void *
_atomic_cas_ptr(volatile void *p, void *e, void *n)
{
	void *ret;
	unsigned long modified;

	__asm volatile (
	    "1:	ldrex %0, [%4]		\n\t"
	    "	cmp %0, %3		\n\t"
	    "	bne 2f			\n\t"
	    "	strex %1, %2, [%4]	\n\t"
	    "	cmp %1, #0		\n\t"
	    "	bne 1b			\n\t"
	    "	b 3f			\n\t"
	    "2:	clrex			\n\t"
	    "3:				\n\t"
	    : "=&r" (ret), "=&r" (modified)
	    : "r" (n), "r" (e), "r" (p)
	    : "memory", "cc"
	);
	return (ret);
}
#define atomic_cas_ptr(_p, _e, _n) _atomic_cas_ptr((_p), (_e), (_n))
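
/*
 * Illustrative sketch: a lock-free single-linked push built on
 * atomic_cas_ptr().  "headp", "node" and its "next" member are
 * hypothetical.
 *
 *	void *o;
 *	do {
 *		o = *headp;
 *		node->next = o;
 *	} while (atomic_cas_ptr(headp, o, node) != o);
 */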

/*
 * Swap:
 * ret = *p
 * *p = val
 * return (ret)
 */
#define _def_atomic_swap(_f, _t)				\
static inline _t						\
_f(volatile _t *p, _t v)					\
{								\
	_t ret, modified;					\
								\
	__asm volatile (					\
	    "1:	ldrex %0, [%3]		\n\t"			\
	    "	strex %1, %2, [%3]	\n\t"			\
	    "	cmp %1, #0		\n\t"			\
	    "	bne 1b			\n\t"			\
	    : "=&r" (ret), "=&r" (modified)			\
	    : "r" (v), "r" (p)					\
	    : "memory", "cc"					\
	);							\
	return (ret);						\
}
_def_atomic_swap(_atomic_swap_uint, unsigned int)
_def_atomic_swap(_atomic_swap_ulong, unsigned long)
#undef _def_atomic_swap

#define atomic_swap_uint(_p, _v) _atomic_swap_uint((_p), (_v))
#define atomic_swap_ulong(_p, _v) _atomic_swap_ulong((_p), (_v))

static inline void *
_atomic_swap_ptr(volatile void *p, void *v)
{
	void *ret;
	unsigned long modified;

	__asm volatile (
	    "1:	ldrex %0, [%3]		\n\t"
	    "	strex %1, %2, [%3]	\n\t"
	    "	cmp %1, #0		\n\t"
	    "	bne 1b			\n\t"
	    : "=&r" (ret), "=&r" (modified)
	    : "r" (v), "r" (p)
	    : "memory", "cc"
	);
	return (ret);
}
#define atomic_swap_ptr(_p, _v) _atomic_swap_ptr((_p), (_v))
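
/*
 * Illustrative sketch: atomic_swap_uint() as a test-and-set lock
 * bit; "lockp" is a hypothetical pointer to an unsigned int that is
 * 0 when free.  The returned old value tells us whether we won.
 *
 *	while (atomic_swap_uint(lockp, 1) != 0)
 *		;			(spin)
 *	membar_enter();
 *	... critical section ...
 *	membar_exit();
 *	atomic_swap_uint(lockp, 0);
 */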

/*
 * Increment returning the new value
 * *p += 1
 * return (*p)
 */
#define _def_atomic_inc_nv(_f, _t)				\
static inline _t						\
_f(volatile _t *p)						\
{								\
	_t ret, modified;					\
								\
	__asm volatile (					\
	    "1:	ldrex %0, [%2]		\n\t"			\
	    "	add %0, %0, #1		\n\t"			\
	    "	strex %1, %0, [%2]	\n\t"			\
	    "	cmp %1, #0		\n\t"			\
	    "	bne 1b			\n\t"			\
	    : "=&r" (ret), "=&r" (modified)			\
	    : "r" (p)						\
	    : "memory", "cc"					\
	);							\
	return (ret);						\
}
_def_atomic_inc_nv(_atomic_inc_int_nv, unsigned int)
_def_atomic_inc_nv(_atomic_inc_long_nv, unsigned long)
#undef _def_atomic_inc_nv

#define atomic_inc_int_nv(_p) _atomic_inc_int_nv((_p))
#define atomic_inc_long_nv(_p) _atomic_inc_long_nv((_p))
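
/*
 * Illustrative sketch: taking a reference on a hypothetical object;
 * the returned new value can sanity-check against wraparound.
 *
 *	if (atomic_inc_int_nv(&obj->refcnt) == 0)
 *		panic("refcnt wrapped");
 */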

/*
 * Decrement returning the new value
 * *p -= 1
 * return (*p)
 */
#define _def_atomic_dec_nv(_f, _t)				\
static inline _t						\
_f(volatile _t *p)						\
{								\
	_t ret, modified;					\
								\
	__asm volatile (					\
	    "1:	ldrex %0, [%2]		\n\t"			\
	    "	sub %0, %0, #1		\n\t"			\
	    "	strex %1, %0, [%2]	\n\t"			\
	    "	cmp %1, #0		\n\t"			\
	    "	bne 1b			\n\t"			\
	    : "=&r" (ret), "=&r" (modified)			\
	    : "r" (p)						\
	    : "memory", "cc"					\
	);							\
	return (ret);						\
}
_def_atomic_dec_nv(_atomic_dec_int_nv, unsigned int)
_def_atomic_dec_nv(_atomic_dec_long_nv, unsigned long)
#undef _def_atomic_dec_nv

#define atomic_dec_int_nv(_p) _atomic_dec_int_nv((_p))
#define atomic_dec_long_nv(_p) _atomic_dec_long_nv((_p))
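
/*
 * Illustrative sketch: dropping a reference and freeing on the last
 * one; "obj" and "obj_free" are hypothetical.
 *
 *	if (atomic_dec_int_nv(&obj->refcnt) == 0)
 *		obj_free(obj);
 */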

/*
 * Addition returning the new value
 * *p += v
 * return (*p)
 */
#define _def_atomic_add_nv(_f, _t)				\
static inline _t						\
_f(volatile _t *p, _t v)					\
{								\
	_t ret, modified;					\
								\
	__asm volatile (					\
	    "1:	ldrex %0, [%2]		\n\t"			\
	    "	add %0, %0, %3		\n\t"			\
	    "	strex %1, %0, [%2]	\n\t"			\
	    "	cmp %1, #0		\n\t"			\
	    "	bne 1b			\n\t"			\
	    : "=&r" (ret), "=&r" (modified)			\
	    : "r" (p), "r" (v)					\
	    : "memory", "cc"					\
	);							\
	return (ret);						\
}
_def_atomic_add_nv(_atomic_add_int_nv, unsigned int)
_def_atomic_add_nv(_atomic_add_long_nv, unsigned long)
#undef _def_atomic_add_nv

#define atomic_add_int_nv(_p, _v) _atomic_add_int_nv((_p), (_v))
#define atomic_add_long_nv(_p, _v) _atomic_add_long_nv((_p), (_v))
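
/*
 * Illustrative sketch: accounting bytes into a shared counter;
 * "bytesp" and "len" are hypothetical.
 *
 *	total = atomic_add_long_nv(bytesp, len);
 */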

/*
 * Subtraction returning the new value
 * *p -= v
 * return (*p)
 */
#define _def_atomic_sub_nv(_f, _t)				\
static inline _t						\
_f(volatile _t *p, _t v)					\
{								\
	_t ret, modified;					\
								\
	__asm volatile (					\
	    "1:	ldrex %0, [%2]		\n\t"			\
	    "	sub %0, %0, %3		\n\t"			\
	    "	strex %1, %0, [%2]	\n\t"			\
	    "	cmp %1, #0		\n\t"			\
	    "	bne 1b			\n\t"			\
	    : "=&r" (ret), "=&r" (modified)			\
	    : "r" (p), "r" (v)					\
	    : "memory", "cc"					\
	);							\
	return (ret);						\
}
_def_atomic_sub_nv(_atomic_sub_int_nv, unsigned int)
_def_atomic_sub_nv(_atomic_sub_long_nv, unsigned long)
#undef _def_atomic_sub_nv

#define atomic_sub_int_nv(_p, _v) _atomic_sub_int_nv((_p), (_v))
#define atomic_sub_long_nv(_p, _v) _atomic_sub_long_nv((_p), (_v))
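
/*
 * Illustrative sketch: consuming "n" units from a hypothetical
 * shared budget; the returned new value says how much remains.
 *
 *	left = atomic_sub_int_nv(&budget, n);
 */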

#define __membar(_f) do { __asm __volatile(_f ::: "memory"); } while (0)

#define membar_enter()		__membar("dmb sy")
#define membar_exit()		__membar("dmb sy")
#define membar_producer()	__membar("dmb st")
#define membar_consumer()	__membar("dmb sy")
#define membar_sync()		__membar("dmb sy")
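
/*
 * Illustrative pairing of the producer/consumer barriers ("ring",
 * "idx" and "i" are hypothetical): the producer's data stores are
 * made visible before the index is published, and the consumer's
 * index load is ordered before its data loads.
 *
 *	producer:			consumer:
 *		ring[i] = v;			i = idx;
 *		membar_producer();		membar_consumer();
 *		idx = i;			v = ring[i];
 */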

#if defined(_KERNEL)

/* virtio needs MP membars even on SP kernels */
#define virtio_membar_producer()	__membar("dmb st")
#define virtio_membar_consumer()	__membar("dmb sy")
#define virtio_membar_sync()		__membar("dmb sy")

/*
 * Set bits
 * *p = *p | v
 */
static inline void
atomic_setbits_int(volatile unsigned int *p, unsigned int v)
{
	unsigned int modified, tmp;

	__asm volatile (
	    "1:	ldrex %0, [%3]		\n\t"
	    "	orr %0, %0, %2		\n\t"
	    "	strex %1, %0, [%3]	\n\t"
	    "	cmp %1, #0		\n\t"
	    "	bne 1b			\n\t"
	    : "=&r" (tmp), "=&r" (modified)
	    : "r" (v), "r" (p)
	    : "memory", "cc"
	);
}

/*
 * Clear bits
 * *p = *p & (~v)
 */
static inline void
atomic_clearbits_int(volatile unsigned int *p, unsigned int v)
{
	unsigned int modified, tmp;

	__asm volatile (
	    "1:	ldrex %0, [%3]		\n\t"
	    "	bic %0, %0, %2		\n\t"
	    "	strex %1, %0, [%3]	\n\t"
	    "	cmp %1, #0		\n\t"
	    "	bne 1b			\n\t"
	    : "=&r" (tmp), "=&r" (modified)
	    : "r" (v), "r" (p)
	    : "memory", "cc"
	);
}
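
/*
 * Illustrative sketch: updating a flags word that is also touched
 * from interrupt context; "flagsp" and F_BUSY are hypothetical.
 *
 *	atomic_setbits_int(flagsp, F_BUSY);
 *	... do the work ...
 *	atomic_clearbits_int(flagsp, F_BUSY);
 */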

#endif /* defined(_KERNEL) */
#endif /* _ARM_ATOMIC_H_ */