xref: /freebsd/sys/contrib/ck/include/gcc/arm/ck_pr.h (revision 0957b409)
1 /*
2  * Copyright 2009-2015 Samy Al Bahra.
3  * Copyright 2013-2015 Olivier Houchard.
4  * All rights reserved.
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions
8  * are met:
9  * 1. Redistributions of source code must retain the above copyright
10  *    notice, this list of conditions and the following disclaimer.
11  * 2. Redistributions in binary form must reproduce the above copyright
12  *    notice, this list of conditions and the following disclaimer in the
13  *    documentation and/or other materials provided with the distribution.
14  *
15  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
16  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
17  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
18  * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
19  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
20  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
21  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
22  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
23  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
24  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
25  * SUCH DAMAGE.
26  */
27 
28 #ifndef CK_PR_ARM_H
29 #define CK_PR_ARM_H
30 
31 #ifndef CK_PR_H
32 #error Do not include this file directly, use ck_pr.h
33 #endif
34 
35 #include <ck_cc.h>
36 #include <ck_md.h>
37 
38 /*
39  * The following represent supported atomic operations.
40  * These operations may be emulated.
41  */
42 #include "ck_f_pr.h"
43 
44 /*
45  * Minimum interface requirement met.
46  */
47 #define CK_F_PR
48 
/*
 * Spin-loop hint.  On this port it is only a compiler barrier (the
 * "memory" clobber prevents reordering/caching across the call); no
 * CPU yield instruction (wfe/yield) is emitted.
 */
CK_CC_INLINE static void
ck_pr_stall(void)
{

	__asm__ __volatile__("" ::: "memory");
	return;
}
56 
/*
 * Barrier building blocks used by CK_PR_FENCE below (and #undef'd after).
 * ARMv7 provides isb/dmb/dsb as real instructions; "dmb st" orders only
 * prior stores.  ARMv6 must go through the CP15 coprocessor encodings and
 * has no store-only variant, so CK_DMB_ST degrades to a full CK_DMB there.
 */
#if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__)
#define CK_ISB __asm __volatile("isb" : : "r" (0) : "memory")
#define CK_DMB __asm __volatile("dmb" : : "r" (0) : "memory")
#define CK_DSB __asm __volatile("dsb" : : "r" (0) : "memory")
/* FreeBSD's toolchain doesn't accept dmb st, so use the opcode instead */
#ifdef __FreeBSD__
/* 0xf57ff05e is the ARM encoding of "dmb st". */
#define CK_DMB_ST __asm __volatile(".word 0xf57ff05e" : : "r" (0) : "memory")
#else
#define CK_DMB_ST __asm __volatile("dmb st" : : "r" (0) : "memory")
#endif /* __FreeBSD__ */
#else
/* armv6 doesn't have dsb/dmb/isb, and no way to wait only for stores */
#define CK_ISB \
    __asm __volatile("mcr p15, 0, %0, c7, c5, 4" : : "r" (0) : "memory")
#define CK_DSB \
    __asm __volatile("mcr p15, 0, %0, c7, c10, 4" : : "r" (0) : "memory")
#define CK_DMB  \
    __asm __volatile("mcr p15, 0, %0, c7, c10, 5" : : "r" (0) : "memory")
#define CK_DMB_ST CK_DMB
#endif
77 
/*
 * CK_PR_FENCE(T, I): define ck_pr_fence_strict_T(void), which executes
 * barrier statement I.  The non-strict ck_pr_fence_* wrappers are derived
 * from these by the generic ck_pr.h machinery.
 */
#define CK_PR_FENCE(T, I)				\
	CK_CC_INLINE static void			\
	ck_pr_fence_strict_##T(void)			\
	{						\
		I;					\
	}

/*
 * Fences that only need prior stores to drain use the cheaper store-only
 * barrier; everything involving loads requires a full dmb.
 */
CK_PR_FENCE(atomic, CK_DMB_ST)
CK_PR_FENCE(atomic_store, CK_DMB_ST)
CK_PR_FENCE(atomic_load, CK_DMB_ST)
CK_PR_FENCE(store_atomic, CK_DMB_ST)
CK_PR_FENCE(load_atomic, CK_DMB)
CK_PR_FENCE(store, CK_DMB_ST)
CK_PR_FENCE(store_load, CK_DMB)
CK_PR_FENCE(load, CK_DMB)
CK_PR_FENCE(load_store, CK_DMB)
CK_PR_FENCE(memory, CK_DMB)
CK_PR_FENCE(acquire, CK_DMB)
CK_PR_FENCE(release, CK_DMB)
CK_PR_FENCE(acqrel, CK_DMB)
CK_PR_FENCE(lock, CK_DMB)
CK_PR_FENCE(unlock, CK_DMB)

#undef CK_PR_FENCE

#undef CK_ISB
#undef CK_DSB
#undef CK_DMB
#undef CK_DMB_ST
107 
/*
 * CK_PR_LOAD(S, M, T, C, I): define ck_pr_md_load_S(const M *), returning T.
 * I is the load mnemonic (ldr/ldrh/ldrb); a single naturally-aligned load
 * is atomic on ARM, so no exclusive monitor is needed.  Parameter C is
 * unused here; it is kept so the macro signature matches other ck_pr ports.
 * The value is staged in a long and cast to T on return.
 */
#define CK_PR_LOAD(S, M, T, C, I)				\
	CK_CC_INLINE static T					\
	ck_pr_md_load_##S(const M *target)			\
	{							\
		long r = 0;					\
		__asm__ __volatile__(I " %0, [%1];"		\
					: "=r" (r)		\
					: "r"  (target)		\
					: "memory");		\
		return ((T)r);					\
	}

CK_PR_LOAD(ptr, void, void *, uint32_t, "ldr")

/* Same-type convenience wrapper: pointed-to and returned types coincide. */
#define CK_PR_LOAD_S(S, T, I) CK_PR_LOAD(S, T, T, T, I)

CK_PR_LOAD_S(32, uint32_t, "ldr")
CK_PR_LOAD_S(16, uint16_t, "ldrh")
CK_PR_LOAD_S(8, uint8_t, "ldrb")
CK_PR_LOAD_S(uint, unsigned int, "ldr")
CK_PR_LOAD_S(int, int, "ldr")
CK_PR_LOAD_S(short, short, "ldrh")
CK_PR_LOAD_S(char, char, "ldrb")

#undef CK_PR_LOAD_S
#undef CK_PR_LOAD
134 
#if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__)

/*
 * CK_PR_DOUBLE_LOAD(T, N): atomic 64-bit load via ldrexd, which reads a
 * doubleword single-copy-atomically into an even/odd register pair (hence
 * the "=&r" early-clobber on a 64-bit variable).  Only available on ARMv7;
 * target must be 8-byte aligned.
 */
#define CK_PR_DOUBLE_LOAD(T, N) 		\
CK_CC_INLINE static T				\
ck_pr_md_load_##N(const T *target)		\
{						\
	register T ret;				\
						\
	__asm __volatile("ldrexd %0, [%1]" 	\
	    : "=&r" (ret)			\
	    : "r" (target)			\
	    : "memory", "cc");			\
	return (ret);				\
}

CK_PR_DOUBLE_LOAD(uint64_t, 64)
#ifndef CK_PR_DISABLE_DOUBLE
CK_PR_DOUBLE_LOAD(double, double)
#endif
#undef CK_PR_DOUBLE_LOAD
#endif
156 
/*
 * CK_PR_STORE(S, M, T, C, I): define ck_pr_md_store_S(M *, T).  I is the
 * store mnemonic (str/strh/strb); a single naturally-aligned store is
 * atomic on ARM.  Parameter C is unused, kept for signature parity with
 * other ck_pr ports.
 */
#define CK_PR_STORE(S, M, T, C, I)				\
	CK_CC_INLINE static void				\
	ck_pr_md_store_##S(M *target, T v)			\
	{							\
		__asm__ __volatile__(I " %1, [%0]"		\
					:			\
					: "r" (target),		\
					  "r" (v)		\
					: "memory");		\
		return;						\
	}

CK_PR_STORE(ptr, void, const void *, uint32_t, "str")

/* Same-type convenience wrapper. */
#define CK_PR_STORE_S(S, T, I) CK_PR_STORE(S, T, T, T, I)

CK_PR_STORE_S(32, uint32_t, "str")
CK_PR_STORE_S(16, uint16_t, "strh")
CK_PR_STORE_S(8, uint8_t, "strb")
CK_PR_STORE_S(uint, unsigned int, "str")
CK_PR_STORE_S(int, int, "str")
CK_PR_STORE_S(short, short, "strh")
CK_PR_STORE_S(char, char, "strb")

#undef CK_PR_STORE_S
#undef CK_PR_STORE
183 
#if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__)

/*
 * CK_PR_DOUBLE_STORE(T, N): atomic 64-bit store.  strexd only succeeds
 * while the exclusive monitor is armed, so each iteration performs a
 * dummy ldrexd on the target and retries until strexd reports success
 * (status flag == 0).  NOTE(review): target is declared const but is
 * written through by the asm -- presumably to match the generic ck_pr
 * prototypes; confirm against ck_pr.h before changing.
 */
#define CK_PR_DOUBLE_STORE(T, N)				\
CK_CC_INLINE static void					\
ck_pr_md_store_##N(const T *target, T value)			\
{								\
	T tmp;							\
	uint32_t flag;						\
	__asm __volatile("1: 		\n"			\
	    		 "ldrexd	%0, [%2]\n"		\
			 "strexd	%1, %3, [%2]\n"		\
			 "teq		%1, #0\n"		\
			 "it ne		\n"			\
			 "bne		1b\n"			\
				: "=&r" (tmp), "=&r" (flag)	\
				: "r" (target), "r" (value)	\
				: "memory", "cc");		\
}

CK_PR_DOUBLE_STORE(uint64_t, 64)
#ifndef CK_PR_DISABLE_DOUBLE
CK_PR_DOUBLE_STORE(double, double)
#endif

#undef CK_PR_DOUBLE_STORE
209 
/*
 * CK_PR_DOUBLE_CAS_VALUE(T, N): 64-bit compare-and-swap that also returns
 * the previously observed value through *value.  %Q/%R are GCC/Clang ARM
 * operand modifiers selecting the low/high 32-bit word of a 64-bit
 * operand.  The ldrexd/strexd pair retries on monitor loss (strexd
 * status == 1); on a compare mismatch the IT block skips the store and
 * the loop exits with *value holding the observed contents.
 */
#define CK_PR_DOUBLE_CAS_VALUE(T, N)				\
CK_CC_INLINE static bool					\
ck_pr_cas_##N##_value(T *target, T compare, T set, T *value)	\
{								\
        T previous;						\
        int tmp;						\
								\
	__asm__ __volatile__("1:"				\
			     "ldrexd %0, [%4];"			\
			     "cmp    %Q0, %Q2;"			\
			     "ittt eq;"				\
			     "cmpeq  %R0, %R2;"			\
			     "strexdeq %1, %3, [%4];"		\
			     "cmpeq  %1, #1;"			\
			     "beq 1b;"				\
				:"=&r" (previous), "=&r" (tmp)	\
				: "r" (compare), "r" (set) ,	\
				  "r"(target)			\
				: "memory", "cc");		\
        *value = previous;					\
	return (*value == compare);				\
}

CK_PR_DOUBLE_CAS_VALUE(uint64_t, 64)
#ifndef CK_PR_DISABLE_DOUBLE
CK_PR_DOUBLE_CAS_VALUE(double, double)
#endif

#undef CK_PR_DOUBLE_CAS_VALUE
239 
240 CK_CC_INLINE static bool
241 ck_pr_cas_ptr_2_value(void *target, void *compare, void *set, void *value)
242 {
243 	uint32_t *_compare = CK_CPP_CAST(uint32_t *, compare);
244 	uint32_t *_set = CK_CPP_CAST(uint32_t *, set);
245 	uint64_t __compare = ((uint64_t)_compare[0]) | ((uint64_t)_compare[1] << 32);
246 	uint64_t __set = ((uint64_t)_set[0]) | ((uint64_t)_set[1] << 32);
247 
248 	return (ck_pr_cas_64_value(CK_CPP_CAST(uint64_t *, target),
249 				   __compare,
250 				   __set,
251 				   CK_CPP_CAST(uint64_t *, value)));
252 }
253 
/*
 * CK_PR_DOUBLE_CAS(T, N): 64-bit compare-and-swap without value return.
 * ret starts at 0 each iteration and is set to 1 only when both words
 * matched and the strexd was attempted; a failed strexd (status 1 in %1)
 * re-enters the loop.  %Q/%R select the low/high word of a 64-bit operand.
 */
#define CK_PR_DOUBLE_CAS(T, N)  		\
CK_CC_INLINE static bool			\
ck_pr_cas_##N(T *target, T compare, T set)	\
{						\
	int ret;				\
        T tmp;					\
						\
	__asm__ __volatile__("1:"		\
			     "mov %0, #0;"	\
			     "ldrexd %1, [%4];"	\
			     "cmp    %Q1, %Q2;"	\
			     "itttt eq;"	\
			     "cmpeq  %R1, %R2;"	\
			     "strexdeq %1, %3, [%4];" \
			     "moveq %0, #1;"	\
			     "cmpeq  %1, #1;"	\
			     "beq 1b;"		\
			     : "=&r" (ret), "=&r" (tmp) \
			     : "r" (compare), "r" (set) , \
			       "r"(target)	\
			     : "memory", "cc");	\
						\
	return (ret);				\
}

CK_PR_DOUBLE_CAS(uint64_t, 64)
#ifndef CK_PR_DISABLE_DOUBLE
CK_PR_DOUBLE_CAS(double, double)
#endif
283 
284 CK_CC_INLINE static bool
285 ck_pr_cas_ptr_2(void *target, void *compare, void *set)
286 {
287 	uint32_t *_compare = CK_CPP_CAST(uint32_t *, compare);
288 	uint32_t *_set = CK_CPP_CAST(uint32_t *, set);
289 	uint64_t __compare = ((uint64_t)_compare[0]) | ((uint64_t)_compare[1] << 32);
290 	uint64_t __set = ((uint64_t)_set[0]) | ((uint64_t)_set[1] << 32);
291 	return (ck_pr_cas_64(CK_CPP_CAST(uint64_t *, target),
292 			     __compare,
293 			     __set));
294 }
295 
296 #endif
297 
/*
 * Word-sized pointer CAS with value return: classic ldrex/strex loop.
 * If the loaded value matches compare, strexeq attempts the store and a
 * failed attempt (status 1) retries; on mismatch the loop falls through.
 * The observed value is always written through *value.
 */
CK_CC_INLINE static bool
ck_pr_cas_ptr_value(void *target, void *compare, void *set, void *value)
{
	void *previous, *tmp;
	__asm__ __volatile__("1:"
			     "ldrex %0, [%2];"
			     "cmp   %0, %4;"
			     "itt eq;"
			     "strexeq %1, %3, [%2];"
			     "cmpeq   %1, #1;"
			     "beq   1b;"
			  	: "=&r" (previous),
				  "=&r" (tmp)
		  		: "r"   (target),
				  "r"   (set),
				  "r"   (compare)
				: "memory", "cc");
	*(void **)value = previous;
	return (previous == compare);
}
318 
/*
 * Word-sized pointer CAS: identical ldrex/strex loop to
 * ck_pr_cas_ptr_value() but without reporting the observed value.
 * Returns true iff the swap took place.
 */
CK_CC_INLINE static bool
ck_pr_cas_ptr(void *target, void *compare, void *set)
{
	void *previous, *tmp;
	__asm__ __volatile__("1:"
			     "ldrex %0, [%2];"
			     "cmp   %0, %4;"
			     "itt eq;"
			     "strexeq %1, %3, [%2];"
			     "cmpeq   %1, #1;"
			     "beq   1b;"
			  	: "=&r" (previous),
				  "=&r" (tmp)
		  		: "r"   (target),
				  "r"   (set),
				  "r"   (compare)
				: "memory", "cc");
	return (previous == compare);
}
338 
/*
 * CK_PR_CAS(N, T, W): generate ck_pr_cas_N_value() and ck_pr_cas_N() for
 * type T.  W is the width suffix spliced into ldrex/strex ("" word,
 * "h" halfword, "b" byte).  Both are the same ldrex/strex retry loop as
 * the pointer variants above; a strex status of 1 (monitor lost) retries,
 * a compare mismatch exits immediately.
 */
#define CK_PR_CAS(N, T, W)						\
	CK_CC_INLINE static bool					\
	ck_pr_cas_##N##_value(T *target, T compare, T set, T *value)	\
	{								\
		T previous = 0, tmp = 0;				\
		__asm__ __volatile__("1:"				\
				     "ldrex" W " %0, [%2];"		\
				     "cmp   %0, %4;"			\
				     "itt eq;"				\
				     "strex" W "eq %1, %3, [%2];"	\
		    		     "cmpeq   %1, #1;"			\
				     "beq   1b;"			\
			/* 						\
			 * Using "+&" instead of "=&" to avoid bogus	\
			 * clang warnings.				\
			 */						\
					: "+&r" (previous),		\
		    			  "+&r" (tmp)			\
					: "r"   (target),		\
					  "r"   (set),			\
					  "r"   (compare)		\
					: "memory", "cc");		\
		*value = previous; 					\
		return (previous == compare);				\
	}								\
	CK_CC_INLINE static bool					\
	ck_pr_cas_##N(T *target, T compare, T set)			\
	{								\
		T previous = 0, tmp = 0;				\
		__asm__ __volatile__("1:"				\
				     "ldrex" W " %0, [%2];"		\
				     "cmp   %0, %4;"			\
				     "itt eq;"				\
				     "strex" W "eq %1, %3, [%2];"	\
				     "cmpeq   %1, #1;"			\
				     "beq   1b;"			\
					: "+&r" (previous),		\
		    			  "+&r" (tmp)			\
					: "r"   (target),		\
					  "r"   (set),			\
					  "r"   (compare)		\
					: "memory", "cc");		\
		return (previous == compare);				\
	}

CK_PR_CAS(32, uint32_t, "")
CK_PR_CAS(uint, unsigned int, "")
CK_PR_CAS(int, int, "")
CK_PR_CAS(16, uint16_t, "h")
CK_PR_CAS(8, uint8_t, "b")
CK_PR_CAS(short, short, "h")
CK_PR_CAS(char, char, "b")


#undef CK_PR_CAS
394 
/*
 * CK_PR_FAS(N, M, T, W): fetch-and-store (atomic exchange).  Loads the
 * old value with ldrex, stores the replacement with strex and retries on
 * monitor loss.  Returns the previous contents of *target.  W is the
 * width suffix ("", "h", "b").
 */
#define CK_PR_FAS(N, M, T, W)					\
	CK_CC_INLINE static T					\
	ck_pr_fas_##N(M *target, T v)				\
	{							\
		T previous = 0;					\
		T tmp = 0;					\
		__asm__ __volatile__("1:"			\
				     "ldrex" W " %0, [%2];"	\
				     "strex" W " %1, %3, [%2];"	\
		    		     "cmp %1, #0;"		\
				     "bne 1b;"			\
					: "+&r" (previous),	\
		    			  "+&r" (tmp) 		\
					: "r"   (target),	\
					  "r"   (v)		\
					: "memory", "cc");	\
		return (previous);				\
	}

CK_PR_FAS(32, uint32_t, uint32_t, "")
CK_PR_FAS(ptr, void, void *, "")
CK_PR_FAS(int, int, int, "")
CK_PR_FAS(uint, unsigned int, unsigned int, "")
CK_PR_FAS(16, uint16_t, uint16_t, "h")
CK_PR_FAS(8, uint8_t, uint8_t, "b")
CK_PR_FAS(short, short, short, "h")
CK_PR_FAS(char, char, char, "b")


#undef CK_PR_FAS
425 
/*
 * CK_PR_UNARY(O, N, M, T, I, W): in-place atomic unary op ck_pr_O_N().
 * I is the asm statement applied to the loaded value in %0 before it is
 * written back with strex; the loop retries until the exclusive store
 * succeeds.  W is the width suffix ("", "h", "b").
 */
#define CK_PR_UNARY(O, N, M, T, I, W)				\
	CK_CC_INLINE static void				\
	ck_pr_##O##_##N(M *target)				\
	{							\
		T previous = 0;					\
		T tmp = 0;					\
		__asm__ __volatile__("1:"			\
				     "ldrex" W " %0, [%2];"	\
				      I ";"			\
				     "strex" W " %1, %0, [%2];"	\
		    		     "cmp   %1, #0;"		\
				     "bne   1b;"		\
					: "+&r" (previous),	\
		    			  "+&r" (tmp)		\
					: "r"   (target)	\
					: "memory", "cc");	\
		return;						\
	}

/* inc/dec via add/sub #1; not = bitwise complement; neg = two's complement. */
CK_PR_UNARY(inc, ptr, void, void *, "add %0, %0, #1", "")
CK_PR_UNARY(dec, ptr, void, void *, "sub %0, %0, #1", "")
CK_PR_UNARY(not, ptr, void, void *, "mvn %0, %0", "")
CK_PR_UNARY(neg, ptr, void, void *, "neg %0, %0", "")

#define CK_PR_UNARY_S(S, T, W)					\
	CK_PR_UNARY(inc, S, T, T, "add %0, %0, #1", W)		\
	CK_PR_UNARY(dec, S, T, T, "sub %0, %0, #1", W)		\
	CK_PR_UNARY(not, S, T, T, "mvn %0, %0", W)		\
	CK_PR_UNARY(neg, S, T, T, "neg %0, %0", W)		\

CK_PR_UNARY_S(32, uint32_t, "")
CK_PR_UNARY_S(uint, unsigned int, "")
CK_PR_UNARY_S(int, int, "")
CK_PR_UNARY_S(16, uint16_t, "h")
CK_PR_UNARY_S(8, uint8_t, "b")
CK_PR_UNARY_S(short, short, "h")
CK_PR_UNARY_S(char, char, "b")

#undef CK_PR_UNARY_S
#undef CK_PR_UNARY
466 
/*
 * CK_PR_BINARY(O, N, M, T, I, W): in-place atomic binary op
 * ck_pr_O_N(target, delta).  I is the three-operand mnemonic applied as
 * "I %0, %0, %3" between ldrex and strex; retries until the exclusive
 * store succeeds.  W is the width suffix ("", "h", "b").
 */
#define CK_PR_BINARY(O, N, M, T, I, W)				\
	CK_CC_INLINE static void				\
	ck_pr_##O##_##N(M *target, T delta)			\
	{							\
		T previous = 0;					\
		T tmp = 0;					\
		__asm__ __volatile__("1:"			\
				     "ldrex" W " %0, [%2];"	\
				      I " %0, %0, %3;"		\
				     "strex" W " %1, %0, [%2];"	\
		    		     "cmp %1, #0;"		\
				     "bne 1b;"			\
					: "+&r" (previous),	\
		    			  "+&r" (tmp)		\
					: "r"   (target),	\
					  "r"   (delta)		\
					: "memory", "cc");	\
		return;						\
	}

CK_PR_BINARY(and, ptr, void, uintptr_t, "and", "")
CK_PR_BINARY(add, ptr, void, uintptr_t, "add", "")
CK_PR_BINARY(or, ptr, void, uintptr_t, "orr", "")
CK_PR_BINARY(sub, ptr, void, uintptr_t, "sub", "")
CK_PR_BINARY(xor, ptr, void, uintptr_t, "eor", "")

#define CK_PR_BINARY_S(S, T, W)			\
	CK_PR_BINARY(and, S, T, T, "and", W)	\
	CK_PR_BINARY(add, S, T, T, "add", W)	\
	CK_PR_BINARY(or, S, T, T, "orr", W)	\
	CK_PR_BINARY(sub, S, T, T, "sub", W)	\
	CK_PR_BINARY(xor, S, T, T, "eor", W)

CK_PR_BINARY_S(32, uint32_t, "")
CK_PR_BINARY_S(uint, unsigned int, "")
CK_PR_BINARY_S(int, int, "")
CK_PR_BINARY_S(16, uint16_t, "h")
CK_PR_BINARY_S(8, uint8_t, "b")
CK_PR_BINARY_S(short, short, "h")
CK_PR_BINARY_S(char, char, "b")

#undef CK_PR_BINARY_S
#undef CK_PR_BINARY
510 
/*
 * Pointer fetch-and-add: atomically adds delta (in bytes, not element
 * counts) to *target and returns the previous pointer value.  The sum is
 * computed into a separate register (r) so the original value survives in
 * previous; strex retries on monitor loss.
 */
CK_CC_INLINE static void *
ck_pr_faa_ptr(void *target, uintptr_t delta)
{
	uintptr_t previous, r, tmp;

	__asm__ __volatile__("1:"
			     "ldrex %0, [%3];"
			     "add %1, %4, %0;"
			     "strex %2, %1, [%3];"
			     "cmp %2, #0;"
			     "bne  1b;"
				: "=&r" (previous),
				  "=&r" (r),
				  "=&r" (tmp)
				: "r"   (target),
				  "r"   (delta)
				: "memory", "cc");

	return (void *)(previous);
}
531 
/*
 * CK_PR_FAA(S, T, W): integer fetch-and-add for type T; same ldrex/strex
 * retry scheme as ck_pr_faa_ptr().  Returns the value observed before the
 * addition.  W is the width suffix ("", "h", "b").
 */
#define CK_PR_FAA(S, T, W)						\
	CK_CC_INLINE static T						\
	ck_pr_faa_##S(T *target, T delta)				\
	{								\
		T previous = 0, r = 0, tmp = 0;				\
		__asm__ __volatile__("1:"				\
				     "ldrex" W " %0, [%3];"		\
				     "add %1, %4, %0;"			\
				     "strex" W " %2, %1, [%3];"		\
		    		     "cmp %2, #0;"			\
				     "bne  1b;"				\
					: "+&r" (previous),		\
					  "+&r" (r),			\
		    			  "+&r" (tmp)			\
					: "r"   (target),		\
					  "r"   (delta)			\
					: "memory", "cc");		\
		return (previous);					\
	}

CK_PR_FAA(32, uint32_t, "")
CK_PR_FAA(uint, unsigned int, "")
CK_PR_FAA(int, int, "")
CK_PR_FAA(16, uint16_t, "h")
CK_PR_FAA(8, uint8_t, "b")
CK_PR_FAA(short, short, "h")
CK_PR_FAA(char, char, "b")

#undef CK_PR_FAA
561 
562 #endif /* CK_PR_ARM_H */
563 
564