/*
 * Copyright 2009-2016 Samy Al Bahra.
 * Copyright 2013-2016 Olivier Houchard.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#ifndef CK_PR_AARCH64_H
#define CK_PR_AARCH64_H

#ifndef CK_PR_H
#error Do not include this file directly, use ck_pr.h
#endif

#include <ck_cc.h>
#include <ck_md.h>

/*
 * The following represent supported atomic operations.
 * These operations may be emulated.
 */
#include "ck_f_pr.h"

/*
 * Minimum interface requirement met.
 */
#define CK_F_PR
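
/*
 * Illustrative sketch only, not part of this file's interface: a simple
 * producer/consumer hand-off built from the primitives defined below,
 * assuming `data' and `ready' are shared 32-bit variables.
 *
 *	Producer:
 *		ck_pr_md_store_32(&data, 42);
 *		ck_pr_fence_strict_store();	// order data before flag
 *		ck_pr_md_store_32(&ready, 1);
 *
 *	Consumer:
 *		while (ck_pr_md_load_32(&ready) == 0)
 *			ck_pr_stall();
 *		ck_pr_fence_strict_load();	// order flag before data
 *		(void)ck_pr_md_load_32(&data);
 */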

CK_CC_INLINE static void
ck_pr_stall(void)
{

	__asm__ __volatile__("" ::: "memory");
	return;
}

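/*
 * Note on the barrier choices: "dmb ish" is a full data memory barrier over
 * the inner shareable domain, "dmb ishld" orders earlier loads against later
 * loads and stores, and "dmb ishst" orders earlier stores against later
 * stores.
 */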
#define CK_DMB_SY __asm __volatile("dmb ish" : : "r" (0) : "memory")
#define CK_DMB_LD __asm __volatile("dmb ishld" : : "r" (0) : "memory")
#define CK_DMB_ST __asm __volatile("dmb ishst" : : "r" (0) : "memory")

#define CK_PR_FENCE(T, I)				\
	CK_CC_INLINE static void			\
	ck_pr_fence_strict_##T(void)			\
	{						\
		I;					\
	}

CK_PR_FENCE(atomic, CK_DMB_ST)
CK_PR_FENCE(atomic_store, CK_DMB_ST)
CK_PR_FENCE(atomic_load, CK_DMB_SY)
CK_PR_FENCE(store_atomic, CK_DMB_ST)
CK_PR_FENCE(load_atomic, CK_DMB_SY)
CK_PR_FENCE(store, CK_DMB_ST)
CK_PR_FENCE(store_load, CK_DMB_SY)
CK_PR_FENCE(load, CK_DMB_LD)
CK_PR_FENCE(load_store, CK_DMB_SY)
CK_PR_FENCE(memory, CK_DMB_SY)
CK_PR_FENCE(acquire, CK_DMB_SY)
CK_PR_FENCE(release, CK_DMB_SY)
CK_PR_FENCE(acqrel, CK_DMB_SY)
CK_PR_FENCE(lock, CK_DMB_SY)
CK_PR_FENCE(unlock, CK_DMB_SY)

#undef CK_PR_FENCE

#undef CK_DMB_SY
#undef CK_DMB_LD
#undef CK_DMB_ST

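/*
 * The narrow (32-bit and smaller) loads go through the W view of the
 * destination register ("%w0"), with ldrh/ldrb zero-extending; a separate
 * 64-bit variant is needed so pointers and 64-bit values use the full X
 * register ("%0").
 */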
#define CK_PR_LOAD(S, M, T, I)					\
	CK_CC_INLINE static T					\
	ck_pr_md_load_##S(const M *target)			\
	{							\
		long r = 0;					\
		__asm__ __volatile__(I " %w0, [%1]\n"		\
					: "=r" (r)		\
					: "r"  (target)		\
					: "memory");		\
		return ((T)r);					\
	}
#define CK_PR_LOAD_64(S, M, T, I)				\
	CK_CC_INLINE static T					\
	ck_pr_md_load_##S(const M *target)			\
	{							\
		long r = 0;					\
		__asm__ __volatile__(I " %0, [%1]\n"		\
					: "=r" (r)		\
					: "r"  (target)		\
					: "memory");		\
		return ((T)r);					\
	}

CK_PR_LOAD_64(ptr, void, void *, "ldr")

#define CK_PR_LOAD_S(S, T, I) CK_PR_LOAD(S, T, T, I)
#define CK_PR_LOAD_S_64(S, T, I) CK_PR_LOAD_64(S, T, T, I)

CK_PR_LOAD_S_64(64, uint64_t, "ldr")
CK_PR_LOAD_S(32, uint32_t, "ldr")
CK_PR_LOAD_S(16, uint16_t, "ldrh")
CK_PR_LOAD_S(8, uint8_t, "ldrb")
CK_PR_LOAD_S(uint, unsigned int, "ldr")
CK_PR_LOAD_S(int, int, "ldr")
CK_PR_LOAD_S(short, short, "ldrh")
CK_PR_LOAD_S(char, char, "ldrb")
#ifndef CK_PR_DISABLE_DOUBLE
CK_PR_LOAD_S_64(double, double, "ldr")
#endif

#undef CK_PR_LOAD_S
#undef CK_PR_LOAD_S_64
#undef CK_PR_LOAD
#undef CK_PR_LOAD_64

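/*
 * The stores mirror the loads above: the narrow variants write the W view
 * of the source register ("%w1"), while the 64-bit variant writes the full
 * X register ("%1").
 */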
#define CK_PR_STORE(S, M, T, I)					\
	CK_CC_INLINE static void				\
	ck_pr_md_store_##S(M *target, T v)			\
	{							\
		__asm__ __volatile__(I " %w1, [%0]"		\
					:			\
					: "r" (target),		\
					  "r" (v)		\
					: "memory");		\
		return;						\
	}
#define CK_PR_STORE_64(S, M, T, I)				\
	CK_CC_INLINE static void				\
	ck_pr_md_store_##S(M *target, T v)			\
	{							\
		__asm__ __volatile__(I " %1, [%0]"		\
					:			\
					: "r" (target),		\
					  "r" (v)		\
					: "memory");		\
		return;						\
	}

CK_PR_STORE_64(ptr, void, const void *, "str")

#define CK_PR_STORE_S(S, T, I) CK_PR_STORE(S, T, T, I)
#define CK_PR_STORE_S_64(S, T, I) CK_PR_STORE_64(S, T, T, I)

CK_PR_STORE_S_64(64, uint64_t, "str")
CK_PR_STORE_S(32, uint32_t, "str")
CK_PR_STORE_S(16, uint16_t, "strh")
CK_PR_STORE_S(8, uint8_t, "strb")
CK_PR_STORE_S(uint, unsigned int, "str")
CK_PR_STORE_S(int, int, "str")
CK_PR_STORE_S(short, short, "strh")
CK_PR_STORE_S(char, char, "strb")
#ifndef CK_PR_DISABLE_DOUBLE
CK_PR_STORE_S_64(double, double, "str")
#endif

#undef CK_PR_STORE_S
#undef CK_PR_STORE_S_64
#undef CK_PR_STORE
#undef CK_PR_STORE_64

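/*
 * The read-modify-write operations come from one of two backends:
 * ck_pr_lse.h targets the ARMv8.1 LSE atomic instructions when
 * CK_MD_LSE_ENABLE is defined, otherwise ck_pr_llsc.h implements them with
 * load-exclusive/store-exclusive (LL/SC) loops.
 */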
#ifdef CK_MD_LSE_ENABLE
#include "ck_pr_lse.h"
#else
#include "ck_pr_llsc.h"
#endif

/*
 * ck_pr_neg_*() functions can only be implemented via LL/SC, as there are no
 * LSE alternatives.
 */
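/*
 * Each ck_pr_neg_*() below is a standard LL/SC retry loop: ldxr performs a
 * load-exclusive of the target, neg computes the two's complement, stxr
 * attempts the exclusive store and writes its status into %w1 (zero on
 * success), and cbnz loops back to retry if the store failed.
 */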
#define CK_PR_NEG(N, M, T, W, R)				\
	CK_CC_INLINE static void				\
	ck_pr_neg_##N(M *target)				\
	{							\
		T previous = 0;					\
		T tmp = 0;					\
		__asm__ __volatile__("1:"			\
				     "ldxr" W " %" R "0, [%2]\n"\
				     "neg %" R "0, %" R "0\n"	\
				     "stxr" W " %w1, %" R "0, [%2]\n"	\
				     "cbnz %w1, 1b\n"		\
					: "=&r" (previous),	\
					  "=&r" (tmp)		\
					: "r"   (target)	\
					: "memory", "cc");	\
		return;						\
	}

CK_PR_NEG(ptr, void, void *, "", "")
CK_PR_NEG(64, uint64_t, uint64_t, "", "")

#define CK_PR_NEG_S(S, T, W)					\
	CK_PR_NEG(S, T, T, W, "w")

CK_PR_NEG_S(32, uint32_t, "")
CK_PR_NEG_S(uint, unsigned int, "")
CK_PR_NEG_S(int, int, "")
CK_PR_NEG_S(16, uint16_t, "h")
CK_PR_NEG_S(8, uint8_t, "b")
CK_PR_NEG_S(short, short, "h")
CK_PR_NEG_S(char, char, "b")

#undef CK_PR_NEG_S
#undef CK_PR_NEG

#endif /* CK_PR_AARCH64_H */