/*===---- arm_acle.h - ARM Non-Neon intrinsics -----------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */

#ifndef __ARM_ACLE_H
#define __ARM_ACLE_H

#ifndef __ARM_ACLE
#error "ACLE intrinsics support not enabled."
#endif

#include <stdint.h>

#if defined(__cplusplus)
extern "C" {
#endif

/* 8 SYNCHRONIZATION, BARRIER AND HINT INTRINSICS */
/* 8.3 Memory barriers */
#if !defined(_MSC_VER)
#define __dmb(i) __builtin_arm_dmb(i)
#define __dsb(i) __builtin_arm_dsb(i)
#define __isb(i) __builtin_arm_isb(i)
#endif
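
/* Example (illustrative, not part of the header proper): the barrier
 * argument selects the domain and kind; 0xF is "SY", the full-system
 * barrier.
 *
 *   extern volatile int data, flag;
 *   data = 42;
 *   __dmb(0xF);   // make the data store visible before the flag store
 *   flag = 1;
 */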

/* 8.4 Hints */

#if !defined(_MSC_VER)
static __inline__ void __attribute__((always_inline, nodebug)) __wfi(void) {
  __builtin_arm_wfi();
}

static __inline__ void __attribute__((always_inline, nodebug)) __wfe(void) {
  __builtin_arm_wfe();
}

static __inline__ void __attribute__((always_inline, nodebug)) __sev(void) {
  __builtin_arm_sev();
}

static __inline__ void __attribute__((always_inline, nodebug)) __sevl(void) {
  __builtin_arm_sevl();
}

static __inline__ void __attribute__((always_inline, nodebug)) __yield(void) {
  __builtin_arm_yield();
}
#endif

#if __ARM_32BIT_STATE
#define __dbg(t) __builtin_arm_dbg(t)
#endif

/* 8.5 Swap */
static __inline__ uint32_t __attribute__((always_inline, nodebug))
  __swp(uint32_t x, volatile uint32_t *p) {
  uint32_t v;
  do
    v = __builtin_arm_ldrex(p);
  while (__builtin_arm_strex(x, p));
  return v;
}
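
/* Example (illustrative): __swp atomically stores x and returns the old
 * value, which is enough for a minimal spinlock sketch:
 *
 *   static volatile uint32_t lock;
 *   while (__swp(1, &lock) != 0)
 *     __yield();   // old value was nonzero: already held, keep spinning
 */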

/* 8.6 Memory prefetch intrinsics */
/* 8.6.1 Data prefetch */
#define __pld(addr) __pldx(0, 0, 0, addr)

#if __ARM_32BIT_STATE
#define __pldx(access_kind, cache_level, retention_policy, addr) \
  __builtin_arm_prefetch(addr, access_kind, 1)
#else
#define __pldx(access_kind, cache_level, retention_policy, addr) \
  __builtin_arm_prefetch(addr, access_kind, cache_level, retention_policy, 1)
#endif

/* 8.6.2 Instruction prefetch */
#define __pli(addr) __plix(0, 0, addr)

#if __ARM_32BIT_STATE
#define __plix(cache_level, retention_policy, addr) \
  __builtin_arm_prefetch(addr, 0, 0)
#else
#define __plix(cache_level, retention_policy, addr) \
  __builtin_arm_prefetch(addr, 0, cache_level, retention_policy, 0)
#endif
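
/* Example (illustrative): prefetch p for a write with L1/temporal hints
 * (the extra hints are honored only where the target supports them):
 *
 *   __pldx(1, 0, 0, p);   // access_kind=1 (write), cache_level=0 (L1),
 *                         // retention_policy=0 (temporal)
 *   __pld(p);             // shorthand for a plain read prefetch
 */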

/* 8.7 NOP */
static __inline__ void __attribute__((always_inline, nodebug)) __nop(void) {
  __builtin_arm_nop();
}

/* 9 DATA-PROCESSING INTRINSICS */
/* 9.2 Miscellaneous data-processing intrinsics */
/* ROR */
static __inline__ uint32_t __attribute__((always_inline, nodebug))
  __ror(uint32_t x, uint32_t y) {
  y %= 32;
  if (y == 0)
    return x;
  return (x >> y) | (x << (32 - y));
}

static __inline__ uint64_t __attribute__((always_inline, nodebug))
  __rorll(uint64_t x, uint32_t y) {
  y %= 64;
  if (y == 0)
    return x;
  return (x >> y) | (x << (64 - y));
}

static __inline__ unsigned long __attribute__((always_inline, nodebug))
  __rorl(unsigned long x, uint32_t y) {
#if __SIZEOF_LONG__ == 4
  return __ror(x, y);
#else
  return __rorll(x, y);
#endif
}
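
/* Example: the rotate is over the operand's full width, so
 * __ror(0x12345678U, 8) == 0x78123456 and __rorll rotates all 64 bits. */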

/* CLZ */
static __inline__ uint32_t __attribute__((always_inline, nodebug))
  __clz(uint32_t t) {
  return __builtin_clz(t);
}

static __inline__ unsigned long __attribute__((always_inline, nodebug))
  __clzl(unsigned long t) {
  return __builtin_clzl(t);
}

static __inline__ uint64_t __attribute__((always_inline, nodebug))
  __clzll(uint64_t t) {
  return __builtin_clzll(t);
}

/* REV */
static __inline__ uint32_t __attribute__((always_inline, nodebug))
  __rev(uint32_t t) {
  return __builtin_bswap32(t);
}

static __inline__ unsigned long __attribute__((always_inline, nodebug))
  __revl(unsigned long t) {
#if __SIZEOF_LONG__ == 4
  return __builtin_bswap32(t);
#else
  return __builtin_bswap64(t);
#endif
}

static __inline__ uint64_t __attribute__((always_inline, nodebug))
  __revll(uint64_t t) {
  return __builtin_bswap64(t);
}

/* REV16 */
static __inline__ uint32_t __attribute__((always_inline, nodebug))
  __rev16(uint32_t t) {
  return __ror(__rev(t), 16);
}

/* REV16 reverses the bytes within each 16-bit halfword, so the 64-bit
 * variant is built from two 32-bit __rev16 operations; rotating the fully
 * byte-reversed value does not restore the halfword order. */
static __inline__ uint64_t __attribute__((always_inline, nodebug))
  __rev16ll(uint64_t t) {
  return (((uint64_t)__rev16((uint32_t)(t >> 32))) << 32) |
         (uint64_t)__rev16((uint32_t)t);
}

static __inline__ unsigned long __attribute__((always_inline, nodebug))
  __rev16l(unsigned long t) {
#if __SIZEOF_LONG__ == 4
  return __rev16(t);
#else
  return __rev16ll(t);
#endif
}

/* REVSH */
static __inline__ int16_t __attribute__((always_inline, nodebug))
  __revsh(int16_t t) {
  return __builtin_bswap16(t);
}

/* RBIT */
static __inline__ uint32_t __attribute__((always_inline, nodebug))
  __rbit(uint32_t t) {
  return __builtin_arm_rbit(t);
}

static __inline__ uint64_t __attribute__((always_inline, nodebug))
  __rbitll(uint64_t t) {
#if __ARM_32BIT_STATE
  return (((uint64_t) __builtin_arm_rbit(t)) << 32) |
    __builtin_arm_rbit(t >> 32);
#else
  return __builtin_arm_rbit64(t);
#endif
}

static __inline__ unsigned long __attribute__((always_inline, nodebug))
  __rbitl(unsigned long t) {
#if __SIZEOF_LONG__ == 4
  return __rbit(t);
#else
  return __rbitll(t);
#endif
}
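
/* Example: __rbit mirrors the bit order, so __rbit(0x1) == 0x80000000;
 * on AArch32, which has no 64-bit RBIT, the 64-bit form above reverses
 * each 32-bit half and swaps the halves. */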

/*
 * 9.4 Saturating intrinsics
 *
 * FIXME: Change guard to their corresponding __ARM_FEATURE flag when Q flag
 * intrinsics are implemented and the flag is enabled.
 */
/* 9.4.1 Width-specified saturation intrinsics */
#if __ARM_32BIT_STATE
#define __ssat(x, y) __builtin_arm_ssat(x, y)
#define __usat(x, y) __builtin_arm_usat(x, y)
#endif
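
/* Example (illustrative): the width argument must be a compile-time
 * constant; __ssat(x, 8) clamps x to the signed 8-bit range [-128, 127],
 * and __usat(x, 8) clamps it to the unsigned range [0, 255]. */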

/* 9.4.2 Saturating addition and subtraction intrinsics */
#if __ARM_32BIT_STATE
static __inline__ int32_t __attribute__((always_inline, nodebug))
  __qadd(int32_t t, int32_t v) {
  return __builtin_arm_qadd(t, v);
}

static __inline__ int32_t __attribute__((always_inline, nodebug))
  __qsub(int32_t t, int32_t v) {
  return __builtin_arm_qsub(t, v);
}

static __inline__ int32_t __attribute__((always_inline, nodebug))
  __qdbl(int32_t t) {
  return __builtin_arm_qadd(t, t);
}
#endif
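
/* Example: saturating arithmetic clamps instead of wrapping, so
 * __qadd(INT32_MAX, 1) returns INT32_MAX (and sets the sticky Q flag)
 * rather than overflowing; __qdbl(t) is a saturating 2*t. */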

/* 9.7 CRC32 intrinsics */
#if __ARM_FEATURE_CRC32
static __inline__ uint32_t __attribute__((always_inline, nodebug))
  __crc32b(uint32_t a, uint8_t b) {
  return __builtin_arm_crc32b(a, b);
}

static __inline__ uint32_t __attribute__((always_inline, nodebug))
  __crc32h(uint32_t a, uint16_t b) {
  return __builtin_arm_crc32h(a, b);
}

static __inline__ uint32_t __attribute__((always_inline, nodebug))
  __crc32w(uint32_t a, uint32_t b) {
  return __builtin_arm_crc32w(a, b);
}

static __inline__ uint32_t __attribute__((always_inline, nodebug))
  __crc32d(uint32_t a, uint64_t b) {
  return __builtin_arm_crc32d(a, b);
}

static __inline__ uint32_t __attribute__((always_inline, nodebug))
  __crc32cb(uint32_t a, uint8_t b) {
  return __builtin_arm_crc32cb(a, b);
}

static __inline__ uint32_t __attribute__((always_inline, nodebug))
  __crc32ch(uint32_t a, uint16_t b) {
  return __builtin_arm_crc32ch(a, b);
}

static __inline__ uint32_t __attribute__((always_inline, nodebug))
  __crc32cw(uint32_t a, uint32_t b) {
  return __builtin_arm_crc32cw(a, b);
}

static __inline__ uint32_t __attribute__((always_inline, nodebug))
  __crc32cd(uint32_t a, uint64_t b) {
  return __builtin_arm_crc32cd(a, b);
}
#endif
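
/* Example (illustrative): folding a buffer into a CRC-32C value byte by
 * byte, assuming the conventional ~0 initial value and final inversion:
 *
 *   uint32_t crc = ~0U;
 *   for (size_t i = 0; i < len; ++i)
 *     crc = __crc32cb(crc, buf[i]);
 *   crc = ~crc;
 */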

#if defined(__cplusplus)
}
#endif

#endif /* __ARM_ACLE_H */