1 /*===---- wasm_simd128.h - WebAssembly portable SIMD intrinsics ------------===
2 *
3 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 * See https://llvm.org/LICENSE.txt for license information.
5 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 *
7 *===-----------------------------------------------------------------------===
8 */
9
10 #ifndef __WASM_SIMD128_H
11 #define __WASM_SIMD128_H
12
13 #include <stdbool.h>
14 #include <stdint.h>
15
// User-facing type: an opaque 128-bit vector. Lane interpretation is chosen
// per intrinsic by casting to one of the internal lane-typed vectors below.
typedef int32_t v128_t __attribute__((__vector_size__(16), __aligned__(16)));

// Internal types determined by clang builtin definitions
// __v128_u is 1-byte aligned: used to express unaligned memory accesses.
typedef int32_t __v128_u __attribute__((__vector_size__(16), __aligned__(1)));
// 128-bit lane-typed views of a v128; they make lane count, lane width, and
// signedness of each operation explicit.
typedef signed char __i8x16
    __attribute__((__vector_size__(16), __aligned__(16)));
typedef unsigned char __u8x16
    __attribute__((__vector_size__(16), __aligned__(16)));
typedef short __i16x8 __attribute__((__vector_size__(16), __aligned__(16)));
typedef unsigned short __u16x8
    __attribute__((__vector_size__(16), __aligned__(16)));
typedef int __i32x4 __attribute__((__vector_size__(16), __aligned__(16)));
typedef unsigned int __u32x4
    __attribute__((__vector_size__(16), __aligned__(16)));
typedef long long __i64x2 __attribute__((__vector_size__(16), __aligned__(16)));
typedef unsigned long long __u64x2
    __attribute__((__vector_size__(16), __aligned__(16)));
typedef float __f32x4 __attribute__((__vector_size__(16), __aligned__(16)));
typedef double __f64x2 __attribute__((__vector_size__(16), __aligned__(16)));

// 64-bit half-width vectors: source operands of the widening
// ("load and extend") intrinsics below.
typedef signed char __i8x8 __attribute__((__vector_size__(8), __aligned__(8)));
typedef unsigned char __u8x8
    __attribute__((__vector_size__(8), __aligned__(8)));
typedef short __i16x4 __attribute__((__vector_size__(8), __aligned__(8)));
typedef unsigned short __u16x4
    __attribute__((__vector_size__(8), __aligned__(8)));
typedef int __i32x2 __attribute__((__vector_size__(8), __aligned__(8)));
typedef unsigned int __u32x2
    __attribute__((__vector_size__(8), __aligned__(8)));
46
// Attributes applied to every intrinsic below: always inline, no debug info,
// and require the "simd128" target feature.
#define __DEFAULT_FN_ATTRS                                                     \
  __attribute__((__always_inline__, __nodebug__, __target__("simd128"),        \
                 __min_vector_width__(128)))

// Emits a compile-time error unless the argument is a constant expression.
// Used by the wasm_*_const intrinsics, which must lower to v128.const.
#define __REQUIRE_CONSTANT(c)                                                  \
  __attribute__((__diagnose_if__(!__builtin_constant_p(c),                     \
                                 #c " must be constant", "error")))
54
// Loads 16 bytes from __mem, which need not be aligned.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_load(const void *__mem) {
  // UB-free unaligned access copied from xmmintrin.h
  struct __wasm_v128_load_struct {
    __v128_u __v;
  } __attribute__((__packed__, __may_alias__));
  return ((const struct __wasm_v128_load_struct *)__mem)->__v;
}
62
// wasm_v128_loadN_splat: loads a single N-bit value from __mem (any
// alignment) and replicates it into every lane of the result. The
// packed/may_alias struct makes the unaligned read well-defined.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load8_splat(const void *__mem) {
  struct __wasm_v128_load8_splat_struct {
    uint8_t __v;
  } __attribute__((__packed__, __may_alias__));
  uint8_t __v = ((const struct __wasm_v128_load8_splat_struct *)__mem)->__v;
  return (v128_t)(__u8x16){__v, __v, __v, __v, __v, __v, __v, __v,
                           __v, __v, __v, __v, __v, __v, __v, __v};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load16_splat(const void *__mem) {
  struct __wasm_v128_load16_splat_struct {
    uint16_t __v;
  } __attribute__((__packed__, __may_alias__));
  uint16_t __v = ((const struct __wasm_v128_load16_splat_struct *)__mem)->__v;
  return (v128_t)(__u16x8){__v, __v, __v, __v, __v, __v, __v, __v};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load32_splat(const void *__mem) {
  struct __wasm_v128_load32_splat_struct {
    uint32_t __v;
  } __attribute__((__packed__, __may_alias__));
  uint32_t __v = ((const struct __wasm_v128_load32_splat_struct *)__mem)->__v;
  return (v128_t)(__u32x4){__v, __v, __v, __v};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load64_splat(const void *__mem) {
  struct __wasm_v128_load64_splat_struct {
    uint64_t __v;
  } __attribute__((__packed__, __may_alias__));
  uint64_t __v = ((const struct __wasm_v128_load64_splat_struct *)__mem)->__v;
  return (v128_t)(__u64x2){__v, __v};
}
99
// Load-and-extend family: each intrinsic loads a 64-bit vector of narrow
// integers from __mem (any alignment) and widens every element to the next
// lane size — i-prefixed variants sign-extend, u-prefixed zero-extend.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_load8x8(const void *__mem) {
  struct __wasm_i16x8_load8x8_struct {
    __i8x8 __v;
  } __attribute__((__packed__, __may_alias__));
  __i8x8 __v = ((const struct __wasm_i16x8_load8x8_struct *)__mem)->__v;
  return (v128_t) __builtin_convertvector(__v, __i16x8);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_load8x8(const void *__mem) {
  struct __wasm_u16x8_load8x8_struct {
    __u8x8 __v;
  } __attribute__((__packed__, __may_alias__));
  __u8x8 __v = ((const struct __wasm_u16x8_load8x8_struct *)__mem)->__v;
  return (v128_t) __builtin_convertvector(__v, __u16x8);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_load16x4(const void *__mem) {
  struct __wasm_i32x4_load16x4_struct {
    __i16x4 __v;
  } __attribute__((__packed__, __may_alias__));
  __i16x4 __v = ((const struct __wasm_i32x4_load16x4_struct *)__mem)->__v;
  return (v128_t) __builtin_convertvector(__v, __i32x4);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_load16x4(const void *__mem) {
  struct __wasm_u32x4_load16x4_struct {
    __u16x4 __v;
  } __attribute__((__packed__, __may_alias__));
  __u16x4 __v = ((const struct __wasm_u32x4_load16x4_struct *)__mem)->__v;
  return (v128_t) __builtin_convertvector(__v, __u32x4);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_load32x2(const void *__mem) {
  struct __wasm_i64x2_load32x2_struct {
    __i32x2 __v;
  } __attribute__((__packed__, __may_alias__));
  __i32x2 __v = ((const struct __wasm_i64x2_load32x2_struct *)__mem)->__v;
  return (v128_t) __builtin_convertvector(__v, __i64x2);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_load32x2(const void *__mem) {
  struct __wasm_u64x2_load32x2_struct {
    __u32x2 __v;
  } __attribute__((__packed__, __may_alias__));
  __u32x2 __v = ((const struct __wasm_u64x2_load32x2_struct *)__mem)->__v;
  return (v128_t) __builtin_convertvector(__v, __u64x2);
}
153
// Loads a single 32-bit value into lane 0 and zeroes the remaining lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load32_zero(const void *__mem) {
  struct __wasm_v128_load32_zero_struct {
    int32_t __v;
  } __attribute__((__packed__, __may_alias__));
  int32_t __v = ((const struct __wasm_v128_load32_zero_struct *)__mem)->__v;
  return (v128_t)(__i32x4){__v, 0, 0, 0};
}

// Loads a single 64-bit value into lane 0 and zeroes lane 1.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load64_zero(const void *__mem) {
  struct __wasm_v128_load64_zero_struct {
    int64_t __v;
  } __attribute__((__packed__, __may_alias__));
  int64_t __v = ((const struct __wasm_v128_load64_zero_struct *)__mem)->__v;
  return (v128_t)(__i64x2){__v, 0};
}
171
// wasm_v128_loadN_lane(__ptr, __vec, __i): loads one N-bit value from __ptr
// into lane __i of __vec, leaving all other lanes unchanged. These are
// macros because the underlying builtins require __i to be a constant lane
// index.
#define wasm_v128_load8_lane(__ptr, __vec, __i)                                \
  ((v128_t)__builtin_wasm_load8_lane((const signed char *)(__ptr),             \
                                     (__i8x16)(__vec), (__i)))

#define wasm_v128_load16_lane(__ptr, __vec, __i)                               \
  ((v128_t)__builtin_wasm_load16_lane((const short *)(__ptr),                  \
                                      (__i16x8)(__vec), (__i)))

#define wasm_v128_load32_lane(__ptr, __vec, __i)                               \
  ((v128_t)__builtin_wasm_load32_lane((const int *)(__ptr), (__i32x4)(__vec),  \
                                      (__i)))

#define wasm_v128_load64_lane(__ptr, __vec, __i)                               \
  ((v128_t)__builtin_wasm_load64_lane((const long long int *)(__ptr),          \
                                      (__i64x2)(__vec), (__i)))
187
// Stores the 16 bytes of __a to __mem, which need not be aligned.
static __inline__ void __DEFAULT_FN_ATTRS wasm_v128_store(void *__mem,
                                                          v128_t __a) {
  // UB-free unaligned access copied from xmmintrin.h
  struct __wasm_v128_store_struct {
    __v128_u __v;
  } __attribute__((__packed__, __may_alias__));
  ((struct __wasm_v128_store_struct *)__mem)->__v = __a;
}
196
// wasm_v128_storeN_lane(__ptr, __vec, __i): stores lane __i of __vec to
// __ptr. Macros because the builtins require a constant lane index.
#define wasm_v128_store8_lane(__ptr, __vec, __i)                               \
  (__builtin_wasm_store8_lane((signed char *)(__ptr), (__i8x16)(__vec), (__i)))

#define wasm_v128_store16_lane(__ptr, __vec, __i)                              \
  (__builtin_wasm_store16_lane((short *)(__ptr), (__i16x8)(__vec), (__i)))

#define wasm_v128_store32_lane(__ptr, __vec, __i)                              \
  (__builtin_wasm_store32_lane((int *)(__ptr), (__i32x4)(__vec), (__i)))

#define wasm_v128_store64_lane(__ptr, __vec, __i)                              \
  (__builtin_wasm_store64_lane((long long int *)(__ptr), (__i64x2)(__vec),     \
                               (__i)))
209
// wasm_*_make: builds a vector from individually specified lane values,
// lane 0 first. Arguments need not be compile-time constants (contrast
// with wasm_*_const below).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_make(int8_t __c0, int8_t __c1, int8_t __c2, int8_t __c3, int8_t __c4,
                int8_t __c5, int8_t __c6, int8_t __c7, int8_t __c8, int8_t __c9,
                int8_t __c10, int8_t __c11, int8_t __c12, int8_t __c13,
                int8_t __c14, int8_t __c15) {
  return (v128_t)(__i8x16){__c0, __c1, __c2, __c3, __c4, __c5,
                           __c6, __c7, __c8, __c9, __c10, __c11,
                           __c12, __c13, __c14, __c15};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_make(int16_t __c0, int16_t __c1, int16_t __c2, int16_t __c3,
                int16_t __c4, int16_t __c5, int16_t __c6, int16_t __c7) {
  return (v128_t)(__i16x8){__c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_make(int32_t __c0,
                                                            int32_t __c1,
                                                            int32_t __c2,
                                                            int32_t __c3) {
  return (v128_t)(__i32x4){__c0, __c1, __c2, __c3};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_make(int64_t __c0,
                                                            int64_t __c1) {
  return (v128_t)(__i64x2){__c0, __c1};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_make(float __c0,
                                                            float __c1,
                                                            float __c2,
                                                            float __c3) {
  return (v128_t)(__f32x4){__c0, __c1, __c2, __c3};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_make(double __c0,
                                                            double __c1) {
  return (v128_t)(__f64x2){__c0, __c1};
}
249
// wasm_*_const: like wasm_*_make, but every lane value must be a constant
// expression (enforced at compile time by __REQUIRE_CONSTANT).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_const(int8_t __c0, int8_t __c1, int8_t __c2, int8_t __c3,
                 int8_t __c4, int8_t __c5, int8_t __c6, int8_t __c7,
                 int8_t __c8, int8_t __c9, int8_t __c10, int8_t __c11,
                 int8_t __c12, int8_t __c13, int8_t __c14, int8_t __c15)
    __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) __REQUIRE_CONSTANT(__c2)
        __REQUIRE_CONSTANT(__c3) __REQUIRE_CONSTANT(__c4)
            __REQUIRE_CONSTANT(__c5) __REQUIRE_CONSTANT(__c6)
                __REQUIRE_CONSTANT(__c7) __REQUIRE_CONSTANT(__c8)
                    __REQUIRE_CONSTANT(__c9) __REQUIRE_CONSTANT(__c10)
                        __REQUIRE_CONSTANT(__c11) __REQUIRE_CONSTANT(__c12)
                            __REQUIRE_CONSTANT(__c13) __REQUIRE_CONSTANT(__c14)
                                __REQUIRE_CONSTANT(__c15) {
  return (v128_t)(__i8x16){__c0, __c1, __c2, __c3, __c4, __c5,
                           __c6, __c7, __c8, __c9, __c10, __c11,
                           __c12, __c13, __c14, __c15};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_const(int16_t __c0, int16_t __c1, int16_t __c2, int16_t __c3,
                 int16_t __c4, int16_t __c5, int16_t __c6, int16_t __c7)
    __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) __REQUIRE_CONSTANT(__c2)
        __REQUIRE_CONSTANT(__c3) __REQUIRE_CONSTANT(__c4)
            __REQUIRE_CONSTANT(__c5) __REQUIRE_CONSTANT(__c6)
                __REQUIRE_CONSTANT(__c7) {
  return (v128_t)(__i16x8){__c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_const(int32_t __c0, int32_t __c1, int32_t __c2, int32_t __c3)
    __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) __REQUIRE_CONSTANT(__c2)
        __REQUIRE_CONSTANT(__c3) {
  return (v128_t)(__i32x4){__c0, __c1, __c2, __c3};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_const(int64_t __c0,
                                                             int64_t __c1)
    __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) {
  return (v128_t)(__i64x2){__c0, __c1};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_const(float __c0, float __c1, float __c2, float __c3)
    __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) __REQUIRE_CONSTANT(__c2)
        __REQUIRE_CONSTANT(__c3) {
  return (v128_t)(__f32x4){__c0, __c1, __c2, __c3};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_const(double __c0,
                                                             double __c1)
    __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) {
  return (v128_t)(__f64x2){__c0, __c1};
}
303
// wasm_*_const_splat: replicates one compile-time-constant value into
// every lane.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_const_splat(int8_t __c)
    __REQUIRE_CONSTANT(__c) {
  return (v128_t)(__i8x16){__c, __c, __c, __c, __c, __c, __c, __c,
                           __c, __c, __c, __c, __c, __c, __c, __c};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_const_splat(int16_t __c)
    __REQUIRE_CONSTANT(__c) {
  return (v128_t)(__i16x8){__c, __c, __c, __c, __c, __c, __c, __c};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_const_splat(int32_t __c)
    __REQUIRE_CONSTANT(__c) {
  return (v128_t)(__i32x4){__c, __c, __c, __c};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_const_splat(int64_t __c)
    __REQUIRE_CONSTANT(__c) {
  return (v128_t)(__i64x2){__c, __c};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_const_splat(float __c)
    __REQUIRE_CONSTANT(__c) {
  return (v128_t)(__f32x4){__c, __c, __c, __c};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_const_splat(double __c)
    __REQUIRE_CONSTANT(__c) {
  return (v128_t)(__f64x2){__c, __c};
}
334
// wasm_*_splat: replicates a (possibly runtime) scalar into every lane.
//
// wasm_*_extract_lane / wasm_*_replace_lane: read or overwrite a single
// lane; __i must be a constant lane index (macros because the builtins
// require it). For 8- and 16-bit lanes, extracts come in sign-extending
// (i-prefixed, _s builtin) and zero-extending (u-prefixed, _u builtin)
// variants.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_splat(int8_t __a) {
  return (v128_t)(__i8x16){__a, __a, __a, __a, __a, __a, __a, __a,
                           __a, __a, __a, __a, __a, __a, __a, __a};
}

#define wasm_i8x16_extract_lane(__a, __i)                                      \
  (__builtin_wasm_extract_lane_s_i8x16((__i8x16)(__a), __i))

#define wasm_u8x16_extract_lane(__a, __i)                                      \
  (__builtin_wasm_extract_lane_u_i8x16((__u8x16)(__a), __i))

#define wasm_i8x16_replace_lane(__a, __i, __b)                                 \
  ((v128_t)__builtin_wasm_replace_lane_i8x16((__i8x16)(__a), __i, __b))

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_splat(int16_t __a) {
  return (v128_t)(__i16x8){__a, __a, __a, __a, __a, __a, __a, __a};
}

#define wasm_i16x8_extract_lane(__a, __i)                                      \
  (__builtin_wasm_extract_lane_s_i16x8((__i16x8)(__a), __i))

#define wasm_u16x8_extract_lane(__a, __i)                                      \
  (__builtin_wasm_extract_lane_u_i16x8((__u16x8)(__a), __i))

#define wasm_i16x8_replace_lane(__a, __i, __b)                                 \
  ((v128_t)__builtin_wasm_replace_lane_i16x8((__i16x8)(__a), __i, __b))

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_splat(int32_t __a) {
  return (v128_t)(__i32x4){__a, __a, __a, __a};
}

#define wasm_i32x4_extract_lane(__a, __i)                                      \
  (__builtin_wasm_extract_lane_i32x4((__i32x4)(__a), __i))

#define wasm_i32x4_replace_lane(__a, __i, __b)                                 \
  ((v128_t)__builtin_wasm_replace_lane_i32x4((__i32x4)(__a), __i, __b))

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_splat(int64_t __a) {
  return (v128_t)(__i64x2){__a, __a};
}

#define wasm_i64x2_extract_lane(__a, __i)                                      \
  (__builtin_wasm_extract_lane_i64x2((__i64x2)(__a), __i))

#define wasm_i64x2_replace_lane(__a, __i, __b)                                 \
  ((v128_t)__builtin_wasm_replace_lane_i64x2((__i64x2)(__a), __i, __b))

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_splat(float __a) {
  return (v128_t)(__f32x4){__a, __a, __a, __a};
}

#define wasm_f32x4_extract_lane(__a, __i)                                      \
  (__builtin_wasm_extract_lane_f32x4((__f32x4)(__a), __i))

#define wasm_f32x4_replace_lane(__a, __i, __b)                                 \
  ((v128_t)__builtin_wasm_replace_lane_f32x4((__f32x4)(__a), __i, __b))

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_splat(double __a) {
  return (v128_t)(__f64x2){__a, __a};
}

#define wasm_f64x2_extract_lane(__a, __i)                                      \
  (__builtin_wasm_extract_lane_f64x2((__f64x2)(__a), __i))

#define wasm_f64x2_replace_lane(__a, __i, __b)                                 \
  ((v128_t)__builtin_wasm_replace_lane_f64x2((__f64x2)(__a), __i, __b))
401
// 8x16 lanewise comparisons. Each result lane is all ones when the
// comparison holds and all zeros otherwise (GNU vector-extension
// comparison semantics). i8x16_* compare lanes as signed, u8x16_* as
// unsigned; eq/ne are signedness-agnostic.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_eq(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i8x16)__a == (__i8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_ne(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i8x16)__a != (__i8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_lt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i8x16)__a < (__i8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_lt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u8x16)__a < (__u8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_gt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i8x16)__a > (__i8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_gt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u8x16)__a > (__u8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_le(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i8x16)__a <= (__i8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_le(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u8x16)__a <= (__u8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_ge(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i8x16)__a >= (__i8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_ge(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u8x16)__a >= (__u8x16)__b);
}
451
wasm_i16x8_eq(v128_t __a,v128_t __b)452 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_eq(v128_t __a,
453 v128_t __b) {
454 return (v128_t)((__i16x8)__a == (__i16x8)__b);
455 }
456
wasm_i16x8_ne(v128_t __a,v128_t __b)457 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_ne(v128_t __a,
458 v128_t __b) {
459 return (v128_t)((__u16x8)__a != (__u16x8)__b);
460 }
461
wasm_i16x8_lt(v128_t __a,v128_t __b)462 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_lt(v128_t __a,
463 v128_t __b) {
464 return (v128_t)((__i16x8)__a < (__i16x8)__b);
465 }
466
wasm_u16x8_lt(v128_t __a,v128_t __b)467 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_lt(v128_t __a,
468 v128_t __b) {
469 return (v128_t)((__u16x8)__a < (__u16x8)__b);
470 }
471
wasm_i16x8_gt(v128_t __a,v128_t __b)472 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_gt(v128_t __a,
473 v128_t __b) {
474 return (v128_t)((__i16x8)__a > (__i16x8)__b);
475 }
476
wasm_u16x8_gt(v128_t __a,v128_t __b)477 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_gt(v128_t __a,
478 v128_t __b) {
479 return (v128_t)((__u16x8)__a > (__u16x8)__b);
480 }
481
wasm_i16x8_le(v128_t __a,v128_t __b)482 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_le(v128_t __a,
483 v128_t __b) {
484 return (v128_t)((__i16x8)__a <= (__i16x8)__b);
485 }
486
wasm_u16x8_le(v128_t __a,v128_t __b)487 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_le(v128_t __a,
488 v128_t __b) {
489 return (v128_t)((__u16x8)__a <= (__u16x8)__b);
490 }
491
wasm_i16x8_ge(v128_t __a,v128_t __b)492 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_ge(v128_t __a,
493 v128_t __b) {
494 return (v128_t)((__i16x8)__a >= (__i16x8)__b);
495 }
496
wasm_u16x8_ge(v128_t __a,v128_t __b)497 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_ge(v128_t __a,
498 v128_t __b) {
499 return (v128_t)((__u16x8)__a >= (__u16x8)__b);
500 }
501
// 32x4 lanewise comparisons. Each result lane is all ones when the
// comparison holds and all zeros otherwise. i32x4_* compare lanes as
// signed, u32x4_* as unsigned.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_eq(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i32x4)__a == (__i32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_ne(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i32x4)__a != (__i32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_lt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i32x4)__a < (__i32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_lt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u32x4)__a < (__u32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_gt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i32x4)__a > (__i32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_gt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u32x4)__a > (__u32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_le(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i32x4)__a <= (__i32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_le(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u32x4)__a <= (__u32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_ge(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i32x4)__a >= (__i32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_ge(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u32x4)__a >= (__u32x4)__b);
}
551
// 64x2 lanewise comparisons (signed only — the wasm SIMD instruction set
// has no unsigned 64-bit compares). All-ones lane on true, zero on false.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_eq(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i64x2)__a == (__i64x2)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_ne(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i64x2)__a != (__i64x2)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_lt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i64x2)__a < (__i64x2)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_gt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i64x2)__a > (__i64x2)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_le(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i64x2)__a <= (__i64x2)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_ge(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i64x2)__a >= (__i64x2)__b);
}
581
// f32x4 lanewise comparisons: all-ones lane on true, zero on false.
// Ordinary C float comparison semantics apply lanewise.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_eq(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f32x4)__a == (__f32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_ne(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f32x4)__a != (__f32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_lt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f32x4)__a < (__f32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_gt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f32x4)__a > (__f32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_le(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f32x4)__a <= (__f32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_ge(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f32x4)__a >= (__f32x4)__b);
}
611
// f64x2 lanewise comparisons: all-ones lane on true, zero on false.
// Ordinary C double comparison semantics apply lanewise.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_eq(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f64x2)__a == (__f64x2)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_ne(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f64x2)__a != (__f64x2)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_lt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f64x2)__a < (__f64x2)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_gt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f64x2)__a > (__f64x2)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_le(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f64x2)__a <= (__f64x2)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_ge(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f64x2)__a >= (__f64x2)__b);
}
641
// Whole-vector bitwise operations (lane-type agnostic).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_not(v128_t __a) {
  return ~__a;
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_and(v128_t __a,
                                                          v128_t __b) {
  return __a & __b;
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_or(v128_t __a,
                                                         v128_t __b) {
  return __a | __b;
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_xor(v128_t __a,
                                                          v128_t __b) {
  return __a ^ __b;
}

// Note the operand order: the SECOND operand is complemented (__a & ~__b).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_andnot(v128_t __a,
                                                             v128_t __b) {
  return __a & ~__b;
}
665
// Returns true if any bit of __a is set (wasm v128.any_true).
static __inline__ bool __DEFAULT_FN_ATTRS wasm_v128_any_true(v128_t __a) {
  return __builtin_wasm_any_true_v128((__i8x16)__a);
}

// Bitwise select: for each bit, takes the bit from __a where the
// corresponding __mask bit is 1, and from __b where it is 0.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_bitselect(v128_t __a,
                                                                v128_t __b,
                                                                v128_t __mask) {
  return (v128_t)__builtin_wasm_bitselect((__i32x4)__a, (__i32x4)__b,
                                          (__i32x4)__mask);
}
676
// Lanewise absolute value of signed 8-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_abs(v128_t __a) {
  return (v128_t)__builtin_wasm_abs_i8x16((__i8x16)__a);
}

// Lanewise negation; done on unsigned lanes so the wrap at the most
// negative value is well-defined (no signed-overflow UB).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_neg(v128_t __a) {
  return (v128_t)(-(__u8x16)__a);
}

// True if every 8-bit lane is nonzero.
static __inline__ bool __DEFAULT_FN_ATTRS wasm_i8x16_all_true(v128_t __a) {
  return __builtin_wasm_all_true_i8x16((__i8x16)__a);
}

// Packs the top bit of each of the 16 lanes into the low bits of the result.
static __inline__ int32_t __DEFAULT_FN_ATTRS wasm_i8x16_bitmask(v128_t __a) {
  return __builtin_wasm_bitmask_i8x16((__i8x16)__a);
}

// Lanewise population count (number of set bits per 8-bit lane).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_popcnt(v128_t __a) {
  return (v128_t)__builtin_wasm_popcnt_i8x16((__i8x16)__a);
}
696
wasm_i8x16_shl(v128_t __a,int32_t __b)697 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_shl(v128_t __a,
698 int32_t __b) {
699 return (v128_t)((__i8x16)__a << __b);
700 }
701
wasm_i8x16_shr(v128_t __a,int32_t __b)702 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_shr(v128_t __a,
703 int32_t __b) {
704 return (v128_t)((__i8x16)__a >> __b);
705 }
706
wasm_u8x16_shr(v128_t __a,int32_t __b)707 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_shr(v128_t __a,
708 int32_t __b) {
709 return (v128_t)((__u8x16)__a >> __b);
710 }
711
// Lanewise wrapping 8-bit add; computed on unsigned lanes so wraparound is
// well-defined (bit pattern identical to two's-complement signed add).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_add(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__u8x16)__a + (__u8x16)__b);
}

// Lanewise saturating add, signed (clamps to [-128, 127]).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_add_sat(v128_t __a,
                                                               v128_t __b) {
  return (v128_t)__builtin_wasm_add_sat_s_i8x16((__i8x16)__a, (__i8x16)__b);
}

// Lanewise saturating add, unsigned (clamps to [0, 255]).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_add_sat(v128_t __a,
                                                               v128_t __b) {
  return (v128_t)__builtin_wasm_add_sat_u_i8x16((__u8x16)__a, (__u8x16)__b);
}

// Lanewise wrapping 8-bit subtract; unsigned lanes for UB-free wraparound.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_sub(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__u8x16)__a - (__u8x16)__b);
}

// Lanewise saturating subtract, signed.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_sub_sat(v128_t __a,
                                                               v128_t __b) {
  return (v128_t)__builtin_wasm_sub_sat_s_i8x16((__i8x16)__a, (__i8x16)__b);
}

// Lanewise saturating subtract, unsigned (clamps at 0).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_sub_sat(v128_t __a,
                                                               v128_t __b) {
  return (v128_t)__builtin_wasm_sub_sat_u_i8x16((__u8x16)__a, (__u8x16)__b);
}

// Lanewise minimum / maximum, signed (_s builtins) and unsigned (_u).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_min(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_min_s_i8x16((__i8x16)__a, (__i8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_min(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_min_u_i8x16((__u8x16)__a, (__u8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_max(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_max_s_i8x16((__i8x16)__a, (__i8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_max(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_max_u_i8x16((__u8x16)__a, (__u8x16)__b);
}

// Lanewise unsigned rounding average: (a + b + 1) / 2.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_avgr(v128_t __a,
                                                            v128_t __b) {
  return (v128_t)__builtin_wasm_avgr_u_i8x16((__u8x16)__a, (__u8x16)__b);
}
766
// Lanewise absolute value of signed 16-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_abs(v128_t __a) {
  return (v128_t)__builtin_wasm_abs_i16x8((__i16x8)__a);
}

// Lanewise negation; unsigned lanes make the wrap at the most negative
// value well-defined (no signed-overflow UB).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_neg(v128_t __a) {
  return (v128_t)(-(__u16x8)__a);
}

// True if every 16-bit lane is nonzero.
static __inline__ bool __DEFAULT_FN_ATTRS wasm_i16x8_all_true(v128_t __a) {
  return __builtin_wasm_all_true_i16x8((__i16x8)__a);
}

// Packs the top bit of each of the 8 lanes into the low bits of the result.
static __inline__ int32_t __DEFAULT_FN_ATTRS wasm_i16x8_bitmask(v128_t __a) {
  return __builtin_wasm_bitmask_i16x8((__i16x8)__a);
}
782
wasm_i16x8_shl(v128_t __a,int32_t __b)783 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_shl(v128_t __a,
784 int32_t __b) {
785 return (v128_t)((__i16x8)__a << __b);
786 }
787
wasm_i16x8_shr(v128_t __a,int32_t __b)788 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_shr(v128_t __a,
789 int32_t __b) {
790 return (v128_t)((__i16x8)__a >> __b);
791 }
792
wasm_u16x8_shr(v128_t __a,int32_t __b)793 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_shr(v128_t __a,
794 int32_t __b) {
795 return (v128_t)((__u16x8)__a >> __b);
796 }
797
wasm_i16x8_add(v128_t __a,v128_t __b)798 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_add(v128_t __a,
799 v128_t __b) {
800 return (v128_t)((__u16x8)__a + (__u16x8)__b);
801 }
802
wasm_i16x8_add_sat(v128_t __a,v128_t __b)803 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_add_sat(v128_t __a,
804 v128_t __b) {
805 return (v128_t)__builtin_wasm_add_sat_s_i16x8((__i16x8)__a, (__i16x8)__b);
806 }
807
wasm_u16x8_add_sat(v128_t __a,v128_t __b)808 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_add_sat(v128_t __a,
809 v128_t __b) {
810 return (v128_t)__builtin_wasm_add_sat_u_i16x8((__u16x8)__a, (__u16x8)__b);
811 }
812
wasm_i16x8_sub(v128_t __a,v128_t __b)813 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_sub(v128_t __a,
814 v128_t __b) {
815 return (v128_t)((__i16x8)__a - (__i16x8)__b);
816 }
817
wasm_i16x8_sub_sat(v128_t __a,v128_t __b)818 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_sub_sat(v128_t __a,
819 v128_t __b) {
820 return (v128_t)__builtin_wasm_sub_sat_s_i16x8((__i16x8)__a, (__i16x8)__b);
821 }
822
wasm_u16x8_sub_sat(v128_t __a,v128_t __b)823 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_sub_sat(v128_t __a,
824 v128_t __b) {
825 return (v128_t)__builtin_wasm_sub_sat_u_i16x8((__u16x8)__a, (__u16x8)__b);
826 }
827
wasm_i16x8_mul(v128_t __a,v128_t __b)828 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_mul(v128_t __a,
829 v128_t __b) {
830 return (v128_t)((__u16x8)__a * (__u16x8)__b);
831 }
832
wasm_i16x8_min(v128_t __a,v128_t __b)833 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_min(v128_t __a,
834 v128_t __b) {
835 return (v128_t)__builtin_wasm_min_s_i16x8((__i16x8)__a, (__i16x8)__b);
836 }
837
wasm_u16x8_min(v128_t __a,v128_t __b)838 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_min(v128_t __a,
839 v128_t __b) {
840 return (v128_t)__builtin_wasm_min_u_i16x8((__u16x8)__a, (__u16x8)__b);
841 }
842
wasm_i16x8_max(v128_t __a,v128_t __b)843 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_max(v128_t __a,
844 v128_t __b) {
845 return (v128_t)__builtin_wasm_max_s_i16x8((__i16x8)__a, (__i16x8)__b);
846 }
847
wasm_u16x8_max(v128_t __a,v128_t __b)848 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_max(v128_t __a,
849 v128_t __b) {
850 return (v128_t)__builtin_wasm_max_u_i16x8((__u16x8)__a, (__u16x8)__b);
851 }
852
wasm_u16x8_avgr(v128_t __a,v128_t __b)853 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_avgr(v128_t __a,
854 v128_t __b) {
855 return (v128_t)__builtin_wasm_avgr_u_i16x8((__u16x8)__a, (__u16x8)__b);
856 }
857
wasm_i32x4_abs(v128_t __a)858 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_abs(v128_t __a) {
859 return (v128_t)__builtin_wasm_abs_i32x4((__i32x4)__a);
860 }
861
wasm_i32x4_neg(v128_t __a)862 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_neg(v128_t __a) {
863 return (v128_t)(-(__u32x4)__a);
864 }
865
wasm_i32x4_all_true(v128_t __a)866 static __inline__ bool __DEFAULT_FN_ATTRS wasm_i32x4_all_true(v128_t __a) {
867 return __builtin_wasm_all_true_i32x4((__i32x4)__a);
868 }
869
wasm_i32x4_bitmask(v128_t __a)870 static __inline__ int32_t __DEFAULT_FN_ATTRS wasm_i32x4_bitmask(v128_t __a) {
871 return __builtin_wasm_bitmask_i32x4((__i32x4)__a);
872 }
873
wasm_i32x4_shl(v128_t __a,int32_t __b)874 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_shl(v128_t __a,
875 int32_t __b) {
876 return (v128_t)((__i32x4)__a << __b);
877 }
878
wasm_i32x4_shr(v128_t __a,int32_t __b)879 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_shr(v128_t __a,
880 int32_t __b) {
881 return (v128_t)((__i32x4)__a >> __b);
882 }
883
wasm_u32x4_shr(v128_t __a,int32_t __b)884 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_shr(v128_t __a,
885 int32_t __b) {
886 return (v128_t)((__u32x4)__a >> __b);
887 }
888
wasm_i32x4_add(v128_t __a,v128_t __b)889 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_add(v128_t __a,
890 v128_t __b) {
891 return (v128_t)((__u32x4)__a + (__u32x4)__b);
892 }
893
wasm_i32x4_sub(v128_t __a,v128_t __b)894 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_sub(v128_t __a,
895 v128_t __b) {
896 return (v128_t)((__u32x4)__a - (__u32x4)__b);
897 }
898
wasm_i32x4_mul(v128_t __a,v128_t __b)899 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_mul(v128_t __a,
900 v128_t __b) {
901 return (v128_t)((__u32x4)__a * (__u32x4)__b);
902 }
903
wasm_i32x4_min(v128_t __a,v128_t __b)904 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_min(v128_t __a,
905 v128_t __b) {
906 return (v128_t)__builtin_wasm_min_s_i32x4((__i32x4)__a, (__i32x4)__b);
907 }
908
wasm_u32x4_min(v128_t __a,v128_t __b)909 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_min(v128_t __a,
910 v128_t __b) {
911 return (v128_t)__builtin_wasm_min_u_i32x4((__u32x4)__a, (__u32x4)__b);
912 }
913
wasm_i32x4_max(v128_t __a,v128_t __b)914 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_max(v128_t __a,
915 v128_t __b) {
916 return (v128_t)__builtin_wasm_max_s_i32x4((__i32x4)__a, (__i32x4)__b);
917 }
918
wasm_u32x4_max(v128_t __a,v128_t __b)919 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_max(v128_t __a,
920 v128_t __b) {
921 return (v128_t)__builtin_wasm_max_u_i32x4((__u32x4)__a, (__u32x4)__b);
922 }
923
wasm_i32x4_dot_i16x8(v128_t __a,v128_t __b)924 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_dot_i16x8(v128_t __a,
925 v128_t __b) {
926 return (v128_t)__builtin_wasm_dot_s_i32x4_i16x8((__i16x8)__a, (__i16x8)__b);
927 }
928
wasm_i64x2_abs(v128_t __a)929 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_abs(v128_t __a) {
930 return (v128_t)__builtin_wasm_abs_i64x2((__i64x2)__a);
931 }
932
wasm_i64x2_neg(v128_t __a)933 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_neg(v128_t __a) {
934 return (v128_t)(-(__u64x2)__a);
935 }
936
wasm_i64x2_all_true(v128_t __a)937 static __inline__ bool __DEFAULT_FN_ATTRS wasm_i64x2_all_true(v128_t __a) {
938 return __builtin_wasm_all_true_i64x2((__i64x2)__a);
939 }
940
wasm_i64x2_bitmask(v128_t __a)941 static __inline__ int32_t __DEFAULT_FN_ATTRS wasm_i64x2_bitmask(v128_t __a) {
942 return __builtin_wasm_bitmask_i64x2((__i64x2)__a);
943 }
944
wasm_i64x2_shl(v128_t __a,int32_t __b)945 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_shl(v128_t __a,
946 int32_t __b) {
947 return (v128_t)((__i64x2)__a << (int64_t)__b);
948 }
949
wasm_i64x2_shr(v128_t __a,int32_t __b)950 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_shr(v128_t __a,
951 int32_t __b) {
952 return (v128_t)((__i64x2)__a >> (int64_t)__b);
953 }
954
wasm_u64x2_shr(v128_t __a,int32_t __b)955 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u64x2_shr(v128_t __a,
956 int32_t __b) {
957 return (v128_t)((__u64x2)__a >> (int64_t)__b);
958 }
959
wasm_i64x2_add(v128_t __a,v128_t __b)960 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_add(v128_t __a,
961 v128_t __b) {
962 return (v128_t)((__u64x2)__a + (__u64x2)__b);
963 }
964
wasm_i64x2_sub(v128_t __a,v128_t __b)965 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_sub(v128_t __a,
966 v128_t __b) {
967 return (v128_t)((__u64x2)__a - (__u64x2)__b);
968 }
969
wasm_i64x2_mul(v128_t __a,v128_t __b)970 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_mul(v128_t __a,
971 v128_t __b) {
972 return (v128_t)((__u64x2)__a * (__u64x2)__b);
973 }
974
wasm_f32x4_abs(v128_t __a)975 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_abs(v128_t __a) {
976 return (v128_t)__builtin_wasm_abs_f32x4((__f32x4)__a);
977 }
978
wasm_f32x4_neg(v128_t __a)979 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_neg(v128_t __a) {
980 return (v128_t)(-(__f32x4)__a);
981 }
982
wasm_f32x4_sqrt(v128_t __a)983 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_sqrt(v128_t __a) {
984 return (v128_t)__builtin_wasm_sqrt_f32x4((__f32x4)__a);
985 }
986
wasm_f32x4_ceil(v128_t __a)987 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_ceil(v128_t __a) {
988 return (v128_t)__builtin_wasm_ceil_f32x4((__f32x4)__a);
989 }
990
wasm_f32x4_floor(v128_t __a)991 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_floor(v128_t __a) {
992 return (v128_t)__builtin_wasm_floor_f32x4((__f32x4)__a);
993 }
994
wasm_f32x4_trunc(v128_t __a)995 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_trunc(v128_t __a) {
996 return (v128_t)__builtin_wasm_trunc_f32x4((__f32x4)__a);
997 }
998
wasm_f32x4_nearest(v128_t __a)999 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_nearest(v128_t __a) {
1000 return (v128_t)__builtin_wasm_nearest_f32x4((__f32x4)__a);
1001 }
1002
wasm_f32x4_add(v128_t __a,v128_t __b)1003 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_add(v128_t __a,
1004 v128_t __b) {
1005 return (v128_t)((__f32x4)__a + (__f32x4)__b);
1006 }
1007
wasm_f32x4_sub(v128_t __a,v128_t __b)1008 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_sub(v128_t __a,
1009 v128_t __b) {
1010 return (v128_t)((__f32x4)__a - (__f32x4)__b);
1011 }
1012
wasm_f32x4_mul(v128_t __a,v128_t __b)1013 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_mul(v128_t __a,
1014 v128_t __b) {
1015 return (v128_t)((__f32x4)__a * (__f32x4)__b);
1016 }
1017
wasm_f32x4_div(v128_t __a,v128_t __b)1018 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_div(v128_t __a,
1019 v128_t __b) {
1020 return (v128_t)((__f32x4)__a / (__f32x4)__b);
1021 }
1022
wasm_f32x4_min(v128_t __a,v128_t __b)1023 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_min(v128_t __a,
1024 v128_t __b) {
1025 return (v128_t)__builtin_wasm_min_f32x4((__f32x4)__a, (__f32x4)__b);
1026 }
1027
wasm_f32x4_max(v128_t __a,v128_t __b)1028 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_max(v128_t __a,
1029 v128_t __b) {
1030 return (v128_t)__builtin_wasm_max_f32x4((__f32x4)__a, (__f32x4)__b);
1031 }
1032
wasm_f32x4_pmin(v128_t __a,v128_t __b)1033 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_pmin(v128_t __a,
1034 v128_t __b) {
1035 return (v128_t)__builtin_wasm_pmin_f32x4((__f32x4)__a, (__f32x4)__b);
1036 }
1037
wasm_f32x4_pmax(v128_t __a,v128_t __b)1038 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_pmax(v128_t __a,
1039 v128_t __b) {
1040 return (v128_t)__builtin_wasm_pmax_f32x4((__f32x4)__a, (__f32x4)__b);
1041 }
1042
wasm_f64x2_abs(v128_t __a)1043 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_abs(v128_t __a) {
1044 return (v128_t)__builtin_wasm_abs_f64x2((__f64x2)__a);
1045 }
1046
wasm_f64x2_neg(v128_t __a)1047 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_neg(v128_t __a) {
1048 return (v128_t)(-(__f64x2)__a);
1049 }
1050
wasm_f64x2_sqrt(v128_t __a)1051 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_sqrt(v128_t __a) {
1052 return (v128_t)__builtin_wasm_sqrt_f64x2((__f64x2)__a);
1053 }
1054
wasm_f64x2_ceil(v128_t __a)1055 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_ceil(v128_t __a) {
1056 return (v128_t)__builtin_wasm_ceil_f64x2((__f64x2)__a);
1057 }
1058
wasm_f64x2_floor(v128_t __a)1059 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_floor(v128_t __a) {
1060 return (v128_t)__builtin_wasm_floor_f64x2((__f64x2)__a);
1061 }
1062
wasm_f64x2_trunc(v128_t __a)1063 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_trunc(v128_t __a) {
1064 return (v128_t)__builtin_wasm_trunc_f64x2((__f64x2)__a);
1065 }
1066
wasm_f64x2_nearest(v128_t __a)1067 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_nearest(v128_t __a) {
1068 return (v128_t)__builtin_wasm_nearest_f64x2((__f64x2)__a);
1069 }
1070
wasm_f64x2_add(v128_t __a,v128_t __b)1071 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_add(v128_t __a,
1072 v128_t __b) {
1073 return (v128_t)((__f64x2)__a + (__f64x2)__b);
1074 }
1075
wasm_f64x2_sub(v128_t __a,v128_t __b)1076 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_sub(v128_t __a,
1077 v128_t __b) {
1078 return (v128_t)((__f64x2)__a - (__f64x2)__b);
1079 }
1080
wasm_f64x2_mul(v128_t __a,v128_t __b)1081 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_mul(v128_t __a,
1082 v128_t __b) {
1083 return (v128_t)((__f64x2)__a * (__f64x2)__b);
1084 }
1085
wasm_f64x2_div(v128_t __a,v128_t __b)1086 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_div(v128_t __a,
1087 v128_t __b) {
1088 return (v128_t)((__f64x2)__a / (__f64x2)__b);
1089 }
1090
wasm_f64x2_min(v128_t __a,v128_t __b)1091 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_min(v128_t __a,
1092 v128_t __b) {
1093 return (v128_t)__builtin_wasm_min_f64x2((__f64x2)__a, (__f64x2)__b);
1094 }
1095
wasm_f64x2_max(v128_t __a,v128_t __b)1096 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_max(v128_t __a,
1097 v128_t __b) {
1098 return (v128_t)__builtin_wasm_max_f64x2((__f64x2)__a, (__f64x2)__b);
1099 }
1100
wasm_f64x2_pmin(v128_t __a,v128_t __b)1101 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_pmin(v128_t __a,
1102 v128_t __b) {
1103 return (v128_t)__builtin_wasm_pmin_f64x2((__f64x2)__a, (__f64x2)__b);
1104 }
1105
wasm_f64x2_pmax(v128_t __a,v128_t __b)1106 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_pmax(v128_t __a,
1107 v128_t __b) {
1108 return (v128_t)__builtin_wasm_pmax_f64x2((__f64x2)__a, (__f64x2)__b);
1109 }
1110
1111 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_trunc_sat_f32x4(v128_t __a)1112 wasm_i32x4_trunc_sat_f32x4(v128_t __a) {
1113 return (v128_t)__builtin_wasm_trunc_saturate_s_i32x4_f32x4((__f32x4)__a);
1114 }
1115
1116 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_trunc_sat_f32x4(v128_t __a)1117 wasm_u32x4_trunc_sat_f32x4(v128_t __a) {
1118 return (v128_t)__builtin_wasm_trunc_saturate_u_i32x4_f32x4((__f32x4)__a);
1119 }
1120
1121 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_convert_i32x4(v128_t __a)1122 wasm_f32x4_convert_i32x4(v128_t __a) {
1123 return (v128_t) __builtin_convertvector((__i32x4)__a, __f32x4);
1124 }
1125
1126 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_convert_u32x4(v128_t __a)1127 wasm_f32x4_convert_u32x4(v128_t __a) {
1128 return (v128_t) __builtin_convertvector((__u32x4)__a, __f32x4);
1129 }
1130
1131 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_convert_low_i32x4(v128_t __a)1132 wasm_f64x2_convert_low_i32x4(v128_t __a) {
1133 return (v128_t) __builtin_convertvector((__i32x2){__a[0], __a[1]}, __f64x2);
1134 }
1135
1136 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_convert_low_u32x4(v128_t __a)1137 wasm_f64x2_convert_low_u32x4(v128_t __a) {
1138 return (v128_t) __builtin_convertvector((__u32x2){__a[0], __a[1]}, __f64x2);
1139 }
1140
1141 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_trunc_sat_f64x2_zero(v128_t __a)1142 wasm_i32x4_trunc_sat_f64x2_zero(v128_t __a) {
1143 return (v128_t)__builtin_wasm_trunc_sat_zero_s_f64x2_i32x4((__f64x2)__a);
1144 }
1145
1146 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_trunc_sat_f64x2_zero(v128_t __a)1147 wasm_u32x4_trunc_sat_f64x2_zero(v128_t __a) {
1148 return (v128_t)__builtin_wasm_trunc_sat_zero_u_f64x2_i32x4((__f64x2)__a);
1149 }
1150
1151 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_demote_f64x2_zero(v128_t __a)1152 wasm_f32x4_demote_f64x2_zero(v128_t __a) {
1153 return (v128_t)__builtin_wasm_demote_zero_f64x2_f32x4((__f64x2)__a);
1154 }
1155
1156 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_promote_low_f32x4(v128_t __a)1157 wasm_f64x2_promote_low_f32x4(v128_t __a) {
1158 return (v128_t)__builtin_wasm_promote_low_f32x4_f64x2((__f32x4)__a);
1159 }
1160
// Compile-time lane shuffles. The lane indices must be integer constant
// expressions; the wider-lane variants are expressed in terms of the byte
// shuffle by expanding each lane index into its constituent byte indices.

#define wasm_i8x16_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7, __c8, __c9, __c10, __c11, __c12, __c13,       \
                           __c14, __c15)                                       \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), __c0, __c1, __c2, __c3, __c4, __c5,      \
      __c6, __c7, __c8, __c9, __c10, __c11, __c12, __c13, __c14, __c15))

#define wasm_i16x8_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7)                                               \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*2, (__c0)*2 + 1, (__c1)*2,        \
      (__c1)*2 + 1, (__c2)*2, (__c2)*2 + 1, (__c3)*2, (__c3)*2 + 1, (__c4)*2,  \
      (__c4)*2 + 1, (__c5)*2, (__c5)*2 + 1, (__c6)*2, (__c6)*2 + 1, (__c7)*2,  \
      (__c7)*2 + 1))

#define wasm_i32x4_shuffle(__a, __b, __c0, __c1, __c2, __c3)                   \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*4, (__c0)*4 + 1, (__c0)*4 + 2,    \
      (__c0)*4 + 3, (__c1)*4, (__c1)*4 + 1, (__c1)*4 + 2, (__c1)*4 + 3,        \
      (__c2)*4, (__c2)*4 + 1, (__c2)*4 + 2, (__c2)*4 + 3, (__c3)*4,            \
      (__c3)*4 + 1, (__c3)*4 + 2, (__c3)*4 + 3))

#define wasm_i64x2_shuffle(__a, __b, __c0, __c1)                               \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*8, (__c0)*8 + 1, (__c0)*8 + 2,    \
      (__c0)*8 + 3, (__c0)*8 + 4, (__c0)*8 + 5, (__c0)*8 + 6, (__c0)*8 + 7,    \
      (__c1)*8, (__c1)*8 + 1, (__c1)*8 + 2, (__c1)*8 + 3, (__c1)*8 + 4,        \
      (__c1)*8 + 5, (__c1)*8 + 6, (__c1)*8 + 7))
1189
wasm_i8x16_swizzle(v128_t __a,v128_t __b)1190 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_swizzle(v128_t __a,
1191 v128_t __b) {
1192 return (v128_t)__builtin_wasm_swizzle_i8x16((__i8x16)__a, (__i8x16)__b);
1193 }
1194
1195 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_narrow_i16x8(v128_t __a,v128_t __b)1196 wasm_i8x16_narrow_i16x8(v128_t __a, v128_t __b) {
1197 return (v128_t)__builtin_wasm_narrow_s_i8x16_i16x8((__i16x8)__a,
1198 (__i16x8)__b);
1199 }
1200
1201 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_narrow_i16x8(v128_t __a,v128_t __b)1202 wasm_u8x16_narrow_i16x8(v128_t __a, v128_t __b) {
1203 return (v128_t)__builtin_wasm_narrow_u_i8x16_i16x8((__i16x8)__a,
1204 (__i16x8)__b);
1205 }
1206
1207 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_narrow_i32x4(v128_t __a,v128_t __b)1208 wasm_i16x8_narrow_i32x4(v128_t __a, v128_t __b) {
1209 return (v128_t)__builtin_wasm_narrow_s_i16x8_i32x4((__i32x4)__a,
1210 (__i32x4)__b);
1211 }
1212
1213 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_narrow_i32x4(v128_t __a,v128_t __b)1214 wasm_u16x8_narrow_i32x4(v128_t __a, v128_t __b) {
1215 return (v128_t)__builtin_wasm_narrow_u_i16x8_i32x4((__i32x4)__a,
1216 (__i32x4)__b);
1217 }
1218
1219 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_extend_low_i8x16(v128_t __a)1220 wasm_i16x8_extend_low_i8x16(v128_t __a) {
1221 return (v128_t) __builtin_convertvector(
1222 (__i8x8){((__i8x16)__a)[0], ((__i8x16)__a)[1], ((__i8x16)__a)[2],
1223 ((__i8x16)__a)[3], ((__i8x16)__a)[4], ((__i8x16)__a)[5],
1224 ((__i8x16)__a)[6], ((__i8x16)__a)[7]},
1225 __i16x8);
1226 }
1227
1228 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_extend_high_i8x16(v128_t __a)1229 wasm_i16x8_extend_high_i8x16(v128_t __a) {
1230 return (v128_t) __builtin_convertvector(
1231 (__i8x8){((__i8x16)__a)[8], ((__i8x16)__a)[9], ((__i8x16)__a)[10],
1232 ((__i8x16)__a)[11], ((__i8x16)__a)[12], ((__i8x16)__a)[13],
1233 ((__i8x16)__a)[14], ((__i8x16)__a)[15]},
1234 __i16x8);
1235 }
1236
1237 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_extend_low_u8x16(v128_t __a)1238 wasm_u16x8_extend_low_u8x16(v128_t __a) {
1239 return (v128_t) __builtin_convertvector(
1240 (__u8x8){((__u8x16)__a)[0], ((__u8x16)__a)[1], ((__u8x16)__a)[2],
1241 ((__u8x16)__a)[3], ((__u8x16)__a)[4], ((__u8x16)__a)[5],
1242 ((__u8x16)__a)[6], ((__u8x16)__a)[7]},
1243 __u16x8);
1244 }
1245
1246 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_extend_high_u8x16(v128_t __a)1247 wasm_u16x8_extend_high_u8x16(v128_t __a) {
1248 return (v128_t) __builtin_convertvector(
1249 (__u8x8){((__u8x16)__a)[8], ((__u8x16)__a)[9], ((__u8x16)__a)[10],
1250 ((__u8x16)__a)[11], ((__u8x16)__a)[12], ((__u8x16)__a)[13],
1251 ((__u8x16)__a)[14], ((__u8x16)__a)[15]},
1252 __u16x8);
1253 }
1254
1255 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_extend_low_i16x8(v128_t __a)1256 wasm_i32x4_extend_low_i16x8(v128_t __a) {
1257 return (v128_t) __builtin_convertvector(
1258 (__i16x4){((__i16x8)__a)[0], ((__i16x8)__a)[1], ((__i16x8)__a)[2],
1259 ((__i16x8)__a)[3]},
1260 __i32x4);
1261 }
1262
1263 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_extend_high_i16x8(v128_t __a)1264 wasm_i32x4_extend_high_i16x8(v128_t __a) {
1265 return (v128_t) __builtin_convertvector(
1266 (__i16x4){((__i16x8)__a)[4], ((__i16x8)__a)[5], ((__i16x8)__a)[6],
1267 ((__i16x8)__a)[7]},
1268 __i32x4);
1269 }
1270
1271 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_extend_low_u16x8(v128_t __a)1272 wasm_u32x4_extend_low_u16x8(v128_t __a) {
1273 return (v128_t) __builtin_convertvector(
1274 (__u16x4){((__u16x8)__a)[0], ((__u16x8)__a)[1], ((__u16x8)__a)[2],
1275 ((__u16x8)__a)[3]},
1276 __u32x4);
1277 }
1278
1279 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_extend_high_u16x8(v128_t __a)1280 wasm_u32x4_extend_high_u16x8(v128_t __a) {
1281 return (v128_t) __builtin_convertvector(
1282 (__u16x4){((__u16x8)__a)[4], ((__u16x8)__a)[5], ((__u16x8)__a)[6],
1283 ((__u16x8)__a)[7]},
1284 __u32x4);
1285 }
1286
1287 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_extend_low_i32x4(v128_t __a)1288 wasm_i64x2_extend_low_i32x4(v128_t __a) {
1289 return (v128_t) __builtin_convertvector(
1290 (__i32x2){((__i32x4)__a)[0], ((__i32x4)__a)[1]}, __i64x2);
1291 }
1292
1293 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_extend_high_i32x4(v128_t __a)1294 wasm_i64x2_extend_high_i32x4(v128_t __a) {
1295 return (v128_t) __builtin_convertvector(
1296 (__i32x2){((__i32x4)__a)[2], ((__i32x4)__a)[3]}, __i64x2);
1297 }
1298
1299 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_extend_low_u32x4(v128_t __a)1300 wasm_u64x2_extend_low_u32x4(v128_t __a) {
1301 return (v128_t) __builtin_convertvector(
1302 (__u32x2){((__u32x4)__a)[0], ((__u32x4)__a)[1]}, __u64x2);
1303 }
1304
1305 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_extend_high_u32x4(v128_t __a)1306 wasm_u64x2_extend_high_u32x4(v128_t __a) {
1307 return (v128_t) __builtin_convertvector(
1308 (__u32x2){((__u32x4)__a)[2], ((__u32x4)__a)[3]}, __u64x2);
1309 }
1310
1311 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_extadd_pairwise_i8x16(v128_t __a)1312 wasm_i16x8_extadd_pairwise_i8x16(v128_t __a) {
1313 return (v128_t)__builtin_wasm_extadd_pairwise_i8x16_s_i16x8((__i8x16)__a);
1314 }
1315
1316 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_extadd_pairwise_u8x16(v128_t __a)1317 wasm_u16x8_extadd_pairwise_u8x16(v128_t __a) {
1318 return (v128_t)__builtin_wasm_extadd_pairwise_i8x16_u_i16x8((__u8x16)__a);
1319 }
1320
1321 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_extadd_pairwise_i16x8(v128_t __a)1322 wasm_i32x4_extadd_pairwise_i16x8(v128_t __a) {
1323 return (v128_t)__builtin_wasm_extadd_pairwise_i16x8_s_i32x4((__i16x8)__a);
1324 }
1325
1326 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_extadd_pairwise_u16x8(v128_t __a)1327 wasm_u32x4_extadd_pairwise_u16x8(v128_t __a) {
1328 return (v128_t)__builtin_wasm_extadd_pairwise_i16x8_u_i32x4((__u16x8)__a);
1329 }
1330
1331 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_extmul_low_i8x16(v128_t __a,v128_t __b)1332 wasm_i16x8_extmul_low_i8x16(v128_t __a, v128_t __b) {
1333 return (v128_t)__builtin_wasm_extmul_low_i8x16_s_i16x8((__i8x16)__a,
1334 (__i8x16)__b);
1335 }
1336
1337 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_extmul_high_i8x16(v128_t __a,v128_t __b)1338 wasm_i16x8_extmul_high_i8x16(v128_t __a, v128_t __b) {
1339 return (v128_t)__builtin_wasm_extmul_high_i8x16_s_i16x8((__i8x16)__a,
1340 (__i8x16)__b);
1341 }
1342
1343 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_extmul_low_u8x16(v128_t __a,v128_t __b)1344 wasm_u16x8_extmul_low_u8x16(v128_t __a, v128_t __b) {
1345 return (v128_t)__builtin_wasm_extmul_low_i8x16_u_i16x8((__u8x16)__a,
1346 (__u8x16)__b);
1347 }
1348
1349 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_extmul_high_u8x16(v128_t __a,v128_t __b)1350 wasm_u16x8_extmul_high_u8x16(v128_t __a, v128_t __b) {
1351 return (v128_t)__builtin_wasm_extmul_high_i8x16_u_i16x8((__u8x16)__a,
1352 (__u8x16)__b);
1353 }
1354
1355 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_extmul_low_i16x8(v128_t __a,v128_t __b)1356 wasm_i32x4_extmul_low_i16x8(v128_t __a, v128_t __b) {
1357 return (v128_t)__builtin_wasm_extmul_low_i16x8_s_i32x4((__i16x8)__a,
1358 (__i16x8)__b);
1359 }
1360
1361 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_extmul_high_i16x8(v128_t __a,v128_t __b)1362 wasm_i32x4_extmul_high_i16x8(v128_t __a, v128_t __b) {
1363 return (v128_t)__builtin_wasm_extmul_high_i16x8_s_i32x4((__i16x8)__a,
1364 (__i16x8)__b);
1365 }
1366
1367 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_extmul_low_u16x8(v128_t __a,v128_t __b)1368 wasm_u32x4_extmul_low_u16x8(v128_t __a, v128_t __b) {
1369 return (v128_t)__builtin_wasm_extmul_low_i16x8_u_i32x4((__u16x8)__a,
1370 (__u16x8)__b);
1371 }
1372
1373 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_extmul_high_u16x8(v128_t __a,v128_t __b)1374 wasm_u32x4_extmul_high_u16x8(v128_t __a, v128_t __b) {
1375 return (v128_t)__builtin_wasm_extmul_high_i16x8_u_i32x4((__u16x8)__a,
1376 (__u16x8)__b);
1377 }
1378
1379 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_extmul_low_i32x4(v128_t __a,v128_t __b)1380 wasm_i64x2_extmul_low_i32x4(v128_t __a, v128_t __b) {
1381 return (v128_t)__builtin_wasm_extmul_low_i32x4_s_i64x2((__i32x4)__a,
1382 (__i32x4)__b);
1383 }
1384
1385 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_extmul_high_i32x4(v128_t __a,v128_t __b)1386 wasm_i64x2_extmul_high_i32x4(v128_t __a, v128_t __b) {
1387 return (v128_t)__builtin_wasm_extmul_high_i32x4_s_i64x2((__i32x4)__a,
1388 (__i32x4)__b);
1389 }
1390
1391 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_extmul_low_u32x4(v128_t __a,v128_t __b)1392 wasm_u64x2_extmul_low_u32x4(v128_t __a, v128_t __b) {
1393 return (v128_t)__builtin_wasm_extmul_low_i32x4_u_i64x2((__u32x4)__a,
1394 (__u32x4)__b);
1395 }
1396
1397 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_extmul_high_u32x4(v128_t __a,v128_t __b)1398 wasm_u64x2_extmul_high_u32x4(v128_t __a, v128_t __b) {
1399 return (v128_t)__builtin_wasm_extmul_high_i32x4_u_i64x2((__u32x4)__a,
1400 (__u32x4)__b);
1401 }
1402
wasm_i16x8_q15mulr_sat(v128_t __a,v128_t __b)1403 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_q15mulr_sat(v128_t __a,
1404 v128_t __b) {
1405 return (v128_t)__builtin_wasm_q15mulr_sat_s_i16x8((__i16x8)__a, (__i16x8)__b);
1406 }
1407
// Old intrinsic names supported to ease transitioning to the standard names. Do
// not use these; they will be removed in the near future.

// Function attributes for the deprecated wrappers below; emits a warning
// naming the standard replacement (and a fixit hint via the second argument).
#define __DEPRECATED_FN_ATTRS(__replacement)                                   \
  __DEFAULT_FN_ATTRS __attribute__(                                            \
      (deprecated("use " __replacement " instead", __replacement)))

#define __WASM_STR(X) #X

// Macros cannot carry the deprecated attribute, so deprecated macro names emit
// a pragma-based warning instead (only when __DEPRECATED is defined).
#ifdef __DEPRECATED
#define __DEPRECATED_WASM_MACRO(__name, __replacement)                         \
  _Pragma(__WASM_STR(GCC warning(                                              \
      "'" __name "' is deprecated: use '" __replacement "' instead")))
#else
#define __DEPRECATED_WASM_MACRO(__name, __replacement)
#endif
1424
1425 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_v128_load8_splat")
wasm_v8x16_load_splat(const void * __mem)1426 wasm_v8x16_load_splat(const void *__mem) {
1427 return wasm_v128_load8_splat(__mem);
1428 }
1429
1430 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_v128_load16_splat")
wasm_v16x8_load_splat(const void * __mem)1431 wasm_v16x8_load_splat(const void *__mem) {
1432 return wasm_v128_load16_splat(__mem);
1433 }
1434
1435 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_v128_load32_splat")
wasm_v32x4_load_splat(const void * __mem)1436 wasm_v32x4_load_splat(const void *__mem) {
1437 return wasm_v128_load32_splat(__mem);
1438 }
1439
1440 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_v128_load64_splat")
wasm_v64x2_load_splat(const void * __mem)1441 wasm_v64x2_load_splat(const void *__mem) {
1442 return wasm_v128_load64_splat(__mem);
1443 }
1444
1445 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i16x8_load8x8")
wasm_i16x8_load_8x8(const void * __mem)1446 wasm_i16x8_load_8x8(const void *__mem) {
1447 return wasm_i16x8_load8x8(__mem);
1448 }
1449
1450 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u16x8_load8x8")
wasm_u16x8_load_8x8(const void * __mem)1451 wasm_u16x8_load_8x8(const void *__mem) {
1452 return wasm_u16x8_load8x8(__mem);
1453 }
1454
1455 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i32x4_load16x4")
wasm_i32x4_load_16x4(const void * __mem)1456 wasm_i32x4_load_16x4(const void *__mem) {
1457 return wasm_i32x4_load16x4(__mem);
1458 }
1459
1460 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u32x4_load16x4")
wasm_u32x4_load_16x4(const void * __mem)1461 wasm_u32x4_load_16x4(const void *__mem) {
1462 return wasm_u32x4_load16x4(__mem);
1463 }
1464
1465 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i64x2_load32x2")
wasm_i64x2_load_32x2(const void * __mem)1466 wasm_i64x2_load_32x2(const void *__mem) {
1467 return wasm_i64x2_load32x2(__mem);
1468 }
1469
1470 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u64x2_load32x2")
wasm_u64x2_load_32x2(const void * __mem)1471 wasm_u64x2_load_32x2(const void *__mem) {
1472 return wasm_u64x2_load32x2(__mem);
1473 }
1474
// Deprecated shuffle macro aliases; each warns (when __DEPRECATED is defined)
// and expands to its standard-named replacement.

#define wasm_v8x16_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7, __c8, __c9, __c10, __c11, __c12, __c13,       \
                           __c14, __c15)                                       \
  __DEPRECATED_WASM_MACRO("wasm_v8x16_shuffle", "wasm_i8x16_shuffle")          \
  wasm_i8x16_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7, \
                     __c8, __c9, __c10, __c11, __c12, __c13, __c14, __c15)

#define wasm_v16x8_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7)                                               \
  __DEPRECATED_WASM_MACRO("wasm_v16x8_shuffle", "wasm_i16x8_shuffle")          \
  wasm_i16x8_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7)

#define wasm_v32x4_shuffle(__a, __b, __c0, __c1, __c2, __c3)                   \
  __DEPRECATED_WASM_MACRO("wasm_v32x4_shuffle", "wasm_i32x4_shuffle")          \
  wasm_i32x4_shuffle(__a, __b, __c0, __c1, __c2, __c3)

#define wasm_v64x2_shuffle(__a, __b, __c0, __c1)                               \
  __DEPRECATED_WASM_MACRO("wasm_v64x2_shuffle", "wasm_i64x2_shuffle")          \
  wasm_i64x2_shuffle(__a, __b, __c0, __c1)
1494
1495 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i8x16_swizzle")
wasm_v8x16_swizzle(v128_t __a,v128_t __b)1496 wasm_v8x16_swizzle(v128_t __a, v128_t __b) {
1497 return wasm_i8x16_swizzle(__a, __b);
1498 }
1499
1500 static __inline__ bool __DEPRECATED_FN_ATTRS("wasm_v128_any_true")
wasm_i8x16_any_true(v128_t __a)1501 wasm_i8x16_any_true(v128_t __a) {
1502 return wasm_v128_any_true(__a);
1503 }
1504
1505 static __inline__ bool __DEPRECATED_FN_ATTRS("wasm_v128_any_true")
wasm_i16x8_any_true(v128_t __a)1506 wasm_i16x8_any_true(v128_t __a) {
1507 return wasm_v128_any_true(__a);
1508 }
1509
1510 static __inline__ bool __DEPRECATED_FN_ATTRS("wasm_v128_any_true")
wasm_i32x4_any_true(v128_t __a)1511 wasm_i32x4_any_true(v128_t __a) {
1512 return wasm_v128_any_true(__a);
1513 }
1514
1515 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i8x16_add_sat")
wasm_i8x16_add_saturate(v128_t __a,v128_t __b)1516 wasm_i8x16_add_saturate(v128_t __a, v128_t __b) {
1517 return wasm_i8x16_add_sat(__a, __b);
1518 }
1519
1520 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u8x16_add_sat")
wasm_u8x16_add_saturate(v128_t __a,v128_t __b)1521 wasm_u8x16_add_saturate(v128_t __a, v128_t __b) {
1522 return wasm_u8x16_add_sat(__a, __b);
1523 }
1524
1525 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i8x16_sub_sat")
wasm_i8x16_sub_saturate(v128_t __a,v128_t __b)1526 wasm_i8x16_sub_saturate(v128_t __a, v128_t __b) {
1527 return wasm_i8x16_sub_sat(__a, __b);
1528 }
1529
1530 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u8x16_sub_sat")
wasm_u8x16_sub_saturate(v128_t __a,v128_t __b)1531 wasm_u8x16_sub_saturate(v128_t __a, v128_t __b) {
1532 return wasm_u8x16_sub_sat(__a, __b);
1533 }
1534
1535 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i16x8_add_sat")
wasm_i16x8_add_saturate(v128_t __a,v128_t __b)1536 wasm_i16x8_add_saturate(v128_t __a, v128_t __b) {
1537 return wasm_i16x8_add_sat(__a, __b);
1538 }
1539
1540 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u16x8_add_sat")
wasm_u16x8_add_saturate(v128_t __a,v128_t __b)1541 wasm_u16x8_add_saturate(v128_t __a, v128_t __b) {
1542 return wasm_u16x8_add_sat(__a, __b);
1543 }
1544
1545 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i16x8_sub_sat")
wasm_i16x8_sub_saturate(v128_t __a,v128_t __b)1546 wasm_i16x8_sub_saturate(v128_t __a, v128_t __b) {
1547 return wasm_i16x8_sub_sat(__a, __b);
1548 }
1549
1550 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u16x8_sub_sat")
wasm_u16x8_sub_saturate(v128_t __a,v128_t __b)1551 wasm_u16x8_sub_saturate(v128_t __a, v128_t __b) {
1552 return wasm_u16x8_sub_sat(__a, __b);
1553 }
1554
1555 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i16x8_extend_low_i8x16")
wasm_i16x8_widen_low_i8x16(v128_t __a)1556 wasm_i16x8_widen_low_i8x16(v128_t __a) {
1557 return wasm_i16x8_extend_low_i8x16(__a);
1558 }
1559
1560 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i16x8_extend_high_i8x16")
wasm_i16x8_widen_high_i8x16(v128_t __a)1561 wasm_i16x8_widen_high_i8x16(v128_t __a) {
1562 return wasm_i16x8_extend_high_i8x16(__a);
1563 }
1564
1565 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u16x8_extend_low_u8x16")
wasm_i16x8_widen_low_u8x16(v128_t __a)1566 wasm_i16x8_widen_low_u8x16(v128_t __a) {
1567 return wasm_u16x8_extend_low_u8x16(__a);
1568 }
1569
1570 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u16x8_extend_high_u8x16")
wasm_i16x8_widen_high_u8x16(v128_t __a)1571 wasm_i16x8_widen_high_u8x16(v128_t __a) {
1572 return wasm_u16x8_extend_high_u8x16(__a);
1573 }
1574
1575 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i32x4_extend_low_i16x8")
wasm_i32x4_widen_low_i16x8(v128_t __a)1576 wasm_i32x4_widen_low_i16x8(v128_t __a) {
1577 return wasm_i32x4_extend_low_i16x8(__a);
1578 }
1579
1580 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i32x4_extend_high_i16x8")
wasm_i32x4_widen_high_i16x8(v128_t __a)1581 wasm_i32x4_widen_high_i16x8(v128_t __a) {
1582 return wasm_i32x4_extend_high_i16x8(__a);
1583 }
1584
1585 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u32x4_extend_low_u16x8")
wasm_i32x4_widen_low_u16x8(v128_t __a)1586 wasm_i32x4_widen_low_u16x8(v128_t __a) {
1587 return wasm_u32x4_extend_low_u16x8(__a);
1588 }
1589
1590 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u32x4_extend_high_u16x8")
wasm_i32x4_widen_high_u16x8(v128_t __a)1591 wasm_i32x4_widen_high_u16x8(v128_t __a) {
1592 return wasm_u32x4_extend_high_u16x8(__a);
1593 }
1594
1595 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i32x4_trunc_sat_f32x4")
wasm_i32x4_trunc_saturate_f32x4(v128_t __a)1596 wasm_i32x4_trunc_saturate_f32x4(v128_t __a) {
1597 return wasm_i32x4_trunc_sat_f32x4(__a);
1598 }
1599
1600 static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u32x4_trunc_sat_f32x4")
wasm_u32x4_trunc_saturate_f32x4(v128_t __a)1601 wasm_u32x4_trunc_saturate_f32x4(v128_t __a) {
1602 return wasm_u32x4_trunc_sat_f32x4(__a);
1603 }
1604
1605 // Undefine helper macros
1606 #undef __DEFAULT_FN_ATTRS
1607 #undef __DEPRECATED_FN_ATTRS
1608
1609 #endif // __WASM_SIMD128_H
1610