/*===---- wasm_simd128.h - WebAssembly portable SIMD intrinsics ------------===
 *
 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
 * See https://llvm.org/LICENSE.txt for license information.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 *
 *===-----------------------------------------------------------------------===
 */
9 
10 #ifndef __WASM_SIMD128_H
11 #define __WASM_SIMD128_H
12 
13 #include <stdbool.h>
14 #include <stdint.h>
15 
16 // User-facing type
17 typedef int32_t v128_t __attribute__((__vector_size__(16), __aligned__(16)));
18 
19 // Internal types determined by clang builtin definitions
20 typedef int32_t __v128_u __attribute__((__vector_size__(16), __aligned__(1)));
21 typedef signed char __i8x16
22     __attribute__((__vector_size__(16), __aligned__(16)));
23 typedef unsigned char __u8x16
24     __attribute__((__vector_size__(16), __aligned__(16)));
25 typedef short __i16x8 __attribute__((__vector_size__(16), __aligned__(16)));
26 typedef unsigned short __u16x8
27     __attribute__((__vector_size__(16), __aligned__(16)));
28 typedef int __i32x4 __attribute__((__vector_size__(16), __aligned__(16)));
29 typedef unsigned int __u32x4
30     __attribute__((__vector_size__(16), __aligned__(16)));
31 typedef long long __i64x2 __attribute__((__vector_size__(16), __aligned__(16)));
32 typedef unsigned long long __u64x2
33     __attribute__((__vector_size__(16), __aligned__(16)));
34 typedef float __f32x4 __attribute__((__vector_size__(16), __aligned__(16)));
35 typedef double __f64x2 __attribute__((__vector_size__(16), __aligned__(16)));
36 
37 typedef signed char __i8x8 __attribute__((__vector_size__(8), __aligned__(8)));
38 typedef unsigned char __u8x8
39     __attribute__((__vector_size__(8), __aligned__(8)));
40 typedef short __i16x4 __attribute__((__vector_size__(8), __aligned__(8)));
41 typedef unsigned short __u16x4
42     __attribute__((__vector_size__(8), __aligned__(8)));
43 typedef int __i32x2 __attribute__((__vector_size__(8), __aligned__(8)));
44 typedef unsigned int __u32x2
45     __attribute__((__vector_size__(8), __aligned__(8)));
46 typedef float __f32x2 __attribute__((__vector_size__(8), __aligned__(8)));
47 
48 #define __DEFAULT_FN_ATTRS                                                     \
49   __attribute__((__always_inline__, __nodebug__, __target__("simd128"),        \
50                  __min_vector_width__(128)))
51 
52 #define __REQUIRE_CONSTANT(c)                                                  \
53   __attribute__((__diagnose_if__(!__builtin_constant_p(c),                     \
54                                  #c " must be constant", "error")))
55 
wasm_v128_load(const void * __mem)56 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_load(const void *__mem) {
57   // UB-free unaligned access copied from xmmintrin.h
58   struct __wasm_v128_load_struct {
59     __v128_u __v;
60   } __attribute__((__packed__, __may_alias__));
61   return ((const struct __wasm_v128_load_struct *)__mem)->__v;
62 }
63 
64 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load8_splat(const void * __mem)65 wasm_v128_load8_splat(const void *__mem) {
66   struct __wasm_v128_load8_splat_struct {
67     uint8_t __v;
68   } __attribute__((__packed__, __may_alias__));
69   uint8_t __v = ((const struct __wasm_v128_load8_splat_struct *)__mem)->__v;
70   return (v128_t)(__u8x16){__v, __v, __v, __v, __v, __v, __v, __v,
71                            __v, __v, __v, __v, __v, __v, __v, __v};
72 }
73 
74 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load16_splat(const void * __mem)75 wasm_v128_load16_splat(const void *__mem) {
76   struct __wasm_v128_load16_splat_struct {
77     uint16_t __v;
78   } __attribute__((__packed__, __may_alias__));
79   uint16_t __v = ((const struct __wasm_v128_load16_splat_struct *)__mem)->__v;
80   return (v128_t)(__u16x8){__v, __v, __v, __v, __v, __v, __v, __v};
81 }
82 
83 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load32_splat(const void * __mem)84 wasm_v128_load32_splat(const void *__mem) {
85   struct __wasm_v128_load32_splat_struct {
86     uint32_t __v;
87   } __attribute__((__packed__, __may_alias__));
88   uint32_t __v = ((const struct __wasm_v128_load32_splat_struct *)__mem)->__v;
89   return (v128_t)(__u32x4){__v, __v, __v, __v};
90 }
91 
92 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load64_splat(const void * __mem)93 wasm_v128_load64_splat(const void *__mem) {
94   struct __wasm_v128_load64_splat_struct {
95     uint64_t __v;
96   } __attribute__((__packed__, __may_alias__));
97   uint64_t __v = ((const struct __wasm_v128_load64_splat_struct *)__mem)->__v;
98   return (v128_t)(__u64x2){__v, __v};
99 }
100 
101 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_load8x8(const void * __mem)102 wasm_i16x8_load8x8(const void *__mem) {
103   struct __wasm_i16x8_load8x8_struct {
104     __i8x8 __v;
105   } __attribute__((__packed__, __may_alias__));
106   __i8x8 __v = ((const struct __wasm_i16x8_load8x8_struct *)__mem)->__v;
107   return (v128_t) __builtin_convertvector(__v, __i16x8);
108 }
109 
110 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_load8x8(const void * __mem)111 wasm_u16x8_load8x8(const void *__mem) {
112   struct __wasm_u16x8_load8x8_struct {
113     __u8x8 __v;
114   } __attribute__((__packed__, __may_alias__));
115   __u8x8 __v = ((const struct __wasm_u16x8_load8x8_struct *)__mem)->__v;
116   return (v128_t) __builtin_convertvector(__v, __u16x8);
117 }
118 
119 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_load16x4(const void * __mem)120 wasm_i32x4_load16x4(const void *__mem) {
121   struct __wasm_i32x4_load16x4_struct {
122     __i16x4 __v;
123   } __attribute__((__packed__, __may_alias__));
124   __i16x4 __v = ((const struct __wasm_i32x4_load16x4_struct *)__mem)->__v;
125   return (v128_t) __builtin_convertvector(__v, __i32x4);
126 }
127 
128 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_load16x4(const void * __mem)129 wasm_u32x4_load16x4(const void *__mem) {
130   struct __wasm_u32x4_load16x4_struct {
131     __u16x4 __v;
132   } __attribute__((__packed__, __may_alias__));
133   __u16x4 __v = ((const struct __wasm_u32x4_load16x4_struct *)__mem)->__v;
134   return (v128_t) __builtin_convertvector(__v, __u32x4);
135 }
136 
137 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_load32x2(const void * __mem)138 wasm_i64x2_load32x2(const void *__mem) {
139   struct __wasm_i64x2_load32x2_struct {
140     __i32x2 __v;
141   } __attribute__((__packed__, __may_alias__));
142   __i32x2 __v = ((const struct __wasm_i64x2_load32x2_struct *)__mem)->__v;
143   return (v128_t) __builtin_convertvector(__v, __i64x2);
144 }
145 
146 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_load32x2(const void * __mem)147 wasm_u64x2_load32x2(const void *__mem) {
148   struct __wasm_u64x2_load32x2_struct {
149     __u32x2 __v;
150   } __attribute__((__packed__, __may_alias__));
151   __u32x2 __v = ((const struct __wasm_u64x2_load32x2_struct *)__mem)->__v;
152   return (v128_t) __builtin_convertvector(__v, __u64x2);
153 }
154 
155 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load32_zero(const void * __mem)156 wasm_v128_load32_zero(const void *__mem) {
157   struct __wasm_v128_load32_zero_struct {
158     int32_t __v;
159   } __attribute__((__packed__, __may_alias__));
160   int32_t __v = ((const struct __wasm_v128_load32_zero_struct *)__mem)->__v;
161   return (v128_t)(__i32x4){__v, 0, 0, 0};
162 }
163 
164 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load64_zero(const void * __mem)165 wasm_v128_load64_zero(const void *__mem) {
166   struct __wasm_v128_load64_zero_struct {
167     int64_t __v;
168   } __attribute__((__packed__, __may_alias__));
169   int64_t __v = ((const struct __wasm_v128_load64_zero_struct *)__mem)->__v;
170   return (v128_t)(__i64x2){__v, 0};
171 }
172 
wasm_v128_load8_lane(const void * __mem,v128_t __vec,int __i)173 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_load8_lane(
174     const void *__mem, v128_t __vec, int __i) __REQUIRE_CONSTANT(__i) {
175   struct __wasm_v128_load8_lane_struct {
176     int8_t __v;
177   } __attribute__((__packed__, __may_alias__));
178   int8_t __v = ((const struct __wasm_v128_load8_lane_struct *)__mem)->__v;
179   __i8x16 __ret = (__i8x16)__vec;
180   __ret[__i] = __v;
181   return (v128_t)__ret;
182 }
183 
wasm_v128_load16_lane(const void * __mem,v128_t __vec,int __i)184 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_load16_lane(
185     const void *__mem, v128_t __vec, int __i) __REQUIRE_CONSTANT(__i) {
186   struct __wasm_v128_load16_lane_struct {
187     int16_t __v;
188   } __attribute__((__packed__, __may_alias__));
189   int16_t __v = ((const struct __wasm_v128_load16_lane_struct *)__mem)->__v;
190   __i16x8 __ret = (__i16x8)__vec;
191   __ret[__i] = __v;
192   return (v128_t)__ret;
193 }
194 
wasm_v128_load32_lane(const void * __mem,v128_t __vec,int __i)195 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_load32_lane(
196     const void *__mem, v128_t __vec, int __i) __REQUIRE_CONSTANT(__i) {
197   struct __wasm_v128_load32_lane_struct {
198     int32_t __v;
199   } __attribute__((__packed__, __may_alias__));
200   int32_t __v = ((const struct __wasm_v128_load32_lane_struct *)__mem)->__v;
201   __i32x4 __ret = (__i32x4)__vec;
202   __ret[__i] = __v;
203   return (v128_t)__ret;
204 }
205 
wasm_v128_load64_lane(const void * __mem,v128_t __vec,int __i)206 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_load64_lane(
207     const void *__mem, v128_t __vec, int __i) __REQUIRE_CONSTANT(__i) {
208   struct __wasm_v128_load64_lane_struct {
209     int64_t __v;
210   } __attribute__((__packed__, __may_alias__));
211   int64_t __v = ((const struct __wasm_v128_load64_lane_struct *)__mem)->__v;
212   __i64x2 __ret = (__i64x2)__vec;
213   __ret[__i] = __v;
214   return (v128_t)__ret;
215 }
216 
wasm_v128_store(void * __mem,v128_t __a)217 static __inline__ void __DEFAULT_FN_ATTRS wasm_v128_store(void *__mem,
218                                                           v128_t __a) {
219   // UB-free unaligned access copied from xmmintrin.h
220   struct __wasm_v128_store_struct {
221     __v128_u __v;
222   } __attribute__((__packed__, __may_alias__));
223   ((struct __wasm_v128_store_struct *)__mem)->__v = __a;
224 }
225 
wasm_v128_store8_lane(void * __mem,v128_t __vec,int __i)226 static __inline__ void __DEFAULT_FN_ATTRS wasm_v128_store8_lane(void *__mem,
227                                                                 v128_t __vec,
228                                                                 int __i)
229     __REQUIRE_CONSTANT(__i) {
230   struct __wasm_v128_store8_lane_struct {
231     int8_t __v;
232   } __attribute__((__packed__, __may_alias__));
233   ((struct __wasm_v128_store8_lane_struct *)__mem)->__v = ((__i8x16)__vec)[__i];
234 }
235 
wasm_v128_store16_lane(void * __mem,v128_t __vec,int __i)236 static __inline__ void __DEFAULT_FN_ATTRS wasm_v128_store16_lane(void *__mem,
237                                                                  v128_t __vec,
238                                                                  int __i)
239     __REQUIRE_CONSTANT(__i) {
240   struct __wasm_v128_store16_lane_struct {
241     int16_t __v;
242   } __attribute__((__packed__, __may_alias__));
243   ((struct __wasm_v128_store16_lane_struct *)__mem)->__v =
244       ((__i16x8)__vec)[__i];
245 }
246 
wasm_v128_store32_lane(void * __mem,v128_t __vec,int __i)247 static __inline__ void __DEFAULT_FN_ATTRS wasm_v128_store32_lane(void *__mem,
248                                                                  v128_t __vec,
249                                                                  int __i)
250     __REQUIRE_CONSTANT(__i) {
251   struct __wasm_v128_store32_lane_struct {
252     int32_t __v;
253   } __attribute__((__packed__, __may_alias__));
254   ((struct __wasm_v128_store32_lane_struct *)__mem)->__v =
255       ((__i32x4)__vec)[__i];
256 }
257 
wasm_v128_store64_lane(void * __mem,v128_t __vec,int __i)258 static __inline__ void __DEFAULT_FN_ATTRS wasm_v128_store64_lane(void *__mem,
259                                                                  v128_t __vec,
260                                                                  int __i)
261     __REQUIRE_CONSTANT(__i) {
262   struct __wasm_v128_store64_lane_struct {
263     int64_t __v;
264   } __attribute__((__packed__, __may_alias__));
265   ((struct __wasm_v128_store64_lane_struct *)__mem)->__v =
266       ((__i64x2)__vec)[__i];
267 }
268 
269 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_make(int8_t __c0,int8_t __c1,int8_t __c2,int8_t __c3,int8_t __c4,int8_t __c5,int8_t __c6,int8_t __c7,int8_t __c8,int8_t __c9,int8_t __c10,int8_t __c11,int8_t __c12,int8_t __c13,int8_t __c14,int8_t __c15)270 wasm_i8x16_make(int8_t __c0, int8_t __c1, int8_t __c2, int8_t __c3, int8_t __c4,
271                 int8_t __c5, int8_t __c6, int8_t __c7, int8_t __c8, int8_t __c9,
272                 int8_t __c10, int8_t __c11, int8_t __c12, int8_t __c13,
273                 int8_t __c14, int8_t __c15) {
274   return (v128_t)(__i8x16){__c0,  __c1,  __c2,  __c3, __c4,  __c5,
275                            __c6,  __c7,  __c8,  __c9, __c10, __c11,
276                            __c12, __c13, __c14, __c15};
277 }
278 
279 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_make(int16_t __c0,int16_t __c1,int16_t __c2,int16_t __c3,int16_t __c4,int16_t __c5,int16_t __c6,int16_t __c7)280 wasm_i16x8_make(int16_t __c0, int16_t __c1, int16_t __c2, int16_t __c3,
281                 int16_t __c4, int16_t __c5, int16_t __c6, int16_t __c7) {
282   return (v128_t)(__i16x8){__c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7};
283 }
284 
wasm_i32x4_make(int32_t __c0,int32_t __c1,int32_t __c2,int32_t __c3)285 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_make(int32_t __c0,
286                                                             int32_t __c1,
287                                                             int32_t __c2,
288                                                             int32_t __c3) {
289   return (v128_t)(__i32x4){__c0, __c1, __c2, __c3};
290 }
291 
wasm_i64x2_make(int64_t __c0,int64_t __c1)292 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_make(int64_t __c0,
293                                                             int64_t __c1) {
294   return (v128_t)(__i64x2){__c0, __c1};
295 }
296 
wasm_f32x4_make(float __c0,float __c1,float __c2,float __c3)297 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_make(float __c0,
298                                                             float __c1,
299                                                             float __c2,
300                                                             float __c3) {
301   return (v128_t)(__f32x4){__c0, __c1, __c2, __c3};
302 }
303 
wasm_f64x2_make(double __c0,double __c1)304 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_make(double __c0,
305                                                             double __c1) {
306   return (v128_t)(__f64x2){__c0, __c1};
307 }
308 
309 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_const(int8_t __c0,int8_t __c1,int8_t __c2,int8_t __c3,int8_t __c4,int8_t __c5,int8_t __c6,int8_t __c7,int8_t __c8,int8_t __c9,int8_t __c10,int8_t __c11,int8_t __c12,int8_t __c13,int8_t __c14,int8_t __c15)310 wasm_i8x16_const(int8_t __c0, int8_t __c1, int8_t __c2, int8_t __c3,
311                  int8_t __c4, int8_t __c5, int8_t __c6, int8_t __c7,
312                  int8_t __c8, int8_t __c9, int8_t __c10, int8_t __c11,
313                  int8_t __c12, int8_t __c13, int8_t __c14, int8_t __c15)
314     __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) __REQUIRE_CONSTANT(__c2)
315         __REQUIRE_CONSTANT(__c3) __REQUIRE_CONSTANT(__c4)
316             __REQUIRE_CONSTANT(__c5) __REQUIRE_CONSTANT(__c6)
317                 __REQUIRE_CONSTANT(__c7) __REQUIRE_CONSTANT(__c8)
318                     __REQUIRE_CONSTANT(__c9) __REQUIRE_CONSTANT(__c10)
319                         __REQUIRE_CONSTANT(__c11) __REQUIRE_CONSTANT(__c12)
320                             __REQUIRE_CONSTANT(__c13) __REQUIRE_CONSTANT(__c14)
321                                 __REQUIRE_CONSTANT(__c15) {
322   return (v128_t)(__i8x16){__c0,  __c1,  __c2,  __c3, __c4,  __c5,
323                            __c6,  __c7,  __c8,  __c9, __c10, __c11,
324                            __c12, __c13, __c14, __c15};
325 }
326 
327 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_const(int16_t __c0,int16_t __c1,int16_t __c2,int16_t __c3,int16_t __c4,int16_t __c5,int16_t __c6,int16_t __c7)328 wasm_i16x8_const(int16_t __c0, int16_t __c1, int16_t __c2, int16_t __c3,
329                  int16_t __c4, int16_t __c5, int16_t __c6, int16_t __c7)
330     __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) __REQUIRE_CONSTANT(__c2)
331         __REQUIRE_CONSTANT(__c3) __REQUIRE_CONSTANT(__c4)
332             __REQUIRE_CONSTANT(__c5) __REQUIRE_CONSTANT(__c6)
333                 __REQUIRE_CONSTANT(__c7) {
334   return (v128_t)(__i16x8){__c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7};
335 }
336 
337 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_const(int32_t __c0,int32_t __c1,int32_t __c2,int32_t __c3)338 wasm_i32x4_const(int32_t __c0, int32_t __c1, int32_t __c2, int32_t __c3)
339     __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) __REQUIRE_CONSTANT(__c2)
340         __REQUIRE_CONSTANT(__c3) {
341   return (v128_t)(__i32x4){__c0, __c1, __c2, __c3};
342 }
343 
wasm_i64x2_const(int64_t __c0,int64_t __c1)344 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_const(int64_t __c0,
345                                                              int64_t __c1)
346     __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) {
347   return (v128_t)(__i64x2){__c0, __c1};
348 }
349 
350 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_const(float __c0,float __c1,float __c2,float __c3)351 wasm_f32x4_const(float __c0, float __c1, float __c2, float __c3)
352     __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) __REQUIRE_CONSTANT(__c2)
353         __REQUIRE_CONSTANT(__c3) {
354   return (v128_t)(__f32x4){__c0, __c1, __c2, __c3};
355 }
356 
wasm_f64x2_const(double __c0,double __c1)357 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_const(double __c0,
358                                                              double __c1)
359     __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) {
360   return (v128_t)(__f64x2){__c0, __c1};
361 }
362 
wasm_i8x16_const_splat(int8_t __c)363 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_const_splat(int8_t __c)
364     __REQUIRE_CONSTANT(__c) {
365   return (v128_t)(__i8x16){__c, __c, __c, __c, __c, __c, __c, __c,
366                            __c, __c, __c, __c, __c, __c, __c, __c};
367 }
368 
wasm_i16x8_const_splat(int16_t __c)369 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_const_splat(int16_t __c)
370     __REQUIRE_CONSTANT(__c) {
371   return (v128_t)(__i16x8){__c, __c, __c, __c, __c, __c, __c, __c};
372 }
373 
wasm_i32x4_const_splat(int32_t __c)374 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_const_splat(int32_t __c)
375     __REQUIRE_CONSTANT(__c) {
376   return (v128_t)(__i32x4){__c, __c, __c, __c};
377 }
378 
wasm_i64x2_const_splat(int64_t __c)379 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_const_splat(int64_t __c)
380     __REQUIRE_CONSTANT(__c) {
381   return (v128_t)(__i64x2){__c, __c};
382 }
383 
wasm_f32x4_const_splat(float __c)384 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_const_splat(float __c)
385     __REQUIRE_CONSTANT(__c) {
386   return (v128_t)(__f32x4){__c, __c, __c, __c};
387 }
388 
wasm_f64x2_const_splat(double __c)389 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_const_splat(double __c)
390     __REQUIRE_CONSTANT(__c) {
391   return (v128_t)(__f64x2){__c, __c};
392 }
393 
wasm_i8x16_splat(int8_t __a)394 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_splat(int8_t __a) {
395   return (v128_t)(__i8x16){__a, __a, __a, __a, __a, __a, __a, __a,
396                            __a, __a, __a, __a, __a, __a, __a, __a};
397 }
398 
wasm_i8x16_extract_lane(v128_t __a,int __i)399 static __inline__ int8_t __DEFAULT_FN_ATTRS wasm_i8x16_extract_lane(v128_t __a,
400                                                                     int __i)
401     __REQUIRE_CONSTANT(__i) {
402   return ((__i8x16)__a)[__i];
403 }
404 
wasm_u8x16_extract_lane(v128_t __a,int __i)405 static __inline__ uint8_t __DEFAULT_FN_ATTRS wasm_u8x16_extract_lane(v128_t __a,
406                                                                      int __i)
407     __REQUIRE_CONSTANT(__i) {
408   return ((__u8x16)__a)[__i];
409 }
410 
wasm_i8x16_replace_lane(v128_t __a,int __i,int8_t __b)411 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_replace_lane(v128_t __a,
412                                                                     int __i,
413                                                                     int8_t __b)
414     __REQUIRE_CONSTANT(__i) {
415   __i8x16 __v = (__i8x16)__a;
416   __v[__i] = __b;
417   return (v128_t)__v;
418 }
419 
wasm_i16x8_splat(int16_t __a)420 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_splat(int16_t __a) {
421   return (v128_t)(__i16x8){__a, __a, __a, __a, __a, __a, __a, __a};
422 }
423 
wasm_i16x8_extract_lane(v128_t __a,int __i)424 static __inline__ int16_t __DEFAULT_FN_ATTRS wasm_i16x8_extract_lane(v128_t __a,
425                                                                      int __i)
426     __REQUIRE_CONSTANT(__i) {
427   return ((__i16x8)__a)[__i];
428 }
429 
430 static __inline__ uint16_t __DEFAULT_FN_ATTRS
wasm_u16x8_extract_lane(v128_t __a,int __i)431 wasm_u16x8_extract_lane(v128_t __a, int __i) __REQUIRE_CONSTANT(__i) {
432   return ((__u16x8)__a)[__i];
433 }
434 
wasm_i16x8_replace_lane(v128_t __a,int __i,int16_t __b)435 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_replace_lane(v128_t __a,
436                                                                     int __i,
437                                                                     int16_t __b)
438     __REQUIRE_CONSTANT(__i) {
439   __i16x8 __v = (__i16x8)__a;
440   __v[__i] = __b;
441   return (v128_t)__v;
442 }
443 
wasm_i32x4_splat(int32_t __a)444 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_splat(int32_t __a) {
445   return (v128_t)(__i32x4){__a, __a, __a, __a};
446 }
447 
wasm_i32x4_extract_lane(v128_t __a,int __i)448 static __inline__ int32_t __DEFAULT_FN_ATTRS wasm_i32x4_extract_lane(v128_t __a,
449                                                                      int __i)
450     __REQUIRE_CONSTANT(__i) {
451   return ((__i32x4)__a)[__i];
452 }
453 
wasm_i32x4_replace_lane(v128_t __a,int __i,int32_t __b)454 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_replace_lane(v128_t __a,
455                                                                     int __i,
456                                                                     int32_t __b)
457     __REQUIRE_CONSTANT(__i) {
458   __i32x4 __v = (__i32x4)__a;
459   __v[__i] = __b;
460   return (v128_t)__v;
461 }
462 
wasm_i64x2_splat(int64_t __a)463 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_splat(int64_t __a) {
464   return (v128_t)(__i64x2){__a, __a};
465 }
466 
wasm_i64x2_extract_lane(v128_t __a,int __i)467 static __inline__ int64_t __DEFAULT_FN_ATTRS wasm_i64x2_extract_lane(v128_t __a,
468                                                                      int __i)
469     __REQUIRE_CONSTANT(__i) {
470   return ((__i64x2)__a)[__i];
471 }
472 
wasm_i64x2_replace_lane(v128_t __a,int __i,int64_t __b)473 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_replace_lane(v128_t __a,
474                                                                     int __i,
475                                                                     int64_t __b)
476     __REQUIRE_CONSTANT(__i) {
477   __i64x2 __v = (__i64x2)__a;
478   __v[__i] = __b;
479   return (v128_t)__v;
480 }
481 
wasm_f32x4_splat(float __a)482 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_splat(float __a) {
483   return (v128_t)(__f32x4){__a, __a, __a, __a};
484 }
485 
wasm_f32x4_extract_lane(v128_t __a,int __i)486 static __inline__ float __DEFAULT_FN_ATTRS wasm_f32x4_extract_lane(v128_t __a,
487                                                                    int __i)
488     __REQUIRE_CONSTANT(__i) {
489   return ((__f32x4)__a)[__i];
490 }
491 
wasm_f32x4_replace_lane(v128_t __a,int __i,float __b)492 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_replace_lane(v128_t __a,
493                                                                     int __i,
494                                                                     float __b)
495     __REQUIRE_CONSTANT(__i) {
496   __f32x4 __v = (__f32x4)__a;
497   __v[__i] = __b;
498   return (v128_t)__v;
499 }
500 
wasm_f64x2_splat(double __a)501 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_splat(double __a) {
502   return (v128_t)(__f64x2){__a, __a};
503 }
504 
wasm_f64x2_extract_lane(v128_t __a,int __i)505 static __inline__ double __DEFAULT_FN_ATTRS wasm_f64x2_extract_lane(v128_t __a,
506                                                                     int __i)
507     __REQUIRE_CONSTANT(__i) {
508   return ((__f64x2)__a)[__i];
509 }
510 
wasm_f64x2_replace_lane(v128_t __a,int __i,double __b)511 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_replace_lane(v128_t __a,
512                                                                     int __i,
513                                                                     double __b)
514     __REQUIRE_CONSTANT(__i) {
515   __f64x2 __v = (__f64x2)__a;
516   __v[__i] = __b;
517   return (v128_t)__v;
518 }
519 
wasm_i8x16_eq(v128_t __a,v128_t __b)520 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_eq(v128_t __a,
521                                                           v128_t __b) {
522   return (v128_t)((__i8x16)__a == (__i8x16)__b);
523 }
524 
wasm_i8x16_ne(v128_t __a,v128_t __b)525 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_ne(v128_t __a,
526                                                           v128_t __b) {
527   return (v128_t)((__i8x16)__a != (__i8x16)__b);
528 }
529 
wasm_i8x16_lt(v128_t __a,v128_t __b)530 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_lt(v128_t __a,
531                                                           v128_t __b) {
532   return (v128_t)((__i8x16)__a < (__i8x16)__b);
533 }
534 
wasm_u8x16_lt(v128_t __a,v128_t __b)535 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_lt(v128_t __a,
536                                                           v128_t __b) {
537   return (v128_t)((__u8x16)__a < (__u8x16)__b);
538 }
539 
wasm_i8x16_gt(v128_t __a,v128_t __b)540 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_gt(v128_t __a,
541                                                           v128_t __b) {
542   return (v128_t)((__i8x16)__a > (__i8x16)__b);
543 }
544 
wasm_u8x16_gt(v128_t __a,v128_t __b)545 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_gt(v128_t __a,
546                                                           v128_t __b) {
547   return (v128_t)((__u8x16)__a > (__u8x16)__b);
548 }
549 
wasm_i8x16_le(v128_t __a,v128_t __b)550 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_le(v128_t __a,
551                                                           v128_t __b) {
552   return (v128_t)((__i8x16)__a <= (__i8x16)__b);
553 }
554 
wasm_u8x16_le(v128_t __a,v128_t __b)555 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_le(v128_t __a,
556                                                           v128_t __b) {
557   return (v128_t)((__u8x16)__a <= (__u8x16)__b);
558 }
559 
wasm_i8x16_ge(v128_t __a,v128_t __b)560 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_ge(v128_t __a,
561                                                           v128_t __b) {
562   return (v128_t)((__i8x16)__a >= (__i8x16)__b);
563 }
564 
wasm_u8x16_ge(v128_t __a,v128_t __b)565 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_ge(v128_t __a,
566                                                           v128_t __b) {
567   return (v128_t)((__u8x16)__a >= (__u8x16)__b);
568 }
569 
wasm_i16x8_eq(v128_t __a,v128_t __b)570 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_eq(v128_t __a,
571                                                           v128_t __b) {
572   return (v128_t)((__i16x8)__a == (__i16x8)__b);
573 }
574 
wasm_i16x8_ne(v128_t __a,v128_t __b)575 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_ne(v128_t __a,
576                                                           v128_t __b) {
577   return (v128_t)((__u16x8)__a != (__u16x8)__b);
578 }
579 
wasm_i16x8_lt(v128_t __a,v128_t __b)580 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_lt(v128_t __a,
581                                                           v128_t __b) {
582   return (v128_t)((__i16x8)__a < (__i16x8)__b);
583 }
584 
wasm_u16x8_lt(v128_t __a,v128_t __b)585 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_lt(v128_t __a,
586                                                           v128_t __b) {
587   return (v128_t)((__u16x8)__a < (__u16x8)__b);
588 }
589 
wasm_i16x8_gt(v128_t __a,v128_t __b)590 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_gt(v128_t __a,
591                                                           v128_t __b) {
592   return (v128_t)((__i16x8)__a > (__i16x8)__b);
593 }
594 
wasm_u16x8_gt(v128_t __a,v128_t __b)595 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_gt(v128_t __a,
596                                                           v128_t __b) {
597   return (v128_t)((__u16x8)__a > (__u16x8)__b);
598 }
599 
wasm_i16x8_le(v128_t __a,v128_t __b)600 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_le(v128_t __a,
601                                                           v128_t __b) {
602   return (v128_t)((__i16x8)__a <= (__i16x8)__b);
603 }
604 
wasm_u16x8_le(v128_t __a,v128_t __b)605 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_le(v128_t __a,
606                                                           v128_t __b) {
607   return (v128_t)((__u16x8)__a <= (__u16x8)__b);
608 }
609 
wasm_i16x8_ge(v128_t __a,v128_t __b)610 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_ge(v128_t __a,
611                                                           v128_t __b) {
612   return (v128_t)((__i16x8)__a >= (__i16x8)__b);
613 }
614 
wasm_u16x8_ge(v128_t __a,v128_t __b)615 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_ge(v128_t __a,
616                                                           v128_t __b) {
617   return (v128_t)((__u16x8)__a >= (__u16x8)__b);
618 }
619 
wasm_i32x4_eq(v128_t __a,v128_t __b)620 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_eq(v128_t __a,
621                                                           v128_t __b) {
622   return (v128_t)((__i32x4)__a == (__i32x4)__b);
623 }
624 
wasm_i32x4_ne(v128_t __a,v128_t __b)625 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_ne(v128_t __a,
626                                                           v128_t __b) {
627   return (v128_t)((__i32x4)__a != (__i32x4)__b);
628 }
629 
wasm_i32x4_lt(v128_t __a,v128_t __b)630 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_lt(v128_t __a,
631                                                           v128_t __b) {
632   return (v128_t)((__i32x4)__a < (__i32x4)__b);
633 }
634 
wasm_u32x4_lt(v128_t __a,v128_t __b)635 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_lt(v128_t __a,
636                                                           v128_t __b) {
637   return (v128_t)((__u32x4)__a < (__u32x4)__b);
638 }
639 
wasm_i32x4_gt(v128_t __a,v128_t __b)640 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_gt(v128_t __a,
641                                                           v128_t __b) {
642   return (v128_t)((__i32x4)__a > (__i32x4)__b);
643 }
644 
wasm_u32x4_gt(v128_t __a,v128_t __b)645 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_gt(v128_t __a,
646                                                           v128_t __b) {
647   return (v128_t)((__u32x4)__a > (__u32x4)__b);
648 }
649 
wasm_i32x4_le(v128_t __a,v128_t __b)650 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_le(v128_t __a,
651                                                           v128_t __b) {
652   return (v128_t)((__i32x4)__a <= (__i32x4)__b);
653 }
654 
wasm_u32x4_le(v128_t __a,v128_t __b)655 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_le(v128_t __a,
656                                                           v128_t __b) {
657   return (v128_t)((__u32x4)__a <= (__u32x4)__b);
658 }
659 
wasm_i32x4_ge(v128_t __a,v128_t __b)660 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_ge(v128_t __a,
661                                                           v128_t __b) {
662   return (v128_t)((__i32x4)__a >= (__i32x4)__b);
663 }
664 
wasm_u32x4_ge(v128_t __a,v128_t __b)665 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_ge(v128_t __a,
666                                                           v128_t __b) {
667   return (v128_t)((__u32x4)__a >= (__u32x4)__b);
668 }
669 
wasm_i64x2_eq(v128_t __a,v128_t __b)670 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_eq(v128_t __a,
671                                                           v128_t __b) {
672   return (v128_t)((__i64x2)__a == (__i64x2)__b);
673 }
674 
wasm_i64x2_ne(v128_t __a,v128_t __b)675 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_ne(v128_t __a,
676                                                           v128_t __b) {
677   return (v128_t)((__i64x2)__a != (__i64x2)__b);
678 }
679 
wasm_i64x2_lt(v128_t __a,v128_t __b)680 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_lt(v128_t __a,
681                                                           v128_t __b) {
682   return (v128_t)((__i64x2)__a < (__i64x2)__b);
683 }
684 
wasm_i64x2_gt(v128_t __a,v128_t __b)685 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_gt(v128_t __a,
686                                                           v128_t __b) {
687   return (v128_t)((__i64x2)__a > (__i64x2)__b);
688 }
689 
wasm_i64x2_le(v128_t __a,v128_t __b)690 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_le(v128_t __a,
691                                                           v128_t __b) {
692   return (v128_t)((__i64x2)__a <= (__i64x2)__b);
693 }
694 
wasm_i64x2_ge(v128_t __a,v128_t __b)695 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_ge(v128_t __a,
696                                                           v128_t __b) {
697   return (v128_t)((__i64x2)__a >= (__i64x2)__b);
698 }
699 
wasm_f32x4_eq(v128_t __a,v128_t __b)700 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_eq(v128_t __a,
701                                                           v128_t __b) {
702   return (v128_t)((__f32x4)__a == (__f32x4)__b);
703 }
704 
wasm_f32x4_ne(v128_t __a,v128_t __b)705 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_ne(v128_t __a,
706                                                           v128_t __b) {
707   return (v128_t)((__f32x4)__a != (__f32x4)__b);
708 }
709 
wasm_f32x4_lt(v128_t __a,v128_t __b)710 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_lt(v128_t __a,
711                                                           v128_t __b) {
712   return (v128_t)((__f32x4)__a < (__f32x4)__b);
713 }
714 
wasm_f32x4_gt(v128_t __a,v128_t __b)715 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_gt(v128_t __a,
716                                                           v128_t __b) {
717   return (v128_t)((__f32x4)__a > (__f32x4)__b);
718 }
719 
wasm_f32x4_le(v128_t __a,v128_t __b)720 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_le(v128_t __a,
721                                                           v128_t __b) {
722   return (v128_t)((__f32x4)__a <= (__f32x4)__b);
723 }
724 
wasm_f32x4_ge(v128_t __a,v128_t __b)725 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_ge(v128_t __a,
726                                                           v128_t __b) {
727   return (v128_t)((__f32x4)__a >= (__f32x4)__b);
728 }
729 
wasm_f64x2_eq(v128_t __a,v128_t __b)730 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_eq(v128_t __a,
731                                                           v128_t __b) {
732   return (v128_t)((__f64x2)__a == (__f64x2)__b);
733 }
734 
wasm_f64x2_ne(v128_t __a,v128_t __b)735 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_ne(v128_t __a,
736                                                           v128_t __b) {
737   return (v128_t)((__f64x2)__a != (__f64x2)__b);
738 }
739 
wasm_f64x2_lt(v128_t __a,v128_t __b)740 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_lt(v128_t __a,
741                                                           v128_t __b) {
742   return (v128_t)((__f64x2)__a < (__f64x2)__b);
743 }
744 
wasm_f64x2_gt(v128_t __a,v128_t __b)745 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_gt(v128_t __a,
746                                                           v128_t __b) {
747   return (v128_t)((__f64x2)__a > (__f64x2)__b);
748 }
749 
wasm_f64x2_le(v128_t __a,v128_t __b)750 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_le(v128_t __a,
751                                                           v128_t __b) {
752   return (v128_t)((__f64x2)__a <= (__f64x2)__b);
753 }
754 
wasm_f64x2_ge(v128_t __a,v128_t __b)755 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_ge(v128_t __a,
756                                                           v128_t __b) {
757   return (v128_t)((__f64x2)__a >= (__f64x2)__b);
758 }
759 
wasm_v128_not(v128_t __a)760 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_not(v128_t __a) {
761   return ~__a;
762 }
763 
wasm_v128_and(v128_t __a,v128_t __b)764 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_and(v128_t __a,
765                                                           v128_t __b) {
766   return __a & __b;
767 }
768 
wasm_v128_or(v128_t __a,v128_t __b)769 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_or(v128_t __a,
770                                                          v128_t __b) {
771   return __a | __b;
772 }
773 
wasm_v128_xor(v128_t __a,v128_t __b)774 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_xor(v128_t __a,
775                                                           v128_t __b) {
776   return __a ^ __b;
777 }
778 
wasm_v128_andnot(v128_t __a,v128_t __b)779 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_andnot(v128_t __a,
780                                                              v128_t __b) {
781   return __a & ~__b;
782 }
783 
wasm_v128_any_true(v128_t __a)784 static __inline__ bool __DEFAULT_FN_ATTRS wasm_v128_any_true(v128_t __a) {
785   return __builtin_wasm_any_true_v128((__i8x16)__a);
786 }
787 
wasm_v128_bitselect(v128_t __a,v128_t __b,v128_t __mask)788 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_bitselect(v128_t __a,
789                                                                 v128_t __b,
790                                                                 v128_t __mask) {
791   return (v128_t)__builtin_wasm_bitselect((__i32x4)__a, (__i32x4)__b,
792                                           (__i32x4)__mask);
793 }
794 
wasm_i8x16_abs(v128_t __a)795 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_abs(v128_t __a) {
796   return (v128_t)__builtin_wasm_abs_i8x16((__i8x16)__a);
797 }
798 
wasm_i8x16_neg(v128_t __a)799 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_neg(v128_t __a) {
800   return (v128_t)(-(__u8x16)__a);
801 }
802 
wasm_i8x16_all_true(v128_t __a)803 static __inline__ bool __DEFAULT_FN_ATTRS wasm_i8x16_all_true(v128_t __a) {
804   return __builtin_wasm_all_true_i8x16((__i8x16)__a);
805 }
806 
wasm_i8x16_bitmask(v128_t __a)807 static __inline__ int32_t __DEFAULT_FN_ATTRS wasm_i8x16_bitmask(v128_t __a) {
808   return __builtin_wasm_bitmask_i8x16((__i8x16)__a);
809 }
810 
wasm_i8x16_popcnt(v128_t __a)811 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_popcnt(v128_t __a) {
812   return (v128_t)__builtin_wasm_popcnt_i8x16((__i8x16)__a);
813 }
814 
wasm_i8x16_shl(v128_t __a,int32_t __b)815 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_shl(v128_t __a,
816                                                            int32_t __b) {
817   return (v128_t)((__i8x16)__a << __b);
818 }
819 
wasm_i8x16_shr(v128_t __a,int32_t __b)820 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_shr(v128_t __a,
821                                                            int32_t __b) {
822   return (v128_t)((__i8x16)__a >> __b);
823 }
824 
wasm_u8x16_shr(v128_t __a,int32_t __b)825 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_shr(v128_t __a,
826                                                            int32_t __b) {
827   return (v128_t)((__u8x16)__a >> __b);
828 }
829 
wasm_i8x16_add(v128_t __a,v128_t __b)830 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_add(v128_t __a,
831                                                            v128_t __b) {
832   return (v128_t)((__u8x16)__a + (__u8x16)__b);
833 }
834 
wasm_i8x16_add_sat(v128_t __a,v128_t __b)835 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_add_sat(v128_t __a,
836                                                                v128_t __b) {
837   return (v128_t)__builtin_wasm_add_sat_s_i8x16((__i8x16)__a, (__i8x16)__b);
838 }
839 
wasm_u8x16_add_sat(v128_t __a,v128_t __b)840 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_add_sat(v128_t __a,
841                                                                v128_t __b) {
842   return (v128_t)__builtin_wasm_add_sat_u_i8x16((__u8x16)__a, (__u8x16)__b);
843 }
844 
wasm_i8x16_sub(v128_t __a,v128_t __b)845 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_sub(v128_t __a,
846                                                            v128_t __b) {
847   return (v128_t)((__u8x16)__a - (__u8x16)__b);
848 }
849 
wasm_i8x16_sub_sat(v128_t __a,v128_t __b)850 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_sub_sat(v128_t __a,
851                                                                v128_t __b) {
852   return (v128_t)__builtin_wasm_sub_sat_s_i8x16((__i8x16)__a, (__i8x16)__b);
853 }
854 
wasm_u8x16_sub_sat(v128_t __a,v128_t __b)855 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_sub_sat(v128_t __a,
856                                                                v128_t __b) {
857   return (v128_t)__builtin_wasm_sub_sat_u_i8x16((__u8x16)__a, (__u8x16)__b);
858 }
859 
wasm_i8x16_min(v128_t __a,v128_t __b)860 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_min(v128_t __a,
861                                                            v128_t __b) {
862   return (v128_t)__builtin_wasm_min_s_i8x16((__i8x16)__a, (__i8x16)__b);
863 }
864 
wasm_u8x16_min(v128_t __a,v128_t __b)865 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_min(v128_t __a,
866                                                            v128_t __b) {
867   return (v128_t)__builtin_wasm_min_u_i8x16((__u8x16)__a, (__u8x16)__b);
868 }
869 
wasm_i8x16_max(v128_t __a,v128_t __b)870 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_max(v128_t __a,
871                                                            v128_t __b) {
872   return (v128_t)__builtin_wasm_max_s_i8x16((__i8x16)__a, (__i8x16)__b);
873 }
874 
wasm_u8x16_max(v128_t __a,v128_t __b)875 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_max(v128_t __a,
876                                                            v128_t __b) {
877   return (v128_t)__builtin_wasm_max_u_i8x16((__u8x16)__a, (__u8x16)__b);
878 }
879 
wasm_u8x16_avgr(v128_t __a,v128_t __b)880 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_avgr(v128_t __a,
881                                                             v128_t __b) {
882   return (v128_t)__builtin_wasm_avgr_u_i8x16((__u8x16)__a, (__u8x16)__b);
883 }
884 
wasm_i16x8_abs(v128_t __a)885 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_abs(v128_t __a) {
886   return (v128_t)__builtin_wasm_abs_i16x8((__i16x8)__a);
887 }
888 
wasm_i16x8_neg(v128_t __a)889 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_neg(v128_t __a) {
890   return (v128_t)(-(__u16x8)__a);
891 }
892 
wasm_i16x8_all_true(v128_t __a)893 static __inline__ bool __DEFAULT_FN_ATTRS wasm_i16x8_all_true(v128_t __a) {
894   return __builtin_wasm_all_true_i16x8((__i16x8)__a);
895 }
896 
wasm_i16x8_bitmask(v128_t __a)897 static __inline__ int32_t __DEFAULT_FN_ATTRS wasm_i16x8_bitmask(v128_t __a) {
898   return __builtin_wasm_bitmask_i16x8((__i16x8)__a);
899 }
900 
wasm_i16x8_shl(v128_t __a,int32_t __b)901 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_shl(v128_t __a,
902                                                            int32_t __b) {
903   return (v128_t)((__i16x8)__a << __b);
904 }
905 
wasm_i16x8_shr(v128_t __a,int32_t __b)906 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_shr(v128_t __a,
907                                                            int32_t __b) {
908   return (v128_t)((__i16x8)__a >> __b);
909 }
910 
wasm_u16x8_shr(v128_t __a,int32_t __b)911 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_shr(v128_t __a,
912                                                            int32_t __b) {
913   return (v128_t)((__u16x8)__a >> __b);
914 }
915 
wasm_i16x8_add(v128_t __a,v128_t __b)916 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_add(v128_t __a,
917                                                            v128_t __b) {
918   return (v128_t)((__u16x8)__a + (__u16x8)__b);
919 }
920 
wasm_i16x8_add_sat(v128_t __a,v128_t __b)921 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_add_sat(v128_t __a,
922                                                                v128_t __b) {
923   return (v128_t)__builtin_wasm_add_sat_s_i16x8((__i16x8)__a, (__i16x8)__b);
924 }
925 
wasm_u16x8_add_sat(v128_t __a,v128_t __b)926 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_add_sat(v128_t __a,
927                                                                v128_t __b) {
928   return (v128_t)__builtin_wasm_add_sat_u_i16x8((__u16x8)__a, (__u16x8)__b);
929 }
930 
wasm_i16x8_sub(v128_t __a,v128_t __b)931 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_sub(v128_t __a,
932                                                            v128_t __b) {
933   return (v128_t)((__i16x8)__a - (__i16x8)__b);
934 }
935 
wasm_i16x8_sub_sat(v128_t __a,v128_t __b)936 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_sub_sat(v128_t __a,
937                                                                v128_t __b) {
938   return (v128_t)__builtin_wasm_sub_sat_s_i16x8((__i16x8)__a, (__i16x8)__b);
939 }
940 
wasm_u16x8_sub_sat(v128_t __a,v128_t __b)941 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_sub_sat(v128_t __a,
942                                                                v128_t __b) {
943   return (v128_t)__builtin_wasm_sub_sat_u_i16x8((__u16x8)__a, (__u16x8)__b);
944 }
945 
wasm_i16x8_mul(v128_t __a,v128_t __b)946 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_mul(v128_t __a,
947                                                            v128_t __b) {
948   return (v128_t)((__u16x8)__a * (__u16x8)__b);
949 }
950 
wasm_i16x8_min(v128_t __a,v128_t __b)951 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_min(v128_t __a,
952                                                            v128_t __b) {
953   return (v128_t)__builtin_wasm_min_s_i16x8((__i16x8)__a, (__i16x8)__b);
954 }
955 
wasm_u16x8_min(v128_t __a,v128_t __b)956 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_min(v128_t __a,
957                                                            v128_t __b) {
958   return (v128_t)__builtin_wasm_min_u_i16x8((__u16x8)__a, (__u16x8)__b);
959 }
960 
wasm_i16x8_max(v128_t __a,v128_t __b)961 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_max(v128_t __a,
962                                                            v128_t __b) {
963   return (v128_t)__builtin_wasm_max_s_i16x8((__i16x8)__a, (__i16x8)__b);
964 }
965 
wasm_u16x8_max(v128_t __a,v128_t __b)966 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_max(v128_t __a,
967                                                            v128_t __b) {
968   return (v128_t)__builtin_wasm_max_u_i16x8((__u16x8)__a, (__u16x8)__b);
969 }
970 
wasm_u16x8_avgr(v128_t __a,v128_t __b)971 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_avgr(v128_t __a,
972                                                             v128_t __b) {
973   return (v128_t)__builtin_wasm_avgr_u_i16x8((__u16x8)__a, (__u16x8)__b);
974 }
975 
wasm_i32x4_abs(v128_t __a)976 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_abs(v128_t __a) {
977   return (v128_t)__builtin_wasm_abs_i32x4((__i32x4)__a);
978 }
979 
wasm_i32x4_neg(v128_t __a)980 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_neg(v128_t __a) {
981   return (v128_t)(-(__u32x4)__a);
982 }
983 
wasm_i32x4_all_true(v128_t __a)984 static __inline__ bool __DEFAULT_FN_ATTRS wasm_i32x4_all_true(v128_t __a) {
985   return __builtin_wasm_all_true_i32x4((__i32x4)__a);
986 }
987 
wasm_i32x4_bitmask(v128_t __a)988 static __inline__ int32_t __DEFAULT_FN_ATTRS wasm_i32x4_bitmask(v128_t __a) {
989   return __builtin_wasm_bitmask_i32x4((__i32x4)__a);
990 }
991 
wasm_i32x4_shl(v128_t __a,int32_t __b)992 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_shl(v128_t __a,
993                                                            int32_t __b) {
994   return (v128_t)((__i32x4)__a << __b);
995 }
996 
wasm_i32x4_shr(v128_t __a,int32_t __b)997 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_shr(v128_t __a,
998                                                            int32_t __b) {
999   return (v128_t)((__i32x4)__a >> __b);
1000 }
1001 
wasm_u32x4_shr(v128_t __a,int32_t __b)1002 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_shr(v128_t __a,
1003                                                            int32_t __b) {
1004   return (v128_t)((__u32x4)__a >> __b);
1005 }
1006 
wasm_i32x4_add(v128_t __a,v128_t __b)1007 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_add(v128_t __a,
1008                                                            v128_t __b) {
1009   return (v128_t)((__u32x4)__a + (__u32x4)__b);
1010 }
1011 
wasm_i32x4_sub(v128_t __a,v128_t __b)1012 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_sub(v128_t __a,
1013                                                            v128_t __b) {
1014   return (v128_t)((__u32x4)__a - (__u32x4)__b);
1015 }
1016 
wasm_i32x4_mul(v128_t __a,v128_t __b)1017 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_mul(v128_t __a,
1018                                                            v128_t __b) {
1019   return (v128_t)((__u32x4)__a * (__u32x4)__b);
1020 }
1021 
wasm_i32x4_min(v128_t __a,v128_t __b)1022 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_min(v128_t __a,
1023                                                            v128_t __b) {
1024   return (v128_t)__builtin_wasm_min_s_i32x4((__i32x4)__a, (__i32x4)__b);
1025 }
1026 
wasm_u32x4_min(v128_t __a,v128_t __b)1027 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_min(v128_t __a,
1028                                                            v128_t __b) {
1029   return (v128_t)__builtin_wasm_min_u_i32x4((__u32x4)__a, (__u32x4)__b);
1030 }
1031 
wasm_i32x4_max(v128_t __a,v128_t __b)1032 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_max(v128_t __a,
1033                                                            v128_t __b) {
1034   return (v128_t)__builtin_wasm_max_s_i32x4((__i32x4)__a, (__i32x4)__b);
1035 }
1036 
wasm_u32x4_max(v128_t __a,v128_t __b)1037 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_max(v128_t __a,
1038                                                            v128_t __b) {
1039   return (v128_t)__builtin_wasm_max_u_i32x4((__u32x4)__a, (__u32x4)__b);
1040 }
1041 
wasm_i32x4_dot_i16x8(v128_t __a,v128_t __b)1042 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_dot_i16x8(v128_t __a,
1043                                                                  v128_t __b) {
1044   return (v128_t)__builtin_wasm_dot_s_i32x4_i16x8((__i16x8)__a, (__i16x8)__b);
1045 }
1046 
wasm_i64x2_abs(v128_t __a)1047 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_abs(v128_t __a) {
1048   return (v128_t)__builtin_wasm_abs_i64x2((__i64x2)__a);
1049 }
1050 
wasm_i64x2_neg(v128_t __a)1051 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_neg(v128_t __a) {
1052   return (v128_t)(-(__u64x2)__a);
1053 }
1054 
wasm_i64x2_all_true(v128_t __a)1055 static __inline__ bool __DEFAULT_FN_ATTRS wasm_i64x2_all_true(v128_t __a) {
1056   return __builtin_wasm_all_true_i64x2((__i64x2)__a);
1057 }
1058 
wasm_i64x2_bitmask(v128_t __a)1059 static __inline__ int32_t __DEFAULT_FN_ATTRS wasm_i64x2_bitmask(v128_t __a) {
1060   return __builtin_wasm_bitmask_i64x2((__i64x2)__a);
1061 }
1062 
wasm_i64x2_shl(v128_t __a,int32_t __b)1063 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_shl(v128_t __a,
1064                                                            int32_t __b) {
1065   return (v128_t)((__i64x2)__a << (int64_t)__b);
1066 }
1067 
wasm_i64x2_shr(v128_t __a,int32_t __b)1068 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_shr(v128_t __a,
1069                                                            int32_t __b) {
1070   return (v128_t)((__i64x2)__a >> (int64_t)__b);
1071 }
1072 
wasm_u64x2_shr(v128_t __a,int32_t __b)1073 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u64x2_shr(v128_t __a,
1074                                                            int32_t __b) {
1075   return (v128_t)((__u64x2)__a >> (int64_t)__b);
1076 }
1077 
wasm_i64x2_add(v128_t __a,v128_t __b)1078 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_add(v128_t __a,
1079                                                            v128_t __b) {
1080   return (v128_t)((__u64x2)__a + (__u64x2)__b);
1081 }
1082 
wasm_i64x2_sub(v128_t __a,v128_t __b)1083 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_sub(v128_t __a,
1084                                                            v128_t __b) {
1085   return (v128_t)((__u64x2)__a - (__u64x2)__b);
1086 }
1087 
wasm_i64x2_mul(v128_t __a,v128_t __b)1088 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_mul(v128_t __a,
1089                                                            v128_t __b) {
1090   return (v128_t)((__u64x2)__a * (__u64x2)__b);
1091 }
1092 
wasm_f32x4_abs(v128_t __a)1093 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_abs(v128_t __a) {
1094   return (v128_t)__builtin_wasm_abs_f32x4((__f32x4)__a);
1095 }
1096 
wasm_f32x4_neg(v128_t __a)1097 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_neg(v128_t __a) {
1098   return (v128_t)(-(__f32x4)__a);
1099 }
1100 
wasm_f32x4_sqrt(v128_t __a)1101 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_sqrt(v128_t __a) {
1102   return (v128_t)__builtin_wasm_sqrt_f32x4((__f32x4)__a);
1103 }
1104 
wasm_f32x4_ceil(v128_t __a)1105 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_ceil(v128_t __a) {
1106   return (v128_t)__builtin_wasm_ceil_f32x4((__f32x4)__a);
1107 }
1108 
wasm_f32x4_floor(v128_t __a)1109 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_floor(v128_t __a) {
1110   return (v128_t)__builtin_wasm_floor_f32x4((__f32x4)__a);
1111 }
1112 
wasm_f32x4_trunc(v128_t __a)1113 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_trunc(v128_t __a) {
1114   return (v128_t)__builtin_wasm_trunc_f32x4((__f32x4)__a);
1115 }
1116 
wasm_f32x4_nearest(v128_t __a)1117 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_nearest(v128_t __a) {
1118   return (v128_t)__builtin_wasm_nearest_f32x4((__f32x4)__a);
1119 }
1120 
wasm_f32x4_add(v128_t __a,v128_t __b)1121 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_add(v128_t __a,
1122                                                            v128_t __b) {
1123   return (v128_t)((__f32x4)__a + (__f32x4)__b);
1124 }
1125 
wasm_f32x4_sub(v128_t __a,v128_t __b)1126 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_sub(v128_t __a,
1127                                                            v128_t __b) {
1128   return (v128_t)((__f32x4)__a - (__f32x4)__b);
1129 }
1130 
wasm_f32x4_mul(v128_t __a,v128_t __b)1131 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_mul(v128_t __a,
1132                                                            v128_t __b) {
1133   return (v128_t)((__f32x4)__a * (__f32x4)__b);
1134 }
1135 
wasm_f32x4_div(v128_t __a,v128_t __b)1136 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_div(v128_t __a,
1137                                                            v128_t __b) {
1138   return (v128_t)((__f32x4)__a / (__f32x4)__b);
1139 }
1140 
wasm_f32x4_min(v128_t __a,v128_t __b)1141 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_min(v128_t __a,
1142                                                            v128_t __b) {
1143   return (v128_t)__builtin_wasm_min_f32x4((__f32x4)__a, (__f32x4)__b);
1144 }
1145 
wasm_f32x4_max(v128_t __a,v128_t __b)1146 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_max(v128_t __a,
1147                                                            v128_t __b) {
1148   return (v128_t)__builtin_wasm_max_f32x4((__f32x4)__a, (__f32x4)__b);
1149 }
1150 
wasm_f32x4_pmin(v128_t __a,v128_t __b)1151 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_pmin(v128_t __a,
1152                                                             v128_t __b) {
1153   __i32x4 __mask = (__i32x4)((__f32x4)__b < (__f32x4)__a);
1154   return (v128_t)((((__i32x4)__b) & __mask) | (((__i32x4)__a) & ~__mask));
1155 }
1156 
wasm_f32x4_pmax(v128_t __a,v128_t __b)1157 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_pmax(v128_t __a,
1158                                                             v128_t __b) {
1159   __i32x4 __mask = (__i32x4)((__f32x4)__a < (__f32x4)__b);
1160   return (v128_t)((((__i32x4)__b) & __mask) | (((__i32x4)__a) & ~__mask));
1161 }
1162 
wasm_f64x2_abs(v128_t __a)1163 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_abs(v128_t __a) {
1164   return (v128_t)__builtin_wasm_abs_f64x2((__f64x2)__a);
1165 }
1166 
wasm_f64x2_neg(v128_t __a)1167 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_neg(v128_t __a) {
1168   return (v128_t)(-(__f64x2)__a);
1169 }
1170 
wasm_f64x2_sqrt(v128_t __a)1171 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_sqrt(v128_t __a) {
1172   return (v128_t)__builtin_wasm_sqrt_f64x2((__f64x2)__a);
1173 }
1174 
wasm_f64x2_ceil(v128_t __a)1175 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_ceil(v128_t __a) {
1176   return (v128_t)__builtin_wasm_ceil_f64x2((__f64x2)__a);
1177 }
1178 
wasm_f64x2_floor(v128_t __a)1179 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_floor(v128_t __a) {
1180   return (v128_t)__builtin_wasm_floor_f64x2((__f64x2)__a);
1181 }
1182 
wasm_f64x2_trunc(v128_t __a)1183 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_trunc(v128_t __a) {
1184   return (v128_t)__builtin_wasm_trunc_f64x2((__f64x2)__a);
1185 }
1186 
wasm_f64x2_nearest(v128_t __a)1187 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_nearest(v128_t __a) {
1188   return (v128_t)__builtin_wasm_nearest_f64x2((__f64x2)__a);
1189 }
1190 
wasm_f64x2_add(v128_t __a,v128_t __b)1191 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_add(v128_t __a,
1192                                                            v128_t __b) {
1193   return (v128_t)((__f64x2)__a + (__f64x2)__b);
1194 }
1195 
wasm_f64x2_sub(v128_t __a,v128_t __b)1196 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_sub(v128_t __a,
1197                                                            v128_t __b) {
1198   return (v128_t)((__f64x2)__a - (__f64x2)__b);
1199 }
1200 
wasm_f64x2_mul(v128_t __a,v128_t __b)1201 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_mul(v128_t __a,
1202                                                            v128_t __b) {
1203   return (v128_t)((__f64x2)__a * (__f64x2)__b);
1204 }
1205 
wasm_f64x2_div(v128_t __a,v128_t __b)1206 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_div(v128_t __a,
1207                                                            v128_t __b) {
1208   return (v128_t)((__f64x2)__a / (__f64x2)__b);
1209 }
1210 
wasm_f64x2_min(v128_t __a,v128_t __b)1211 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_min(v128_t __a,
1212                                                            v128_t __b) {
1213   return (v128_t)__builtin_wasm_min_f64x2((__f64x2)__a, (__f64x2)__b);
1214 }
1215 
wasm_f64x2_max(v128_t __a,v128_t __b)1216 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_max(v128_t __a,
1217                                                            v128_t __b) {
1218   return (v128_t)__builtin_wasm_max_f64x2((__f64x2)__a, (__f64x2)__b);
1219 }
1220 
wasm_f64x2_pmin(v128_t __a,v128_t __b)1221 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_pmin(v128_t __a,
1222                                                             v128_t __b) {
1223   __i64x2 __mask = (__i64x2)((__f64x2)__b < (__f64x2)__a);
1224   return (v128_t)((((__i64x2)__b) & __mask) | (((__i64x2)__a) & ~__mask));
1225 }
1226 
wasm_f64x2_pmax(v128_t __a,v128_t __b)1227 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_pmax(v128_t __a,
1228                                                             v128_t __b) {
1229   __i64x2 __mask = (__i64x2)((__f64x2)__a < (__f64x2)__b);
1230   return (v128_t)((((__i64x2)__b) & __mask) | (((__i64x2)__a) & ~__mask));
1231 }
1232 
1233 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_trunc_sat_f32x4(v128_t __a)1234 wasm_i32x4_trunc_sat_f32x4(v128_t __a) {
1235   return (v128_t)__builtin_wasm_trunc_saturate_s_i32x4_f32x4((__f32x4)__a);
1236 }
1237 
1238 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_trunc_sat_f32x4(v128_t __a)1239 wasm_u32x4_trunc_sat_f32x4(v128_t __a) {
1240   return (v128_t)__builtin_wasm_trunc_saturate_u_i32x4_f32x4((__f32x4)__a);
1241 }
1242 
1243 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_convert_i32x4(v128_t __a)1244 wasm_f32x4_convert_i32x4(v128_t __a) {
1245   return (v128_t) __builtin_convertvector((__i32x4)__a, __f32x4);
1246 }
1247 
1248 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_convert_u32x4(v128_t __a)1249 wasm_f32x4_convert_u32x4(v128_t __a) {
1250   return (v128_t) __builtin_convertvector((__u32x4)__a, __f32x4);
1251 }
1252 
1253 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_convert_low_i32x4(v128_t __a)1254 wasm_f64x2_convert_low_i32x4(v128_t __a) {
1255   return (v128_t) __builtin_convertvector((__i32x2){__a[0], __a[1]}, __f64x2);
1256 }
1257 
1258 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_convert_low_u32x4(v128_t __a)1259 wasm_f64x2_convert_low_u32x4(v128_t __a) {
1260   return (v128_t) __builtin_convertvector((__u32x2){__a[0], __a[1]}, __f64x2);
1261 }
1262 
1263 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_trunc_sat_f64x2_zero(v128_t __a)1264 wasm_i32x4_trunc_sat_f64x2_zero(v128_t __a) {
1265   return (v128_t)__builtin_wasm_trunc_sat_zero_s_f64x2_i32x4((__f64x2)__a);
1266 }
1267 
1268 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_trunc_sat_f64x2_zero(v128_t __a)1269 wasm_u32x4_trunc_sat_f64x2_zero(v128_t __a) {
1270   return (v128_t)__builtin_wasm_trunc_sat_zero_u_f64x2_i32x4((__f64x2)__a);
1271 }
1272 
1273 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_demote_f64x2_zero(v128_t __a)1274 wasm_f32x4_demote_f64x2_zero(v128_t __a) {
1275   return (v128_t) __builtin_convertvector(
1276       __builtin_shufflevector((__f64x2)__a, (__f64x2){0, 0}, 0, 1, 2, 3),
1277       __f32x4);
1278 }
1279 
// Promotes the low two f32 lanes to f64 (wasm `f64x2.promote_low_f32x4`):
// lanes 0 and 1 are gathered into a 2-element float vector, then widened.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_promote_low_f32x4(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__f32x2){((__f32x4)__a)[0], ((__f32x4)__a)[1]}, __f64x2);
}
1285 
// Compile-time byte shuffle: each __cN must be a constant selecting one byte
// lane (0-31) from the 32-byte concatenation of __a (lanes 0-15) and __b
// (lanes 16-31). Implemented as a macro because the builtin requires
// immediate lane indices.
#define wasm_i8x16_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7, __c8, __c9, __c10, __c11, __c12, __c13,       \
                           __c14, __c15)                                       \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), __c0, __c1, __c2, __c3, __c4, __c5,      \
      __c6, __c7, __c8, __c9, __c10, __c11, __c12, __c13, __c14, __c15))
1292 
// 16-bit lane shuffle built on the byte shuffle: each 16-bit lane index __cN
// (0-15 across __a then __b) is scaled to its two constituent byte indices
// (__cN*2 and __cN*2+1, little-endian lane order).
#define wasm_i16x8_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7)                                               \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*2, (__c0)*2 + 1, (__c1)*2,        \
      (__c1)*2 + 1, (__c2)*2, (__c2)*2 + 1, (__c3)*2, (__c3)*2 + 1, (__c4)*2,  \
      (__c4)*2 + 1, (__c5)*2, (__c5)*2 + 1, (__c6)*2, (__c6)*2 + 1, (__c7)*2,  \
      (__c7)*2 + 1))
1300 
// 32-bit lane shuffle built on the byte shuffle: each 32-bit lane index __cN
// (0-7 across __a then __b) expands to its four byte indices __cN*4 .. __cN*4+3.
#define wasm_i32x4_shuffle(__a, __b, __c0, __c1, __c2, __c3)                   \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*4, (__c0)*4 + 1, (__c0)*4 + 2,    \
      (__c0)*4 + 3, (__c1)*4, (__c1)*4 + 1, (__c1)*4 + 2, (__c1)*4 + 3,        \
      (__c2)*4, (__c2)*4 + 1, (__c2)*4 + 2, (__c2)*4 + 3, (__c3)*4,            \
      (__c3)*4 + 1, (__c3)*4 + 2, (__c3)*4 + 3))
1307 
// 64-bit lane shuffle built on the byte shuffle: each 64-bit lane index __cN
// (0-3 across __a then __b) expands to its eight byte indices __cN*8 .. __cN*8+7.
#define wasm_i64x2_shuffle(__a, __b, __c0, __c1)                               \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*8, (__c0)*8 + 1, (__c0)*8 + 2,    \
      (__c0)*8 + 3, (__c0)*8 + 4, (__c0)*8 + 5, (__c0)*8 + 6, (__c0)*8 + 7,    \
      (__c1)*8, (__c1)*8 + 1, (__c1)*8 + 2, (__c1)*8 + 3, (__c1)*8 + 4,        \
      (__c1)*8 + 5, (__c1)*8 + 6, (__c1)*8 + 7))
1314 
// Runtime byte shuffle (wasm `i8x16.swizzle`): each byte of __b selects a
// byte lane of __a; indices need not be constants, unlike wasm_i8x16_shuffle.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_swizzle(v128_t __a,
                                                               v128_t __b) {
  return (v128_t)__builtin_wasm_swizzle_i8x16((__i8x16)__a, (__i8x16)__b);
}
1319 
// Narrows the sixteen 16-bit lanes of __a (low half of the result) and __b
// (high half) to signed 8-bit lanes with signed saturation (wasm
// `i8x16.narrow_i16x8_s`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_narrow_i16x8(v128_t __a, v128_t __b) {
  return (v128_t)__builtin_wasm_narrow_s_i8x16_i16x8((__i16x8)__a,
                                                     (__i16x8)__b);
}
1325 
// Narrows signed 16-bit lanes to unsigned 8-bit lanes with unsigned
// saturation (wasm `i8x16.narrow_i16x8_u`); inputs are signed per the
// builtin's signature.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_narrow_i16x8(v128_t __a, v128_t __b) {
  return (v128_t)__builtin_wasm_narrow_u_i8x16_i16x8((__i16x8)__a,
                                                     (__i16x8)__b);
}
1331 
// Narrows the eight 32-bit lanes of __a and __b to signed 16-bit lanes with
// signed saturation (wasm `i16x8.narrow_i32x4_s`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_narrow_i32x4(v128_t __a, v128_t __b) {
  return (v128_t)__builtin_wasm_narrow_s_i16x8_i32x4((__i32x4)__a,
                                                     (__i32x4)__b);
}
1337 
// Narrows signed 32-bit lanes to unsigned 16-bit lanes with unsigned
// saturation (wasm `i16x8.narrow_i32x4_u`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_narrow_i32x4(v128_t __a, v128_t __b) {
  return (v128_t)__builtin_wasm_narrow_u_i16x8_i32x4((__i32x4)__a,
                                                     (__i32x4)__b);
}
1343 
// Sign-extends the low eight 8-bit lanes to 16-bit lanes (wasm
// `i16x8.extend_low_i8x16_s`): gather lanes 0-7 into an 8-byte vector, then
// widen with __builtin_convertvector (sign extension follows the signed
// element type).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_extend_low_i8x16(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__i8x8){((__i8x16)__a)[0], ((__i8x16)__a)[1], ((__i8x16)__a)[2],
               ((__i8x16)__a)[3], ((__i8x16)__a)[4], ((__i8x16)__a)[5],
               ((__i8x16)__a)[6], ((__i8x16)__a)[7]},
      __i16x8);
}
1352 
// Sign-extends the high eight 8-bit lanes (8-15) to 16-bit lanes (wasm
// `i16x8.extend_high_i8x16_s`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_extend_high_i8x16(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__i8x8){((__i8x16)__a)[8], ((__i8x16)__a)[9], ((__i8x16)__a)[10],
               ((__i8x16)__a)[11], ((__i8x16)__a)[12], ((__i8x16)__a)[13],
               ((__i8x16)__a)[14], ((__i8x16)__a)[15]},
      __i16x8);
}
1361 
// Zero-extends the low eight 8-bit lanes to 16-bit lanes (wasm
// `i16x8.extend_low_i8x16_u`); the unsigned element types make the widening
// a zero extension.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_extend_low_u8x16(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__u8x8){((__u8x16)__a)[0], ((__u8x16)__a)[1], ((__u8x16)__a)[2],
               ((__u8x16)__a)[3], ((__u8x16)__a)[4], ((__u8x16)__a)[5],
               ((__u8x16)__a)[6], ((__u8x16)__a)[7]},
      __u16x8);
}
1370 
// Zero-extends the high eight 8-bit lanes (8-15) to 16-bit lanes (wasm
// `i16x8.extend_high_i8x16_u`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_extend_high_u8x16(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__u8x8){((__u8x16)__a)[8], ((__u8x16)__a)[9], ((__u8x16)__a)[10],
               ((__u8x16)__a)[11], ((__u8x16)__a)[12], ((__u8x16)__a)[13],
               ((__u8x16)__a)[14], ((__u8x16)__a)[15]},
      __u16x8);
}
1379 
// Sign-extends the low four 16-bit lanes to 32-bit lanes (wasm
// `i32x4.extend_low_i16x8_s`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_extend_low_i16x8(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__i16x4){((__i16x8)__a)[0], ((__i16x8)__a)[1], ((__i16x8)__a)[2],
                ((__i16x8)__a)[3]},
      __i32x4);
}
1387 
// Sign-extends the high four 16-bit lanes (4-7) to 32-bit lanes (wasm
// `i32x4.extend_high_i16x8_s`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_extend_high_i16x8(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__i16x4){((__i16x8)__a)[4], ((__i16x8)__a)[5], ((__i16x8)__a)[6],
                ((__i16x8)__a)[7]},
      __i32x4);
}
1395 
// Zero-extends the low four 16-bit lanes to 32-bit lanes (wasm
// `i32x4.extend_low_i16x8_u`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_extend_low_u16x8(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__u16x4){((__u16x8)__a)[0], ((__u16x8)__a)[1], ((__u16x8)__a)[2],
                ((__u16x8)__a)[3]},
      __u32x4);
}
1403 
// Zero-extends the high four 16-bit lanes (4-7) to 32-bit lanes (wasm
// `i32x4.extend_high_i16x8_u`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_extend_high_u16x8(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__u16x4){((__u16x8)__a)[4], ((__u16x8)__a)[5], ((__u16x8)__a)[6],
                ((__u16x8)__a)[7]},
      __u32x4);
}
1411 
// Sign-extends the low two 32-bit lanes to 64-bit lanes (wasm
// `i64x2.extend_low_i32x4_s`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_extend_low_i32x4(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__i32x2){((__i32x4)__a)[0], ((__i32x4)__a)[1]}, __i64x2);
}
1417 
// Sign-extends the high two 32-bit lanes (2-3) to 64-bit lanes (wasm
// `i64x2.extend_high_i32x4_s`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_extend_high_i32x4(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__i32x2){((__i32x4)__a)[2], ((__i32x4)__a)[3]}, __i64x2);
}
1423 
// Zero-extends the low two 32-bit lanes to 64-bit lanes (wasm
// `i64x2.extend_low_i32x4_u`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_extend_low_u32x4(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__u32x2){((__u32x4)__a)[0], ((__u32x4)__a)[1]}, __u64x2);
}
1429 
// Zero-extends the high two 32-bit lanes (2-3) to 64-bit lanes (wasm
// `i64x2.extend_high_i32x4_u`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_extend_high_u32x4(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__u32x2){((__u32x4)__a)[2], ((__u32x4)__a)[3]}, __u64x2);
}
1435 
// Adds each pair of adjacent signed 8-bit lanes into a 16-bit lane (wasm
// `i16x8.extadd_pairwise_i8x16_s`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_extadd_pairwise_i8x16(v128_t __a) {
  return (v128_t)__builtin_wasm_extadd_pairwise_i8x16_s_i16x8((__i8x16)__a);
}
1440 
// Adds each pair of adjacent unsigned 8-bit lanes into a 16-bit lane (wasm
// `i16x8.extadd_pairwise_i8x16_u`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_extadd_pairwise_u8x16(v128_t __a) {
  return (v128_t)__builtin_wasm_extadd_pairwise_i8x16_u_i16x8((__u8x16)__a);
}
1445 
// Adds each pair of adjacent signed 16-bit lanes into a 32-bit lane (wasm
// `i32x4.extadd_pairwise_i16x8_s`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_extadd_pairwise_i16x8(v128_t __a) {
  return (v128_t)__builtin_wasm_extadd_pairwise_i16x8_s_i32x4((__i16x8)__a);
}
1450 
// Adds each pair of adjacent unsigned 16-bit lanes into a 32-bit lane (wasm
// `i32x4.extadd_pairwise_i16x8_u`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_extadd_pairwise_u16x8(v128_t __a) {
  return (v128_t)__builtin_wasm_extadd_pairwise_i16x8_u_i32x4((__u16x8)__a);
}
1455 
1456 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_extmul_low_i8x16(v128_t __a,v128_t __b)1457 wasm_i16x8_extmul_low_i8x16(v128_t __a, v128_t __b) {
1458   return (v128_t)((__i16x8)wasm_i16x8_extend_low_i8x16(__a) *
1459                   (__i16x8)wasm_i16x8_extend_low_i8x16(__b));
1460 }
1461 
1462 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_extmul_high_i8x16(v128_t __a,v128_t __b)1463 wasm_i16x8_extmul_high_i8x16(v128_t __a, v128_t __b) {
1464   return (v128_t)((__i16x8)wasm_i16x8_extend_high_i8x16(__a) *
1465                   (__i16x8)wasm_i16x8_extend_high_i8x16(__b));
1466 }
1467 
1468 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_extmul_low_u8x16(v128_t __a,v128_t __b)1469 wasm_u16x8_extmul_low_u8x16(v128_t __a, v128_t __b) {
1470   return (v128_t)((__u16x8)wasm_u16x8_extend_low_u8x16(__a) *
1471                   (__u16x8)wasm_u16x8_extend_low_u8x16(__b));
1472 }
1473 
1474 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_extmul_high_u8x16(v128_t __a,v128_t __b)1475 wasm_u16x8_extmul_high_u8x16(v128_t __a, v128_t __b) {
1476   return (v128_t)((__u16x8)wasm_u16x8_extend_high_u8x16(__a) *
1477                   (__u16x8)wasm_u16x8_extend_high_u8x16(__b));
1478 }
1479 
// Extending multiply: sign-extend the low four i16 lanes of each operand to
// i32, then multiply lane-wise (wasm `i32x4.extmul_low_i16x8_s`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_extmul_low_i16x8(v128_t __a, v128_t __b) {
  return (v128_t)((__i32x4)wasm_i32x4_extend_low_i16x8(__a) *
                  (__i32x4)wasm_i32x4_extend_low_i16x8(__b));
}
1485 
// Extending multiply: sign-extend the high four i16 lanes of each operand to
// i32, then multiply lane-wise (wasm `i32x4.extmul_high_i16x8_s`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_extmul_high_i16x8(v128_t __a, v128_t __b) {
  return (v128_t)((__i32x4)wasm_i32x4_extend_high_i16x8(__a) *
                  (__i32x4)wasm_i32x4_extend_high_i16x8(__b));
}
1491 
// Extending multiply: zero-extend the low four u16 lanes of each operand to
// u32, then multiply lane-wise (wasm `i32x4.extmul_low_i16x8_u`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_extmul_low_u16x8(v128_t __a, v128_t __b) {
  return (v128_t)((__u32x4)wasm_u32x4_extend_low_u16x8(__a) *
                  (__u32x4)wasm_u32x4_extend_low_u16x8(__b));
}
1497 
// Extending multiply: zero-extend the high four u16 lanes of each operand to
// u32, then multiply lane-wise (wasm `i32x4.extmul_high_i16x8_u`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_extmul_high_u16x8(v128_t __a, v128_t __b) {
  return (v128_t)((__u32x4)wasm_u32x4_extend_high_u16x8(__a) *
                  (__u32x4)wasm_u32x4_extend_high_u16x8(__b));
}
1503 
// Extending multiply: sign-extend the low two i32 lanes of each operand to
// i64, then multiply lane-wise (wasm `i64x2.extmul_low_i32x4_s`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_extmul_low_i32x4(v128_t __a, v128_t __b) {
  return (v128_t)((__i64x2)wasm_i64x2_extend_low_i32x4(__a) *
                  (__i64x2)wasm_i64x2_extend_low_i32x4(__b));
}
1509 
// Extending multiply: sign-extend the high two i32 lanes of each operand to
// i64, then multiply lane-wise (wasm `i64x2.extmul_high_i32x4_s`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_extmul_high_i32x4(v128_t __a, v128_t __b) {
  return (v128_t)((__i64x2)wasm_i64x2_extend_high_i32x4(__a) *
                  (__i64x2)wasm_i64x2_extend_high_i32x4(__b));
}
1515 
// Extending multiply: zero-extend the low two u32 lanes of each operand to
// u64, then multiply lane-wise (wasm `i64x2.extmul_low_i32x4_u`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_extmul_low_u32x4(v128_t __a, v128_t __b) {
  return (v128_t)((__u64x2)wasm_u64x2_extend_low_u32x4(__a) *
                  (__u64x2)wasm_u64x2_extend_low_u32x4(__b));
}
1521 
// Extending multiply: zero-extend the high two u32 lanes of each operand to
// u64, then multiply lane-wise (wasm `i64x2.extmul_high_i32x4_u`).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_extmul_high_u32x4(v128_t __a, v128_t __b) {
  return (v128_t)((__u64x2)wasm_u64x2_extend_high_u32x4(__a) *
                  (__u64x2)wasm_u64x2_extend_high_u32x4(__b));
}
1527 
// Q15 rounding, saturating lane-wise multiply (wasm `i16x8.q15mulr_sat_s`;
// semantics are encoded in the builtin's name).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_q15mulr_sat(v128_t __a,
                                                                   v128_t __b) {
  return (v128_t)__builtin_wasm_q15mulr_sat_s_i16x8((__i16x8)__a, (__i16x8)__b);
}
1532 
// Old intrinsic names supported to ease transitioning to the standard names. Do
// not use these; they will be removed in the near future.

// Attribute set for deprecated inline wrappers: the usual function attributes
// plus clang's two-argument deprecated attribute, which names the replacement
// so tooling can offer a fix-it. __replacement must be a string literal (it is
// concatenated into the message).
#define __DEPRECATED_FN_ATTRS(__replacement)                                   \
  __DEFAULT_FN_ATTRS __attribute__(                                            \
      (deprecated("use " __replacement " instead", __replacement)))

// Stringizing helper for _Pragma below.
#define __WASM_STR(X) #X

// Deprecation marker for macro-based intrinsics (functions use the attribute
// above instead). Emits a warning only when the user opts in via __DEPRECATED,
// since a macro cannot carry __attribute__((deprecated)).
#ifdef __DEPRECATED
#define __DEPRECATED_WASM_MACRO(__name, __replacement)                         \
  _Pragma(__WASM_STR(GCC warning(                                              \
      "'" __name "' is deprecated: use '" __replacement "' instead")))
#else
#define __DEPRECATED_WASM_MACRO(__name, __replacement)
#endif
1549 
// Deprecated aliases for the load-and-splat intrinsics; each simply forwards
// to its standard-named replacement.
static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_v128_load8_splat")
wasm_v8x16_load_splat(const void *__mem) {
  return wasm_v128_load8_splat(__mem);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_v128_load16_splat")
wasm_v16x8_load_splat(const void *__mem) {
  return wasm_v128_load16_splat(__mem);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_v128_load32_splat")
wasm_v32x4_load_splat(const void *__mem) {
  return wasm_v128_load32_splat(__mem);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_v128_load64_splat")
wasm_v64x2_load_splat(const void *__mem) {
  return wasm_v128_load64_splat(__mem);
}
1569 
// Deprecated aliases for the extending-load intrinsics (underscored NxM names);
// each forwards to its standard-named replacement.
static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i16x8_load8x8")
wasm_i16x8_load_8x8(const void *__mem) {
  return wasm_i16x8_load8x8(__mem);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u16x8_load8x8")
wasm_u16x8_load_8x8(const void *__mem) {
  return wasm_u16x8_load8x8(__mem);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i32x4_load16x4")
wasm_i32x4_load_16x4(const void *__mem) {
  return wasm_i32x4_load16x4(__mem);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u32x4_load16x4")
wasm_u32x4_load_16x4(const void *__mem) {
  return wasm_u32x4_load16x4(__mem);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i64x2_load32x2")
wasm_i64x2_load_32x2(const void *__mem) {
  return wasm_i64x2_load32x2(__mem);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u64x2_load32x2")
wasm_u64x2_load_32x2(const void *__mem) {
  return wasm_u64x2_load32x2(__mem);
}
1599 
// Deprecated v-prefixed shuffle macro names. Each expands to the optional
// __DEPRECATED_WASM_MACRO warning pragma followed by the replacement macro.
#define wasm_v8x16_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7, __c8, __c9, __c10, __c11, __c12, __c13,       \
                           __c14, __c15)                                       \
  __DEPRECATED_WASM_MACRO("wasm_v8x16_shuffle", "wasm_i8x16_shuffle")          \
  wasm_i8x16_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7, \
                     __c8, __c9, __c10, __c11, __c12, __c13, __c14, __c15)

#define wasm_v16x8_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7)                                               \
  __DEPRECATED_WASM_MACRO("wasm_v16x8_shuffle", "wasm_i16x8_shuffle")          \
  wasm_i16x8_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7)

#define wasm_v32x4_shuffle(__a, __b, __c0, __c1, __c2, __c3)                   \
  __DEPRECATED_WASM_MACRO("wasm_v32x4_shuffle", "wasm_i32x4_shuffle")          \
  wasm_i32x4_shuffle(__a, __b, __c0, __c1, __c2, __c3)

#define wasm_v64x2_shuffle(__a, __b, __c0, __c1)                               \
  __DEPRECATED_WASM_MACRO("wasm_v64x2_shuffle", "wasm_i64x2_shuffle")          \
  wasm_i64x2_shuffle(__a, __b, __c0, __c1)
1619 
// Deprecated alias for wasm_i8x16_swizzle.
static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i8x16_swizzle")
wasm_v8x16_swizzle(v128_t __a, v128_t __b) {
  return wasm_i8x16_swizzle(__a, __b);
}
1624 
// Deprecated per-width any_true aliases; the standard operation is the single
// width-agnostic wasm_v128_any_true, which all three forward to.
static __inline__ bool __DEPRECATED_FN_ATTRS("wasm_v128_any_true")
wasm_i8x16_any_true(v128_t __a) {
  return wasm_v128_any_true(__a);
}

static __inline__ bool __DEPRECATED_FN_ATTRS("wasm_v128_any_true")
wasm_i16x8_any_true(v128_t __a) {
  return wasm_v128_any_true(__a);
}

static __inline__ bool __DEPRECATED_FN_ATTRS("wasm_v128_any_true")
wasm_i32x4_any_true(v128_t __a) {
  return wasm_v128_any_true(__a);
}
1639 
// Deprecated "_saturate"-suffixed aliases for the saturating add/sub
// intrinsics; each forwards to its "_sat"-suffixed replacement.
static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i8x16_add_sat")
wasm_i8x16_add_saturate(v128_t __a, v128_t __b) {
  return wasm_i8x16_add_sat(__a, __b);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u8x16_add_sat")
wasm_u8x16_add_saturate(v128_t __a, v128_t __b) {
  return wasm_u8x16_add_sat(__a, __b);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i8x16_sub_sat")
wasm_i8x16_sub_saturate(v128_t __a, v128_t __b) {
  return wasm_i8x16_sub_sat(__a, __b);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u8x16_sub_sat")
wasm_u8x16_sub_saturate(v128_t __a, v128_t __b) {
  return wasm_u8x16_sub_sat(__a, __b);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i16x8_add_sat")
wasm_i16x8_add_saturate(v128_t __a, v128_t __b) {
  return wasm_i16x8_add_sat(__a, __b);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u16x8_add_sat")
wasm_u16x8_add_saturate(v128_t __a, v128_t __b) {
  return wasm_u16x8_add_sat(__a, __b);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i16x8_sub_sat")
wasm_i16x8_sub_saturate(v128_t __a, v128_t __b) {
  return wasm_i16x8_sub_sat(__a, __b);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u16x8_sub_sat")
wasm_u16x8_sub_saturate(v128_t __a, v128_t __b) {
  return wasm_u16x8_sub_sat(__a, __b);
}
1679 
// Deprecated "widen" aliases for the "extend" intrinsics. Note the old
// unsigned variants carried an i-prefixed name (e.g. wasm_i16x8_widen_low_u8x16)
// but forward to the u-prefixed replacements.
static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i16x8_extend_low_i8x16")
wasm_i16x8_widen_low_i8x16(v128_t __a) {
  return wasm_i16x8_extend_low_i8x16(__a);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i16x8_extend_high_i8x16")
wasm_i16x8_widen_high_i8x16(v128_t __a) {
  return wasm_i16x8_extend_high_i8x16(__a);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u16x8_extend_low_u8x16")
wasm_i16x8_widen_low_u8x16(v128_t __a) {
  return wasm_u16x8_extend_low_u8x16(__a);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u16x8_extend_high_u8x16")
wasm_i16x8_widen_high_u8x16(v128_t __a) {
  return wasm_u16x8_extend_high_u8x16(__a);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i32x4_extend_low_i16x8")
wasm_i32x4_widen_low_i16x8(v128_t __a) {
  return wasm_i32x4_extend_low_i16x8(__a);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i32x4_extend_high_i16x8")
wasm_i32x4_widen_high_i16x8(v128_t __a) {
  return wasm_i32x4_extend_high_i16x8(__a);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u32x4_extend_low_u16x8")
wasm_i32x4_widen_low_u16x8(v128_t __a) {
  return wasm_u32x4_extend_low_u16x8(__a);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u32x4_extend_high_u16x8")
wasm_i32x4_widen_high_u16x8(v128_t __a) {
  return wasm_u32x4_extend_high_u16x8(__a);
}
1719 
// Deprecated "_saturate"-suffixed aliases for the "_sat" truncation
// intrinsics.
static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i32x4_trunc_sat_f32x4")
wasm_i32x4_trunc_saturate_f32x4(v128_t __a) {
  return wasm_i32x4_trunc_sat_f32x4(__a);
}

static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u32x4_trunc_sat_f32x4")
wasm_u32x4_trunc_saturate_f32x4(v128_t __a) {
  return wasm_u32x4_trunc_sat_f32x4(__a);
}
1729 
1730 // Undefine helper macros
1731 #undef __DEFAULT_FN_ATTRS
1732 #undef __DEPRECATED_FN_ATTRS
1733 
1734 #endif // __WASM_SIMD128_H
1735