1 // REQUIRES: aarch64-registered-target
2 // RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -emit-llvm -o - %s | FileCheck %s
3 // RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -emit-llvm -o - -x c++ %s | FileCheck %s
4 // RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -emit-llvm -o - %s | FileCheck %s
5 // RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -emit-llvm -o - -x c++ %s | FileCheck %s
6 // RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - %s >/dev/null
7 #include <arm_sve.h>
8 
9 #ifdef SVE_OVERLOADED_FORMS
10 // A simple used,unused... macro, long enough to represent any SVE builtin.
11 #define SVE_ACLE_FUNC(A1,A2_UNUSED,A3,A4_UNUSED) A1##A3
12 #else
13 #define SVE_ACLE_FUNC(A1,A2,A3,A4) A1##A2##A3##A4
14 #endif
15 
test_svst1_s8(svbool_t pg,int8_t * base,svint8_t data)16 void test_svst1_s8(svbool_t pg, int8_t *base, svint8_t data)
17 {
18   // CHECK-LABEL: test_svst1_s8
19   // CHECK: call void @llvm.aarch64.sve.st1.nxv16i8(<vscale x 16 x i8> %data, <vscale x 16 x i1> %pg, i8* %base)
20   // CHECK: ret void
21   return SVE_ACLE_FUNC(svst1,_s8,,)(pg, base, data);
22 }
23 
test_svst1_s16(svbool_t pg,int16_t * base,svint16_t data)24 void test_svst1_s16(svbool_t pg, int16_t *base, svint16_t data)
25 {
26   // CHECK-LABEL: test_svst1_s16
27   // CHECK: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
28   // CHECK: call void @llvm.aarch64.sve.st1.nxv8i16(<vscale x 8 x i16> %data, <vscale x 8 x i1> %[[PG]], i16* %base)
29   // CHECK: ret void
30   return SVE_ACLE_FUNC(svst1,_s16,,)(pg, base, data);
31 }
32 
test_svst1_s32(svbool_t pg,int32_t * base,svint32_t data)33 void test_svst1_s32(svbool_t pg, int32_t *base, svint32_t data)
34 {
35   // CHECK-LABEL: test_svst1_s32
36   // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
37   // CHECK: call void @llvm.aarch64.sve.st1.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base)
38   // CHECK: ret void
39   return SVE_ACLE_FUNC(svst1,_s32,,)(pg, base, data);
40 }
41 
test_svst1_s64(svbool_t pg,int64_t * base,svint64_t data)42 void test_svst1_s64(svbool_t pg, int64_t *base, svint64_t data)
43 {
44   // CHECK-LABEL: test_svst1_s64
45   // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
46   // CHECK: call void @llvm.aarch64.sve.st1.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base)
47   // CHECK: ret void
48   return SVE_ACLE_FUNC(svst1,_s64,,)(pg, base, data);
49 }
50 
test_svst1_u8(svbool_t pg,uint8_t * base,svuint8_t data)51 void test_svst1_u8(svbool_t pg, uint8_t *base, svuint8_t data)
52 {
53   // CHECK-LABEL: test_svst1_u8
54   // CHECK: call void @llvm.aarch64.sve.st1.nxv16i8(<vscale x 16 x i8> %data, <vscale x 16 x i1> %pg, i8* %base)
55   // CHECK: ret void
56   return SVE_ACLE_FUNC(svst1,_u8,,)(pg, base, data);
57 }
58 
test_svst1_u16(svbool_t pg,uint16_t * base,svuint16_t data)59 void test_svst1_u16(svbool_t pg, uint16_t *base, svuint16_t data)
60 {
61   // CHECK-LABEL: test_svst1_u16
62   // CHECK: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
63   // CHECK: call void @llvm.aarch64.sve.st1.nxv8i16(<vscale x 8 x i16> %data, <vscale x 8 x i1> %[[PG]], i16* %base)
64   // CHECK: ret void
65   return SVE_ACLE_FUNC(svst1,_u16,,)(pg, base, data);
66 }
67 
test_svst1_u32(svbool_t pg,uint32_t * base,svuint32_t data)68 void test_svst1_u32(svbool_t pg, uint32_t *base, svuint32_t data)
69 {
70   // CHECK-LABEL: test_svst1_u32
71   // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
72   // CHECK: call void @llvm.aarch64.sve.st1.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base)
73   // CHECK: ret void
74   return SVE_ACLE_FUNC(svst1,_u32,,)(pg, base, data);
75 }
76 
test_svst1_u64(svbool_t pg,uint64_t * base,svuint64_t data)77 void test_svst1_u64(svbool_t pg, uint64_t *base, svuint64_t data)
78 {
79   // CHECK-LABEL: test_svst1_u64
80   // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
81   // CHECK: call void @llvm.aarch64.sve.st1.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base)
82   // CHECK: ret void
83   return SVE_ACLE_FUNC(svst1,_u64,,)(pg, base, data);
84 }
85 
test_svst1_f16(svbool_t pg,float16_t * base,svfloat16_t data)86 void test_svst1_f16(svbool_t pg, float16_t *base, svfloat16_t data)
87 {
88   // CHECK-LABEL: test_svst1_f16
89   // CHECK: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
90   // CHECK: call void @llvm.aarch64.sve.st1.nxv8f16(<vscale x 8 x half> %data, <vscale x 8 x i1> %[[PG]], half* %base)
91   // CHECK: ret void
92   return SVE_ACLE_FUNC(svst1,_f16,,)(pg, base, data);
93 }
94 
test_svst1_f32(svbool_t pg,float32_t * base,svfloat32_t data)95 void test_svst1_f32(svbool_t pg, float32_t *base, svfloat32_t data)
96 {
97   // CHECK-LABEL: test_svst1_f32
98   // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
99   // CHECK: call void @llvm.aarch64.sve.st1.nxv4f32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], float* %base)
100   // CHECK: ret void
101   return SVE_ACLE_FUNC(svst1,_f32,,)(pg, base, data);
102 }
103 
test_svst1_f64(svbool_t pg,float64_t * base,svfloat64_t data)104 void test_svst1_f64(svbool_t pg, float64_t *base, svfloat64_t data)
105 {
106   // CHECK-LABEL: test_svst1_f64
107   // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
108   // CHECK: call void @llvm.aarch64.sve.st1.nxv2f64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], double* %base)
109   // CHECK: ret void
110   return SVE_ACLE_FUNC(svst1,_f64,,)(pg, base, data);
111 }
112 
test_svst1_vnum_s8(svbool_t pg,int8_t * base,int64_t vnum,svint8_t data)113 void test_svst1_vnum_s8(svbool_t pg, int8_t *base, int64_t vnum, svint8_t data)
114 {
115   // CHECK-LABEL: test_svst1_vnum_s8
116   // CHECK: %[[BASE:.*]] = bitcast i8* %base to <vscale x 16 x i8>*
117   // CHECK: %[[GEP:.*]] = getelementptr <vscale x 16 x i8>, <vscale x 16 x i8>* %[[BASE]], i64 %vnum, i64 0
118   // CHECK: call void @llvm.aarch64.sve.st1.nxv16i8(<vscale x 16 x i8> %data, <vscale x 16 x i1> %pg, i8* %[[GEP]])
119   // CHECK: ret void
120   return SVE_ACLE_FUNC(svst1_vnum,_s8,,)(pg, base, vnum, data);
121 }
122 
test_svst1_vnum_s16(svbool_t pg,int16_t * base,int64_t vnum,svint16_t data)123 void test_svst1_vnum_s16(svbool_t pg, int16_t *base, int64_t vnum, svint16_t data)
124 {
125   // CHECK-LABEL: test_svst1_vnum_s16
126   // CHECK-DAG: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
127   // CHECK-DAG: %[[BASE:.*]] = bitcast i16* %base to <vscale x 8 x i16>*
128   // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 8 x i16>, <vscale x 8 x i16>* %[[BASE]], i64 %vnum, i64 0
129   // CHECK: call void @llvm.aarch64.sve.st1.nxv8i16(<vscale x 8 x i16> %data, <vscale x 8 x i1> %[[PG]], i16* %[[GEP]])
130   // CHECK: ret void
131   return SVE_ACLE_FUNC(svst1_vnum,_s16,,)(pg, base, vnum, data);
132 }
133 
test_svst1_vnum_s32(svbool_t pg,int32_t * base,int64_t vnum,svint32_t data)134 void test_svst1_vnum_s32(svbool_t pg, int32_t *base, int64_t vnum, svint32_t data)
135 {
136   // CHECK-LABEL: test_svst1_vnum_s32
137   // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
138   // CHECK-DAG: %[[BASE:.*]] = bitcast i32* %base to <vscale x 4 x i32>*
139   // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 4 x i32>, <vscale x 4 x i32>* %[[BASE]], i64 %vnum, i64 0
140   // CHECK: call void @llvm.aarch64.sve.st1.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %[[GEP]])
141   // CHECK: ret void
142   return SVE_ACLE_FUNC(svst1_vnum,_s32,,)(pg, base, vnum, data);
143 }
144 
test_svst1_vnum_s64(svbool_t pg,int64_t * base,int64_t vnum,svint64_t data)145 void test_svst1_vnum_s64(svbool_t pg, int64_t *base, int64_t vnum, svint64_t data)
146 {
147   // CHECK-LABEL: test_svst1_vnum_s64
148   // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
149   // CHECK-DAG: %[[BASE:.*]] = bitcast i64* %base to <vscale x 2 x i64>*
150   // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 2 x i64>, <vscale x 2 x i64>* %[[BASE]], i64 %vnum, i64 0
151   // CHECK: call void @llvm.aarch64.sve.st1.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %[[GEP]])
152   // CHECK: ret void
153   return SVE_ACLE_FUNC(svst1_vnum,_s64,,)(pg, base, vnum, data);
154 }
155 
test_svst1_vnum_u8(svbool_t pg,uint8_t * base,int64_t vnum,svuint8_t data)156 void test_svst1_vnum_u8(svbool_t pg, uint8_t *base, int64_t vnum, svuint8_t data)
157 {
158   // CHECK-LABEL: test_svst1_vnum_u8
159   // CHECK: %[[BASE:.*]] = bitcast i8* %base to <vscale x 16 x i8>*
160   // CHECK: %[[GEP:.*]] = getelementptr <vscale x 16 x i8>, <vscale x 16 x i8>* %[[BASE]], i64 %vnum, i64 0
161   // CHECK: call void @llvm.aarch64.sve.st1.nxv16i8(<vscale x 16 x i8> %data, <vscale x 16 x i1> %pg, i8* %[[GEP]])
162   // CHECK: ret void
163   return SVE_ACLE_FUNC(svst1_vnum,_u8,,)(pg, base, vnum, data);
164 }
165 
test_svst1_vnum_u16(svbool_t pg,uint16_t * base,int64_t vnum,svuint16_t data)166 void test_svst1_vnum_u16(svbool_t pg, uint16_t *base, int64_t vnum, svuint16_t data)
167 {
168   // CHECK-LABEL: test_svst1_vnum_u16
169   // CHECK-DAG: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
170   // CHECK-DAG: %[[BASE:.*]] = bitcast i16* %base to <vscale x 8 x i16>*
171   // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 8 x i16>, <vscale x 8 x i16>* %[[BASE]], i64 %vnum, i64 0
172   // CHECK: call void @llvm.aarch64.sve.st1.nxv8i16(<vscale x 8 x i16> %data, <vscale x 8 x i1> %[[PG]], i16* %[[GEP]])
173   // CHECK: ret void
174   return SVE_ACLE_FUNC(svst1_vnum,_u16,,)(pg, base, vnum, data);
175 }
176 
test_svst1_vnum_u32(svbool_t pg,uint32_t * base,int64_t vnum,svuint32_t data)177 void test_svst1_vnum_u32(svbool_t pg, uint32_t *base, int64_t vnum, svuint32_t data)
178 {
179   // CHECK-LABEL: test_svst1_vnum_u32
180   // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
181   // CHECK-DAG: %[[BASE:.*]] = bitcast i32* %base to <vscale x 4 x i32>*
182   // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 4 x i32>, <vscale x 4 x i32>* %[[BASE]], i64 %vnum, i64 0
183   // CHECK: call void @llvm.aarch64.sve.st1.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %[[GEP]])
184   // CHECK: ret void
185   return SVE_ACLE_FUNC(svst1_vnum,_u32,,)(pg, base, vnum, data);
186 }
187 
test_svst1_vnum_u64(svbool_t pg,uint64_t * base,int64_t vnum,svuint64_t data)188 void test_svst1_vnum_u64(svbool_t pg, uint64_t *base, int64_t vnum, svuint64_t data)
189 {
190   // CHECK-LABEL: test_svst1_vnum_u64
191   // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
192   // CHECK-DAG: %[[BASE:.*]] = bitcast i64* %base to <vscale x 2 x i64>*
193   // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 2 x i64>, <vscale x 2 x i64>* %[[BASE]], i64 %vnum, i64 0
194   // CHECK: call void @llvm.aarch64.sve.st1.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %[[GEP]])
195   // CHECK: ret void
196   return SVE_ACLE_FUNC(svst1_vnum,_u64,,)(pg, base, vnum, data);
197 }
198 
test_svst1_vnum_f16(svbool_t pg,float16_t * base,int64_t vnum,svfloat16_t data)199 void test_svst1_vnum_f16(svbool_t pg, float16_t *base, int64_t vnum, svfloat16_t data)
200 {
201   // CHECK-LABEL: test_svst1_vnum_f16
202   // CHECK-DAG: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
203   // CHECK-DAG: %[[BASE:.*]] = bitcast half* %base to <vscale x 8 x half>*
204   // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 8 x half>, <vscale x 8 x half>* %[[BASE]], i64 %vnum, i64 0
205   // CHECK: call void @llvm.aarch64.sve.st1.nxv8f16(<vscale x 8 x half> %data, <vscale x 8 x i1> %[[PG]], half* %[[GEP]])
206   // CHECK: ret void
207   return SVE_ACLE_FUNC(svst1_vnum,_f16,,)(pg, base, vnum, data);
208 }
209 
test_svst1_vnum_f32(svbool_t pg,float32_t * base,int64_t vnum,svfloat32_t data)210 void test_svst1_vnum_f32(svbool_t pg, float32_t *base, int64_t vnum, svfloat32_t data)
211 {
212   // CHECK-LABEL: test_svst1_vnum_f32
213   // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
214   // CHECK-DAG: %[[BASE:.*]] = bitcast float* %base to <vscale x 4 x float>*
215   // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 4 x float>, <vscale x 4 x float>* %[[BASE]], i64 %vnum, i64 0
216   // CHECK: call void @llvm.aarch64.sve.st1.nxv4f32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], float* %[[GEP]])
217   // CHECK: ret void
218   return SVE_ACLE_FUNC(svst1_vnum,_f32,,)(pg, base, vnum, data);
219 }
220 
test_svst1_vnum_f64(svbool_t pg,float64_t * base,int64_t vnum,svfloat64_t data)221 void test_svst1_vnum_f64(svbool_t pg, float64_t *base, int64_t vnum, svfloat64_t data)
222 {
223   // CHECK-LABEL: test_svst1_vnum_f64
224   // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
225   // CHECK-DAG: %[[BASE:.*]] = bitcast double* %base to <vscale x 2 x double>*
226   // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 2 x double>, <vscale x 2 x double>* %[[BASE]], i64 %vnum, i64 0
227   // CHECK: call void @llvm.aarch64.sve.st1.nxv2f64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], double* %[[GEP]])
228   return SVE_ACLE_FUNC(svst1_vnum,_f64,,)(pg, base, vnum, data);
229 }
230 
test_svst1_scatter_u32base_s32(svbool_t pg,svuint32_t bases,svint32_t data)231 void test_svst1_scatter_u32base_s32(svbool_t pg, svuint32_t bases, svint32_t data)
232 {
233   // CHECK-LABEL: test_svst1_scatter_u32base_s32
234   // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
235   // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 0)
236   // CHECK: ret void
237   return SVE_ACLE_FUNC(svst1_scatter,_u32base,,_s32)(pg, bases, data);
238 }
239 
test_svst1_scatter_u64base_s64(svbool_t pg,svuint64_t bases,svint64_t data)240 void test_svst1_scatter_u64base_s64(svbool_t pg, svuint64_t bases, svint64_t data)
241 {
242   // CHECK-LABEL: test_svst1_scatter_u64base_s64
243   // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
244   // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 0)
245   // CHECK: ret void
246   return SVE_ACLE_FUNC(svst1_scatter,_u64base,,_s64)(pg, bases, data);
247 }
248 
test_svst1_scatter_u32base_u32(svbool_t pg,svuint32_t bases,svuint32_t data)249 void test_svst1_scatter_u32base_u32(svbool_t pg, svuint32_t bases, svuint32_t data)
250 {
251   // CHECK-LABEL: test_svst1_scatter_u32base_u32
252   // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
253   // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 0)
254   // CHECK: ret void
255   return SVE_ACLE_FUNC(svst1_scatter,_u32base,,_u32)(pg, bases, data);
256 }
257 
test_svst1_scatter_u64base_u64(svbool_t pg,svuint64_t bases,svuint64_t data)258 void test_svst1_scatter_u64base_u64(svbool_t pg, svuint64_t bases, svuint64_t data)
259 {
260   // CHECK-LABEL: test_svst1_scatter_u64base_u64
261   // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
262   // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 0)
263   // CHECK: ret void
264   return SVE_ACLE_FUNC(svst1_scatter,_u64base,,_u64)(pg, bases, data);
265 }
266 
test_svst1_scatter_u32base_f32(svbool_t pg,svuint32_t bases,svfloat32_t data)267 void test_svst1_scatter_u32base_f32(svbool_t pg, svuint32_t bases, svfloat32_t data)
268 {
269   // CHECK-LABEL: test_svst1_scatter_u32base_f32
270   // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
271   // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4f32.nxv4i32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 0)
272   // CHECK: ret void
273   return SVE_ACLE_FUNC(svst1_scatter,_u32base,,_f32)(pg, bases, data);
274 }
275 
test_svst1_scatter_u64base_f64(svbool_t pg,svuint64_t bases,svfloat64_t data)276 void test_svst1_scatter_u64base_f64(svbool_t pg, svuint64_t bases, svfloat64_t data)
277 {
278   // CHECK-LABEL: test_svst1_scatter_u64base_f64
279   // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
280   // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2f64.nxv2i64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 0)
281   // CHECK: ret void
282   return SVE_ACLE_FUNC(svst1_scatter,_u64base,,_f64)(pg, bases, data);
283 }
284 
test_svst1_scatter_s32offset_s32(svbool_t pg,int32_t * base,svint32_t offsets,svint32_t data)285 void test_svst1_scatter_s32offset_s32(svbool_t pg, int32_t *base, svint32_t offsets, svint32_t data)
286 {
287   // CHECK-LABEL: test_svst1_scatter_s32offset_s32
288   // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
289   // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base, <vscale x 4 x i32> %offsets)
290   // CHECK: ret void
291   return SVE_ACLE_FUNC(svst1_scatter_,s32,offset,_s32)(pg, base, offsets, data);
292 }
293 
test_svst1_scatter_s64offset_s64(svbool_t pg,int64_t * base,svint64_t offsets,svint64_t data)294 void test_svst1_scatter_s64offset_s64(svbool_t pg, int64_t *base, svint64_t offsets, svint64_t data)
295 {
296   // CHECK-LABEL: test_svst1_scatter_s64offset_s64
297   // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
298   // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base, <vscale x 2 x i64> %offsets)
299   // CHECK: ret void
300   return SVE_ACLE_FUNC(svst1_scatter_,s64,offset,_s64)(pg, base, offsets, data);
301 }
302 
test_svst1_scatter_s32offset_u32(svbool_t pg,uint32_t * base,svint32_t offsets,svuint32_t data)303 void test_svst1_scatter_s32offset_u32(svbool_t pg, uint32_t *base, svint32_t offsets, svuint32_t data)
304 {
305   // CHECK-LABEL: test_svst1_scatter_s32offset_u32
306   // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
307   // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base, <vscale x 4 x i32> %offsets)
308   // CHECK: ret void
309   return SVE_ACLE_FUNC(svst1_scatter_,s32,offset,_u32)(pg, base, offsets, data);
310 }
311 
test_svst1_scatter_s64offset_u64(svbool_t pg,uint64_t * base,svint64_t offsets,svuint64_t data)312 void test_svst1_scatter_s64offset_u64(svbool_t pg, uint64_t *base, svint64_t offsets, svuint64_t data)
313 {
314   // CHECK-LABEL: test_svst1_scatter_s64offset_u64
315   // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
316   // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base, <vscale x 2 x i64> %offsets)
317   // CHECK: ret void
318   return SVE_ACLE_FUNC(svst1_scatter_,s64,offset,_u64)(pg, base, offsets, data);
319 }
320 
test_svst1_scatter_s32offset_f32(svbool_t pg,float32_t * base,svint32_t offsets,svfloat32_t data)321 void test_svst1_scatter_s32offset_f32(svbool_t pg, float32_t *base, svint32_t offsets, svfloat32_t data)
322 {
323   // CHECK-LABEL: test_svst1_scatter_s32offset_f32
324   // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
325   // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4f32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], float* %base, <vscale x 4 x i32> %offsets)
326   // CHECK: ret void
327   return SVE_ACLE_FUNC(svst1_scatter_,s32,offset,_f32)(pg, base, offsets, data);
328 }
329 
test_svst1_scatter_s64offset_f64(svbool_t pg,float64_t * base,svint64_t offsets,svfloat64_t data)330 void test_svst1_scatter_s64offset_f64(svbool_t pg, float64_t *base, svint64_t offsets, svfloat64_t data)
331 {
332   // CHECK-LABEL: test_svst1_scatter_s64offset_f64
333   // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
334   // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2f64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], double* %base, <vscale x 2 x i64> %offsets)
335   // CHECK: ret void
336   return SVE_ACLE_FUNC(svst1_scatter_,s64,offset,_f64)(pg, base, offsets, data);
337 }
338 
test_svst1_scatter_u32offset_s32(svbool_t pg,int32_t * base,svuint32_t offsets,svint32_t data)339 void test_svst1_scatter_u32offset_s32(svbool_t pg, int32_t *base, svuint32_t offsets, svint32_t data)
340 {
341   // CHECK-LABEL: test_svst1_scatter_u32offset_s32
342   // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
343   // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base, <vscale x 4 x i32> %offsets)
344   // CHECK: ret void
345   return SVE_ACLE_FUNC(svst1_scatter_,u32,offset,_s32)(pg, base, offsets, data);
346 }
347 
test_svst1_scatter_u64offset_s64(svbool_t pg,int64_t * base,svuint64_t offsets,svint64_t data)348 void test_svst1_scatter_u64offset_s64(svbool_t pg, int64_t *base, svuint64_t offsets, svint64_t data)
349 {
350   // CHECK-LABEL: test_svst1_scatter_u64offset_s64
351   // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
352   // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base, <vscale x 2 x i64> %offsets)
353   // CHECK: ret void
354   return SVE_ACLE_FUNC(svst1_scatter_,u64,offset,_s64)(pg, base, offsets, data);
355 }
356 
test_svst1_scatter_u32offset_u32(svbool_t pg,uint32_t * base,svuint32_t offsets,svuint32_t data)357 void test_svst1_scatter_u32offset_u32(svbool_t pg, uint32_t *base, svuint32_t offsets, svuint32_t data)
358 {
359   // CHECK-LABEL: test_svst1_scatter_u32offset_u32
360   // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
361   // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base, <vscale x 4 x i32> %offsets)
362   // CHECK: ret void
363   return SVE_ACLE_FUNC(svst1_scatter_,u32,offset,_u32)(pg, base, offsets, data);
364 }
365 
test_svst1_scatter_u64offset_u64(svbool_t pg,uint64_t * base,svuint64_t offsets,svuint64_t data)366 void test_svst1_scatter_u64offset_u64(svbool_t pg, uint64_t *base, svuint64_t offsets, svuint64_t data)
367 {
368   // CHECK-LABEL: test_svst1_scatter_u64offset_u64
369   // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
370   // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base, <vscale x 2 x i64> %offsets)
371   // CHECK: ret void
372   return SVE_ACLE_FUNC(svst1_scatter_,u64,offset,_u64)(pg, base, offsets, data);
373 }
374 
test_svst1_scatter_u32offset_f32(svbool_t pg,float32_t * base,svuint32_t offsets,svfloat32_t data)375 void test_svst1_scatter_u32offset_f32(svbool_t pg, float32_t *base, svuint32_t offsets, svfloat32_t data)
376 {
377   // CHECK-LABEL: test_svst1_scatter_u32offset_f32
378   // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
379   // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4f32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], float* %base, <vscale x 4 x i32> %offsets)
380   // CHECK: ret void
381   return SVE_ACLE_FUNC(svst1_scatter_,u32,offset,_f32)(pg, base, offsets, data);
382 }
383 
test_svst1_scatter_u64offset_f64(svbool_t pg,float64_t * base,svuint64_t offsets,svfloat64_t data)384 void test_svst1_scatter_u64offset_f64(svbool_t pg, float64_t *base, svuint64_t offsets, svfloat64_t data)
385 {
386   // CHECK-LABEL: test_svst1_scatter_u64offset_f64
387   // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
388   // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2f64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], double* %base, <vscale x 2 x i64> %offsets)
389   // CHECK: ret void
390   return SVE_ACLE_FUNC(svst1_scatter_,u64,offset,_f64)(pg, base, offsets, data);
391 }
392 
test_svst1_scatter_u32base_offset_s32(svbool_t pg,svuint32_t bases,int64_t offset,svint32_t data)393 void test_svst1_scatter_u32base_offset_s32(svbool_t pg, svuint32_t bases, int64_t offset, svint32_t data)
394 {
395   // CHECK-LABEL: test_svst1_scatter_u32base_offset_s32
396   // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
397   // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %offset)
398   // CHECK: ret void
399   return SVE_ACLE_FUNC(svst1_scatter,_u32base,_offset,_s32)(pg, bases, offset, data);
400 }
401 
test_svst1_scatter_u64base_offset_s64(svbool_t pg,svuint64_t bases,int64_t offset,svint64_t data)402 void test_svst1_scatter_u64base_offset_s64(svbool_t pg, svuint64_t bases, int64_t offset, svint64_t data)
403 {
404   // CHECK-LABEL: test_svst1_scatter_u64base_offset_s64
405   // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
406   // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %offset)
407   // CHECK: ret void
408   return SVE_ACLE_FUNC(svst1_scatter,_u64base,_offset,_s64)(pg, bases, offset, data);
409 }
410 
test_svst1_scatter_u32base_offset_u32(svbool_t pg,svuint32_t bases,int64_t offset,svuint32_t data)411 void test_svst1_scatter_u32base_offset_u32(svbool_t pg, svuint32_t bases, int64_t offset, svuint32_t data)
412 {
413   // CHECK-LABEL: test_svst1_scatter_u32base_offset_u32
414   // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
415   // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %offset)
416   // CHECK: ret void
417   return SVE_ACLE_FUNC(svst1_scatter,_u32base,_offset,_u32)(pg, bases, offset, data);
418 }
419 
test_svst1_scatter_u64base_offset_u64(svbool_t pg,svuint64_t bases,int64_t offset,svuint64_t data)420 void test_svst1_scatter_u64base_offset_u64(svbool_t pg, svuint64_t bases, int64_t offset, svuint64_t data)
421 {
422   // CHECK-LABEL: test_svst1_scatter_u64base_offset_u64
423   // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
424   // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %offset)
425   // CHECK: ret void
426   return SVE_ACLE_FUNC(svst1_scatter,_u64base,_offset,_u64)(pg, bases, offset, data);
427 }
428 
test_svst1_scatter_u32base_offset_f32(svbool_t pg,svuint32_t bases,int64_t offset,svfloat32_t data)429 void test_svst1_scatter_u32base_offset_f32(svbool_t pg, svuint32_t bases, int64_t offset, svfloat32_t data)
430 {
431   // CHECK-LABEL: test_svst1_scatter_u32base_offset_f32
432   // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
433   // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4f32.nxv4i32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %offset)
434   // CHECK: ret void
435   return SVE_ACLE_FUNC(svst1_scatter,_u32base,_offset,_f32)(pg, bases, offset, data);
436 }
437 
test_svst1_scatter_u64base_offset_f64(svbool_t pg,svuint64_t bases,int64_t offset,svfloat64_t data)438 void test_svst1_scatter_u64base_offset_f64(svbool_t pg, svuint64_t bases, int64_t offset, svfloat64_t data)
439 {
440   // CHECK-LABEL: test_svst1_scatter_u64base_offset_f64
441   // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
442   // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2f64.nxv2i64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %offset)
443   // CHECK: ret void
444   return SVE_ACLE_FUNC(svst1_scatter,_u64base,_offset,_f64)(pg, bases, offset, data);
445 }
446 
test_svst1_scatter_s32index_s32(svbool_t pg,int32_t * base,svint32_t indices,svint32_t data)447 void test_svst1_scatter_s32index_s32(svbool_t pg, int32_t *base, svint32_t indices, svint32_t data)
448 {
449   // CHECK-LABEL: test_svst1_scatter_s32index_s32
450   // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
451   // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.index.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base, <vscale x 4 x i32> %indices)
452   // CHECK: ret void
453   return SVE_ACLE_FUNC(svst1_scatter_,s32,index,_s32)(pg, base, indices, data);
454 }
455 
test_svst1_scatter_s64index_s64(svbool_t pg,int64_t * base,svint64_t indices,svint64_t data)456 void test_svst1_scatter_s64index_s64(svbool_t pg, int64_t *base, svint64_t indices, svint64_t data)
457 {
458   // CHECK-LABEL: test_svst1_scatter_s64index_s64
459   // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
460   // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base, <vscale x 2 x i64> %indices)
461   // CHECK: ret void
462   return SVE_ACLE_FUNC(svst1_scatter_,s64,index,_s64)(pg, base, indices, data);
463 }
464 
// Same as the s32 variant but storing u32 data; the IR is identical since
// signedness of the stored data does not change the intrinsic.
void test_svst1_scatter_s32index_u32(svbool_t pg, uint32_t *base, svint32_t indices, svuint32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_s32index_u32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.index.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base, <vscale x 4 x i32> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,s32,index,_u32)(pg, base, indices, data);
}
473 
// Same as the s64 variant but storing u64 data; the IR is identical since
// signedness of the stored data does not change the intrinsic.
void test_svst1_scatter_s64index_u64(svbool_t pg, uint64_t *base, svint64_t indices, svuint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_s64index_u64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,s64,index,_u64)(pg, base, indices, data);
}
482 
// Float variant of the signed 32-bit indexed scatter: same sxtw.index
// intrinsic, instantiated at nxv4f32.
void test_svst1_scatter_s32index_f32(svbool_t pg, float32_t *base, svint32_t indices, svfloat32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_s32index_f32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.index.nxv4f32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], float* %base, <vscale x 4 x i32> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,s32,index,_f32)(pg, base, indices, data);
}
491 
// Double variant of the 64-bit indexed scatter: plain .index intrinsic,
// instantiated at nxv2f64.
// NOTE: the stray double space before '=' in the %[[PG:.*]] capture was
// normalized to a single space for consistency with the sibling tests
// (FileCheck canonicalizes horizontal whitespace, so matching is unchanged).
void test_svst1_scatter_s64index_f64(svbool_t pg, float64_t *base, svint64_t indices, svfloat64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_s64index_f64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2f64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], double* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,s64,index,_f64)(pg, base, indices, data);
}
500 
// Scatter-store of i32 data using UNSIGNED 32-bit indices: the uxtw
// (zero-extend) indexed scatter intrinsic is selected instead of sxtw.
void test_svst1_scatter_u32index_s32(svbool_t pg, int32_t *base, svuint32_t indices, svint32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32index_s32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.index.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base, <vscale x 4 x i32> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u32,index,_s32)(pg, base, indices, data);
}
509 
// Scatter-store of i64 data using unsigned 64-bit indices: no extension
// needed, so the plain .index intrinsic is selected.
// FIX: the second CHECK line previously hard-coded the raw IR name %0 for
// the converted predicate instead of reusing the FileCheck variable
// %[[PG]] captured on the preceding line; that made the test fragile
// against value-numbering changes and inconsistent with every sibling test.
void test_svst1_scatter_u64index_s64(svbool_t pg, int64_t *base, svuint64_t indices, svint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64index_s64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u64,index,_s64)(pg, base, indices, data);
}
518 
// Unsigned-index variant storing u32 data; same uxtw.index intrinsic as
// the s32-data form.
void test_svst1_scatter_u32index_u32(svbool_t pg, uint32_t *base, svuint32_t indices, svuint32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32index_u32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.index.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base, <vscale x 4 x i32> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u32,index,_u32)(pg, base, indices, data);
}
527 
// Unsigned 64-bit index variant storing u64 data; plain .index intrinsic
// since 64-bit indices need no extension.
void test_svst1_scatter_u64index_u64(svbool_t pg, uint64_t *base, svuint64_t indices, svuint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64index_u64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u64,index,_u64)(pg, base, indices, data);
}
536 
// Float variant of the unsigned 32-bit indexed scatter: uxtw.index
// intrinsic instantiated at nxv4f32.
void test_svst1_scatter_u32index_f32(svbool_t pg, float32_t *base, svuint32_t indices, svfloat32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32index_f32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.index.nxv4f32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], float* %base, <vscale x 4 x i32> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u32,index,_f32)(pg, base, indices, data);
}
545 
// Double variant of the unsigned 64-bit indexed scatter: plain .index
// intrinsic instantiated at nxv2f64.
void test_svst1_scatter_u64index_f64(svbool_t pg, float64_t *base, svuint64_t indices, svfloat64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64index_f64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2f64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], double* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u64,index,_f64)(pg, base, indices, data);
}
554 
// Vector-base + scalar-index form: the scalar index is scaled by the
// element size (shl by 2 for 32-bit elements) and passed as a byte offset
// to the scalar.offset scatter intrinsic. CHECK-DAG is used because the
// predicate conversion and the shift may be emitted in either order.
void test_svst1_scatter_u32base_index_s32(svbool_t pg, svuint32_t bases, int64_t index, svint32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32base_index_s32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 2
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u32base,_index,_s32)(pg, bases, index, data);
}
564 
// Vector-base + scalar-index form for 64-bit elements: index scaled by
// the element size (shl by 3) into a byte offset for scalar.offset.
void test_svst1_scatter_u64base_index_s64(svbool_t pg, svuint64_t bases, int64_t index, svint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64base_index_s64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 3
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u64base,_index,_s64)(pg, bases, index, data);
}
574 
// u32-data variant of the vector-base + scalar-index form; IR identical
// to the s32-data case (shl by 2, scalar.offset intrinsic).
void test_svst1_scatter_u32base_index_u32(svbool_t pg, svuint32_t bases, int64_t index, svuint32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32base_index_u32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 2
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u32base,_index,_u32)(pg, bases, index, data);
}
584 
// u64-data variant of the vector-base + scalar-index form; IR identical
// to the s64-data case (shl by 3, scalar.offset intrinsic).
void test_svst1_scatter_u64base_index_u64(svbool_t pg, svuint64_t bases, int64_t index, svuint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64base_index_u64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 3
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u64base,_index,_u64)(pg, bases, index, data);
}
594 
// Float variant of the vector-base + scalar-index form: shl by 2 and
// scalar.offset intrinsic instantiated at nxv4f32.
void test_svst1_scatter_u32base_index_f32(svbool_t pg, svuint32_t bases, int64_t index, svfloat32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32base_index_f32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 2
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4f32.nxv4i32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u32base,_index,_f32)(pg, bases, index, data);
}
604 
// Double variant of the vector-base + scalar-index form: shl by 3 and
// scalar.offset intrinsic instantiated at nxv2f64.
void test_svst1_scatter_u64base_index_f64(svbool_t pg, svuint64_t bases, int64_t index, svfloat64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64base_index_f64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 3
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2f64.nxv2i64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u64base,_index,_f64)(pg, bases, index, data);
}
614