1 // REQUIRES: aarch64-registered-target
2 // RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -emit-llvm -o - %s | FileCheck %s
3 // RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -emit-llvm -o - %s | FileCheck %s
4 // RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - %s >/dev/null 2>%t
5 // RUN: FileCheck --check-prefix=ASM --allow-empty %s <%t
6
7 // If this check fails please read test/CodeGen/aarch64-sve-intrinsics/README for instructions on how to resolve it.
8 // ASM-NOT: warning
9 #include <arm_sve.h>
10
#ifdef SVE_OVERLOADED_FORMS
// A simple used,unused... macro, long enough to represent any SVE builtin.
// Overloaded forms drop the type suffixes, e.g. (svst1,_s8,,) -> svst1.
#define SVE_ACLE_FUNC(A1,A2_UNUSED,A3,A4_UNUSED) A1##A3
#else
// Non-overloaded forms paste all four fragments into the full builtin name,
// e.g. (svst1,_s8,,) -> svst1_s8 and (svst1_scatter,_u32base,_offset,_s32)
// -> svst1_scatter_u32base_offset_s32.
#define SVE_ACLE_FUNC(A1,A2,A3,A4) A1##A2##A3##A4
#endif
17
// Contiguous svst1 stores, one test per element type. The expected LLVM IR
// is pinned by the FileCheck annotations inside each body. For element
// widths narrower than 8 bits-per-lane granularity (i.e. everything except
// the 16-lane i8 forms), the svbool_t predicate must first be narrowed via
// the convert.from.svbool intrinsic before the st1 call.
void test_svst1_s8(svbool_t pg, int8_t *base, svint8_t data)
{
  // CHECK-LABEL: test_svst1_s8
  // CHECK: call void @llvm.aarch64.sve.st1.nxv16i8(<vscale x 16 x i8> %data, <vscale x 16 x i1> %pg, i8* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1,_s8,,)(pg, base, data);
}

void test_svst1_s16(svbool_t pg, int16_t *base, svint16_t data)
{
  // CHECK-LABEL: test_svst1_s16
  // CHECK: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.nxv8i16(<vscale x 8 x i16> %data, <vscale x 8 x i1> %[[PG]], i16* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1,_s16,,)(pg, base, data);
}

void test_svst1_s32(svbool_t pg, int32_t *base, svint32_t data)
{
  // CHECK-LABEL: test_svst1_s32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1,_s32,,)(pg, base, data);
}

void test_svst1_s64(svbool_t pg, int64_t *base, svint64_t data)
{
  // CHECK-LABEL: test_svst1_s64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1,_s64,,)(pg, base, data);
}

// Unsigned forms lower to the same intrinsics as the signed forms above.
void test_svst1_u8(svbool_t pg, uint8_t *base, svuint8_t data)
{
  // CHECK-LABEL: test_svst1_u8
  // CHECK: call void @llvm.aarch64.sve.st1.nxv16i8(<vscale x 16 x i8> %data, <vscale x 16 x i1> %pg, i8* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1,_u8,,)(pg, base, data);
}

void test_svst1_u16(svbool_t pg, uint16_t *base, svuint16_t data)
{
  // CHECK-LABEL: test_svst1_u16
  // CHECK: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.nxv8i16(<vscale x 8 x i16> %data, <vscale x 8 x i1> %[[PG]], i16* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1,_u16,,)(pg, base, data);
}

void test_svst1_u32(svbool_t pg, uint32_t *base, svuint32_t data)
{
  // CHECK-LABEL: test_svst1_u32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1,_u32,,)(pg, base, data);
}

void test_svst1_u64(svbool_t pg, uint64_t *base, svuint64_t data)
{
  // CHECK-LABEL: test_svst1_u64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1,_u64,,)(pg, base, data);
}

// Floating-point forms use the f16/f32/f64 intrinsic variants.
void test_svst1_f16(svbool_t pg, float16_t *base, svfloat16_t data)
{
  // CHECK-LABEL: test_svst1_f16
  // CHECK: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.nxv8f16(<vscale x 8 x half> %data, <vscale x 8 x i1> %[[PG]], half* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1,_f16,,)(pg, base, data);
}

void test_svst1_f32(svbool_t pg, float32_t *base, svfloat32_t data)
{
  // CHECK-LABEL: test_svst1_f32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4f32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], float* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1,_f32,,)(pg, base, data);
}

void test_svst1_f64(svbool_t pg, float64_t *base, svfloat64_t data)
{
  // CHECK-LABEL: test_svst1_f64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2f64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], double* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1,_f64,,)(pg, base, data);
}
114
// svst1_vnum stores: the scalar base is reinterpreted as a pointer to a
// scalable vector and advanced by 'vnum' whole vectors via getelementptr
// before the st1 call. The predicate-narrowing and address computation are
// independent, hence the order-insensitive (DAG) annotations.
void test_svst1_vnum_s8(svbool_t pg, int8_t *base, int64_t vnum, svint8_t data)
{
  // CHECK-LABEL: test_svst1_vnum_s8
  // CHECK: %[[BASE:.*]] = bitcast i8* %base to <vscale x 16 x i8>*
  // CHECK: %[[GEP:.*]] = getelementptr <vscale x 16 x i8>, <vscale x 16 x i8>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv16i8(<vscale x 16 x i8> %data, <vscale x 16 x i1> %pg, i8* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_vnum,_s8,,)(pg, base, vnum, data);
}

void test_svst1_vnum_s16(svbool_t pg, int16_t *base, int64_t vnum, svint16_t data)
{
  // CHECK-LABEL: test_svst1_vnum_s16
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i16* %base to <vscale x 8 x i16>*
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 8 x i16>, <vscale x 8 x i16>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv8i16(<vscale x 8 x i16> %data, <vscale x 8 x i1> %[[PG]], i16* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_vnum,_s16,,)(pg, base, vnum, data);
}

void test_svst1_vnum_s32(svbool_t pg, int32_t *base, int64_t vnum, svint32_t data)
{
  // CHECK-LABEL: test_svst1_vnum_s32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i32* %base to <vscale x 4 x i32>*
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 4 x i32>, <vscale x 4 x i32>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_vnum,_s32,,)(pg, base, vnum, data);
}

void test_svst1_vnum_s64(svbool_t pg, int64_t *base, int64_t vnum, svint64_t data)
{
  // CHECK-LABEL: test_svst1_vnum_s64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i64* %base to <vscale x 2 x i64>*
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 2 x i64>, <vscale x 2 x i64>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_vnum,_s64,,)(pg, base, vnum, data);
}

void test_svst1_vnum_u8(svbool_t pg, uint8_t *base, int64_t vnum, svuint8_t data)
{
  // CHECK-LABEL: test_svst1_vnum_u8
  // CHECK: %[[BASE:.*]] = bitcast i8* %base to <vscale x 16 x i8>*
  // CHECK: %[[GEP:.*]] = getelementptr <vscale x 16 x i8>, <vscale x 16 x i8>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv16i8(<vscale x 16 x i8> %data, <vscale x 16 x i1> %pg, i8* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_vnum,_u8,,)(pg, base, vnum, data);
}

void test_svst1_vnum_u16(svbool_t pg, uint16_t *base, int64_t vnum, svuint16_t data)
{
  // CHECK-LABEL: test_svst1_vnum_u16
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i16* %base to <vscale x 8 x i16>*
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 8 x i16>, <vscale x 8 x i16>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv8i16(<vscale x 8 x i16> %data, <vscale x 8 x i1> %[[PG]], i16* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_vnum,_u16,,)(pg, base, vnum, data);
}

void test_svst1_vnum_u32(svbool_t pg, uint32_t *base, int64_t vnum, svuint32_t data)
{
  // CHECK-LABEL: test_svst1_vnum_u32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i32* %base to <vscale x 4 x i32>*
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 4 x i32>, <vscale x 4 x i32>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_vnum,_u32,,)(pg, base, vnum, data);
}

void test_svst1_vnum_u64(svbool_t pg, uint64_t *base, int64_t vnum, svuint64_t data)
{
  // CHECK-LABEL: test_svst1_vnum_u64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i64* %base to <vscale x 2 x i64>*
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 2 x i64>, <vscale x 2 x i64>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_vnum,_u64,,)(pg, base, vnum, data);
}

void test_svst1_vnum_f16(svbool_t pg, float16_t *base, int64_t vnum, svfloat16_t data)
{
  // CHECK-LABEL: test_svst1_vnum_f16
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast half* %base to <vscale x 8 x half>*
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 8 x half>, <vscale x 8 x half>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv8f16(<vscale x 8 x half> %data, <vscale x 8 x i1> %[[PG]], half* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_vnum,_f16,,)(pg, base, vnum, data);
}

void test_svst1_vnum_f32(svbool_t pg, float32_t *base, int64_t vnum, svfloat32_t data)
{
  // CHECK-LABEL: test_svst1_vnum_f32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast float* %base to <vscale x 4 x float>*
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 4 x float>, <vscale x 4 x float>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4f32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], float* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_vnum,_f32,,)(pg, base, vnum, data);
}
222
// svst1_vnum store of doubles. Fix: this test was missing the final
// "ret void" match that every sibling svst1/svst1_vnum test in this file
// verifies (see e.g. test_svst1_vnum_f32 directly above); the generated IR
// for this void function does end in ret void, so the check is restored
// for consistency and to anchor the match at the end of the function body.
void test_svst1_vnum_f64(svbool_t pg, float64_t *base, int64_t vnum, svfloat64_t data)
{
  // CHECK-LABEL: test_svst1_vnum_f64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast double* %base to <vscale x 2 x double>*
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 2 x double>, <vscale x 2 x double>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2f64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], double* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_vnum,_f64,,)(pg, base, vnum, data);
}
232
// Vector-base scatter stores (no scalar offset): lowered to the
// st1.scatter.scalar.offset intrinsic with an immediate offset of 0.
void test_svst1_scatter_u32base_s32(svbool_t pg, svuint32_t bases, svint32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32base_s32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u32base,,_s32)(pg, bases, data);
}

void test_svst1_scatter_u64base_s64(svbool_t pg, svuint64_t bases, svint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64base_s64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u64base,,_s64)(pg, bases, data);
}

void test_svst1_scatter_u32base_u32(svbool_t pg, svuint32_t bases, svuint32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32base_u32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u32base,,_u32)(pg, bases, data);
}

void test_svst1_scatter_u64base_u64(svbool_t pg, svuint64_t bases, svuint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64base_u64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u64base,,_u64)(pg, bases, data);
}

void test_svst1_scatter_u32base_f32(svbool_t pg, svuint32_t bases, svfloat32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32base_f32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4f32.nxv4i32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u32base,,_f32)(pg, bases, data);
}

void test_svst1_scatter_u64base_f64(svbool_t pg, svuint64_t bases, svfloat64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64base_f64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2f64.nxv2i64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u64base,,_f64)(pg, bases, data);
}
286
// Scalar-base, vector-offset scatter stores (byte offsets). The 32-bit
// offset forms select a sign-extending (sxtw) or zero-extending (uxtw)
// intrinsic according to the signedness of the offset vector; the 64-bit
// offset forms need no extension and use the plain scatter intrinsic.
void test_svst1_scatter_s32offset_s32(svbool_t pg, int32_t *base, svint32_t offsets, svint32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_s32offset_s32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,s32,offset,_s32)(pg, base, offsets, data);
}

void test_svst1_scatter_s64offset_s64(svbool_t pg, int64_t *base, svint64_t offsets, svint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_s64offset_s64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,s64,offset,_s64)(pg, base, offsets, data);
}

void test_svst1_scatter_s32offset_u32(svbool_t pg, uint32_t *base, svint32_t offsets, svuint32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_s32offset_u32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,s32,offset,_u32)(pg, base, offsets, data);
}

void test_svst1_scatter_s64offset_u64(svbool_t pg, uint64_t *base, svint64_t offsets, svuint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_s64offset_u64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,s64,offset,_u64)(pg, base, offsets, data);
}

void test_svst1_scatter_s32offset_f32(svbool_t pg, float32_t *base, svint32_t offsets, svfloat32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_s32offset_f32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4f32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], float* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,s32,offset,_f32)(pg, base, offsets, data);
}

void test_svst1_scatter_s64offset_f64(svbool_t pg, float64_t *base, svint64_t offsets, svfloat64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_s64offset_f64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2f64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], double* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,s64,offset,_f64)(pg, base, offsets, data);
}

// Unsigned 32-bit offsets use the zero-extending (uxtw) variant.
void test_svst1_scatter_u32offset_s32(svbool_t pg, int32_t *base, svuint32_t offsets, svint32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32offset_s32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u32,offset,_s32)(pg, base, offsets, data);
}

void test_svst1_scatter_u64offset_s64(svbool_t pg, int64_t *base, svuint64_t offsets, svint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64offset_s64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u64,offset,_s64)(pg, base, offsets, data);
}

void test_svst1_scatter_u32offset_u32(svbool_t pg, uint32_t *base, svuint32_t offsets, svuint32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32offset_u32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u32,offset,_u32)(pg, base, offsets, data);
}

void test_svst1_scatter_u64offset_u64(svbool_t pg, uint64_t *base, svuint64_t offsets, svuint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64offset_u64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u64,offset,_u64)(pg, base, offsets, data);
}

void test_svst1_scatter_u32offset_f32(svbool_t pg, float32_t *base, svuint32_t offsets, svfloat32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32offset_f32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4f32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], float* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u32,offset,_f32)(pg, base, offsets, data);
}

void test_svst1_scatter_u64offset_f64(svbool_t pg, float64_t *base, svuint64_t offsets, svfloat64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64offset_f64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2f64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], double* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u64,offset,_f64)(pg, base, offsets, data);
}
394
// Vector-base plus scalar byte-offset scatter stores: same
// st1.scatter.scalar.offset intrinsic as the plain base forms, but with the
// runtime %offset forwarded instead of an immediate 0.
void test_svst1_scatter_u32base_offset_s32(svbool_t pg, svuint32_t bases, int64_t offset, svint32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32base_offset_s32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u32base,_offset,_s32)(pg, bases, offset, data);
}

void test_svst1_scatter_u64base_offset_s64(svbool_t pg, svuint64_t bases, int64_t offset, svint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64base_offset_s64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u64base,_offset,_s64)(pg, bases, offset, data);
}

void test_svst1_scatter_u32base_offset_u32(svbool_t pg, svuint32_t bases, int64_t offset, svuint32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32base_offset_u32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u32base,_offset,_u32)(pg, bases, offset, data);
}

void test_svst1_scatter_u64base_offset_u64(svbool_t pg, svuint64_t bases, int64_t offset, svuint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64base_offset_u64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u64base,_offset,_u64)(pg, bases, offset, data);
}

void test_svst1_scatter_u32base_offset_f32(svbool_t pg, svuint32_t bases, int64_t offset, svfloat32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32base_offset_f32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4f32.nxv4i32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u32base,_offset,_f32)(pg, bases, offset, data);
}

void test_svst1_scatter_u64base_offset_f64(svbool_t pg, svuint64_t bases, int64_t offset, svfloat64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64base_offset_f64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2f64.nxv2i64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u64base,_offset,_f64)(pg, bases, offset, data);
}
448
test_svst1_scatter_s32index_s32(svbool_t pg,int32_t * base,svint32_t indices,svint32_t data)449 void test_svst1_scatter_s32index_s32(svbool_t pg, int32_t *base, svint32_t indices, svint32_t data)
450 {
451 // CHECK-LABEL: test_svst1_scatter_s32index_s32
452 // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
453 // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.index.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base, <vscale x 4 x i32> %indices)
454 // CHECK: ret void
455 return SVE_ACLE_FUNC(svst1_scatter_,s32,index,_s32)(pg, base, indices, data);
456 }
457
test_svst1_scatter_s64index_s64(svbool_t pg,int64_t * base,svint64_t indices,svint64_t data)458 void test_svst1_scatter_s64index_s64(svbool_t pg, int64_t *base, svint64_t indices, svint64_t data)
459 {
460 // CHECK-LABEL: test_svst1_scatter_s64index_s64
461 // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
462 // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base, <vscale x 2 x i64> %indices)
463 // CHECK: ret void
464 return SVE_ACLE_FUNC(svst1_scatter_,s64,index,_s64)(pg, base, indices, data);
465 }
466
test_svst1_scatter_s32index_u32(svbool_t pg,uint32_t * base,svint32_t indices,svuint32_t data)467 void test_svst1_scatter_s32index_u32(svbool_t pg, uint32_t *base, svint32_t indices, svuint32_t data)
468 {
469 // CHECK-LABEL: test_svst1_scatter_s32index_u32
470 // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
471 // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.index.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base, <vscale x 4 x i32> %indices)
472 // CHECK: ret void
473 return SVE_ACLE_FUNC(svst1_scatter_,s32,index,_u32)(pg, base, indices, data);
474 }
475
// 64-bit scatter of unsigned data with signed 64-bit indices: same no-extend
// st1.scatter.index lowering as the signed-data variant.
void test_svst1_scatter_s64index_u64(svbool_t pg, uint64_t *base, svint64_t indices, svuint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_s64index_u64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,s64,index,_u64)(pg, base, indices, data);
}
484
// float32 scatter with signed 32-bit indices: sxtw.index intrinsic with an
// nxv4f32 data operand.
void test_svst1_scatter_s32index_f32(svbool_t pg, float32_t *base, svint32_t indices, svfloat32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_s32index_f32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.index.nxv4f32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], float* %base, <vscale x 4 x i32> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,s32,index,_f32)(pg, base, indices, data);
}
493
// float64 scatter with signed 64-bit indices: no-extend st1.scatter.index with
// an nxv2f64 data operand.
void test_svst1_scatter_s64index_f64(svbool_t pg, float64_t *base, svint64_t indices, svfloat64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_s64index_f64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2f64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], double* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,s64,index,_f64)(pg, base, indices, data);
}
502
// 32-bit scatter with unsigned 32-bit indices: indices are zero-extended, so
// the uxtw.index form of the scatter intrinsic must be selected.
void test_svst1_scatter_u32index_s32(svbool_t pg, int32_t *base, svuint32_t indices, svint32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32index_s32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.index.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base, <vscale x 4 x i32> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u32,index,_s32)(pg, base, indices, data);
}
511
// 64-bit scatter with unsigned 64-bit indices: no-extend st1.scatter.index.
// Fix: the second CHECK previously hard-coded the SSA name %0 for the converted
// predicate instead of reusing the %[[PG]] FileCheck variable bound on the line
// above (as every sibling test does). Hard-coding %0 is brittle against
// unrelated codegen/value-numbering changes; use the captured variable.
void test_svst1_scatter_u64index_s64(svbool_t pg, int64_t *base, svuint64_t indices, svint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64index_s64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u64,index,_s64)(pg, base, indices, data);
}
520
// 32-bit scatter of unsigned data with unsigned 32-bit indices: uxtw.index
// intrinsic (zero-extended offsets).
void test_svst1_scatter_u32index_u32(svbool_t pg, uint32_t *base, svuint32_t indices, svuint32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32index_u32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.index.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], i32* %base, <vscale x 4 x i32> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u32,index,_u32)(pg, base, indices, data);
}
529
// 64-bit scatter of unsigned data with unsigned 64-bit indices: no-extend
// st1.scatter.index lowering.
void test_svst1_scatter_u64index_u64(svbool_t pg, uint64_t *base, svuint64_t indices, svuint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64index_u64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], i64* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u64,index,_u64)(pg, base, indices, data);
}
538
// float32 scatter with unsigned 32-bit indices: uxtw.index intrinsic with an
// nxv4f32 data operand.
void test_svst1_scatter_u32index_f32(svbool_t pg, float32_t *base, svuint32_t indices, svfloat32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32index_f32
  // CHECK: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.index.nxv4f32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], float* %base, <vscale x 4 x i32> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u32,index,_f32)(pg, base, indices, data);
}
547
// float64 scatter with unsigned 64-bit indices: no-extend st1.scatter.index
// with an nxv2f64 data operand.
void test_svst1_scatter_u64index_f64(svbool_t pg, float64_t *base, svuint64_t indices, svfloat64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64index_f64
  // CHECK: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2f64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], double* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter_,u64,index,_f64)(pg, base, indices, data);
}
556
// Vector-base + scalar-index form (32-bit elements): the element index is
// scaled to a byte offset (shl by 2) and the scalar.offset scatter intrinsic
// is used; CHECK-DAG because the shl and the predicate conversion may be
// emitted in either order.
void test_svst1_scatter_u32base_index_s32(svbool_t pg, svuint32_t bases, int64_t index, svint32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32base_index_s32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 2
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u32base,_index,_s32)(pg, bases, index, data);
}
566
// Vector-base + scalar-index form (64-bit elements): index is scaled to a byte
// offset (shl by 3) and the scalar.offset scatter intrinsic is used.
void test_svst1_scatter_u64base_index_s64(svbool_t pg, svuint64_t bases, int64_t index, svint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64base_index_s64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 3
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u64base,_index,_s64)(pg, bases, index, data);
}
576
// Vector-base + scalar-index, unsigned 32-bit data: identical lowering to the
// signed-data variant (shl 2 byte scaling + scalar.offset intrinsic).
void test_svst1_scatter_u32base_index_u32(svbool_t pg, svuint32_t bases, int64_t index, svuint32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32base_index_u32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 2
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i32> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u32base,_index,_u32)(pg, bases, index, data);
}
586
// Vector-base + scalar-index, unsigned 64-bit data: identical lowering to the
// signed-data variant (shl 3 byte scaling + scalar.offset intrinsic).
void test_svst1_scatter_u64base_index_u64(svbool_t pg, svuint64_t bases, int64_t index, svuint64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64base_index_u64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 3
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i64> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u64base,_index,_u64)(pg, bases, index, data);
}
596
// Vector-base + scalar-index, float32 data: shl 2 byte scaling with the
// nxv4f32 scalar.offset scatter intrinsic.
void test_svst1_scatter_u32base_index_f32(svbool_t pg, svuint32_t bases, int64_t index, svfloat32_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u32base_index_f32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 2
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4f32.nxv4i32(<vscale x 4 x float> %data, <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u32base,_index,_f32)(pg, bases, index, data);
}
606
// Vector-base + scalar-index, float64 data: shl 3 byte scaling with the
// nxv2f64 scalar.offset scatter intrinsic.
void test_svst1_scatter_u64base_index_f64(svbool_t pg, svuint64_t bases, int64_t index, svfloat64_t data)
{
  // CHECK-LABEL: test_svst1_scatter_u64base_index_f64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 3
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2f64.nxv2i64(<vscale x 2 x double> %data, <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1_scatter,_u64base,_index,_f64)(pg, bases, index, data);
}
616