// REQUIRES: aarch64-registered-target
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - -emit-llvm %s 2>&1 | FileCheck %s
// RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - -emit-llvm %s 2>&1 | FileCheck %s
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - %s >/dev/null 2>%t
// RUN: FileCheck --check-prefix=ASM --allow-empty %s <%t

// If this check fails please read test/CodeGen/aarch64-sve-intrinsics/README for instructions on how to resolve it.
// ASM-NOT: warning
9 #include <arm_sve.h>
10
11 #ifdef SVE_OVERLOADED_FORMS
12 // A simple used,unused... macro, long enough to represent any SVE builtin.
13 #define SVE_ACLE_FUNC(A1,A2_UNUSED,A3,A4_UNUSED) A1##A3
14 #else
15 #define SVE_ACLE_FUNC(A1,A2,A3,A4) A1##A2##A3##A4
16 #endif
17
void test_svst1b_s16(svbool_t pg, int8_t *base, svint16_t data)
{
  // CHECK-LABEL: test_svst1b_s16
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 8 x i16> %data to <vscale x 8 x i8>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv8i8(<vscale x 8 x i8> %[[DATA]], <vscale x 8 x i1> %[[PG]], i8* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b,_s16,,)(pg, base, data);
}

void test_svst1b_s32(svbool_t pg, int8_t *base, svint32_t data)
{
  // CHECK-LABEL: test_svst1b_s32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i8(<vscale x 4 x i8> %[[DATA]], <vscale x 4 x i1> %[[PG]], i8* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b,_s32,,)(pg, base, data);
}

void test_svst1b_s64(svbool_t pg, int8_t *base, svint64_t data)
{
  // CHECK-LABEL: test_svst1b_s64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i8(<vscale x 2 x i8> %[[DATA]], <vscale x 2 x i1> %[[PG]], i8* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b,_s64,,)(pg, base, data);
}

void test_svst1b_u16(svbool_t pg, uint8_t *base, svuint16_t data)
{
  // CHECK-LABEL: test_svst1b_u16
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 8 x i16> %data to <vscale x 8 x i8>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv8i8(<vscale x 8 x i8> %[[DATA]], <vscale x 8 x i1> %[[PG]], i8* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b,_u16,,)(pg, base, data);
}

void test_svst1b_u32(svbool_t pg, uint8_t *base, svuint32_t data)
{
  // CHECK-LABEL: test_svst1b_u32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i8(<vscale x 4 x i8> %[[DATA]], <vscale x 4 x i1> %[[PG]], i8* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b,_u32,,)(pg, base, data);
}

void test_svst1b_u64(svbool_t pg, uint8_t *base, svuint64_t data)
{
  // CHECK-LABEL: test_svst1b_u64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i8(<vscale x 2 x i8> %[[DATA]], <vscale x 2 x i1> %[[PG]], i8* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b,_u64,,)(pg, base, data);
}

void test_svst1b_vnum_s16(svbool_t pg, int8_t *base, int64_t vnum, svint16_t data)
{
  // CHECK-LABEL: test_svst1b_vnum_s16
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i8* %base to <vscale x 8 x i8>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 8 x i16> %data to <vscale x 8 x i8>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 8 x i8>, <vscale x 8 x i8>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv8i8(<vscale x 8 x i8> %[[DATA]], <vscale x 8 x i1> %[[PG]], i8* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_vnum,_s16,,)(pg, base, vnum, data);
}

void test_svst1b_vnum_s32(svbool_t pg, int8_t *base, int64_t vnum, svint32_t data)
{
  // CHECK-LABEL: test_svst1b_vnum_s32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i8* %base to <vscale x 4 x i8>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 4 x i8>, <vscale x 4 x i8>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i8(<vscale x 4 x i8> %[[DATA]], <vscale x 4 x i1> %[[PG]], i8* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_vnum,_s32,,)(pg, base, vnum, data);
}

void test_svst1b_vnum_s64(svbool_t pg, int8_t *base, int64_t vnum, svint64_t data)
{
  // CHECK-LABEL: test_svst1b_vnum_s64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i8* %base to <vscale x 2 x i8>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 2 x i8>, <vscale x 2 x i8>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i8(<vscale x 2 x i8> %[[DATA]], <vscale x 2 x i1> %[[PG]], i8* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_vnum,_s64,,)(pg, base, vnum, data);
}

void test_svst1b_vnum_u16(svbool_t pg, uint8_t *base, int64_t vnum, svuint16_t data)
{
  // CHECK-LABEL: test_svst1b_vnum_u16
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i8* %base to <vscale x 8 x i8>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 8 x i16> %data to <vscale x 8 x i8>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 8 x i8>, <vscale x 8 x i8>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv8i8(<vscale x 8 x i8> %[[DATA]], <vscale x 8 x i1> %[[PG]], i8* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_vnum,_u16,,)(pg, base, vnum, data);
}

void test_svst1b_vnum_u32(svbool_t pg, uint8_t *base, int64_t vnum, svuint32_t data)
{
  // CHECK-LABEL: test_svst1b_vnum_u32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i8* %base to <vscale x 4 x i8>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 4 x i8>, <vscale x 4 x i8>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i8(<vscale x 4 x i8> %[[DATA]], <vscale x 4 x i1> %[[PG]], i8* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_vnum,_u32,,)(pg, base, vnum, data);
}

void test_svst1b_vnum_u64(svbool_t pg, uint8_t *base, int64_t vnum, svuint64_t data)
{
  // CHECK-LABEL: test_svst1b_vnum_u64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i8* %base to <vscale x 2 x i8>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 2 x i8>, <vscale x 2 x i8>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i8(<vscale x 2 x i8> %[[DATA]], <vscale x 2 x i1> %[[PG]], i8* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_vnum,_u64,,)(pg, base, vnum, data);
}

void test_svst1b_scatter_u32base_s32(svbool_t pg, svuint32_t bases, svint32_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u32base_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i8.nxv4i32(<vscale x 4 x i8> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter,_u32base,,_s32)(pg, bases, data);
}

void test_svst1b_scatter_u64base_s64(svbool_t pg, svuint64_t bases, svint64_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u64base_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i8.nxv2i64(<vscale x 2 x i8> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter,_u64base,,_s64)(pg, bases, data);
}

void test_svst1b_scatter_u32base_u32(svbool_t pg, svuint32_t bases, svuint32_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u32base_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i8.nxv4i32(<vscale x 4 x i8> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter,_u32base,,_u32)(pg, bases, data);
}

void test_svst1b_scatter_u64base_u64(svbool_t pg, svuint64_t bases, svuint64_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u64base_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i8.nxv2i64(<vscale x 2 x i8> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter,_u64base,,_u64)(pg, bases, data);
}

void test_svst1b_scatter_s32offset_s32(svbool_t pg, int8_t *base, svint32_t offsets, svint32_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_s32offset_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i8(<vscale x 4 x i8> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i8* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter_,s32,offset,_s32)(pg, base, offsets, data);
}

void test_svst1b_scatter_s64offset_s64(svbool_t pg, int8_t *base, svint64_t offsets, svint64_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_s64offset_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i8(<vscale x 2 x i8> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i8* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter_,s64,offset,_s64)(pg, base, offsets, data);
}

void test_svst1b_scatter_s32offset_u32(svbool_t pg, uint8_t *base, svint32_t offsets, svuint32_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_s32offset_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i8(<vscale x 4 x i8> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i8* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter_,s32,offset,_u32)(pg, base, offsets, data);
}

void test_svst1b_scatter_s64offset_u64(svbool_t pg, uint8_t *base, svint64_t offsets, svuint64_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_s64offset_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i8(<vscale x 2 x i8> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i8* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter_,s64,offset,_u64)(pg, base, offsets, data);
}

void test_svst1b_scatter_u32offset_s32(svbool_t pg, int8_t *base, svuint32_t offsets, svint32_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u32offset_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i8(<vscale x 4 x i8> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i8* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter_,u32,offset,_s32)(pg, base, offsets, data);
}

void test_svst1b_scatter_u64offset_s64(svbool_t pg, int8_t *base, svuint64_t offsets, svint64_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u64offset_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i8(<vscale x 2 x i8> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i8* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter_,u64,offset,_s64)(pg, base, offsets, data);
}

void test_svst1b_scatter_u32offset_u32(svbool_t pg, uint8_t *base, svuint32_t offsets, svuint32_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u32offset_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // Use the named captures (not raw %0/%1) so the check is robust against
  // value-numbering changes, matching every other test in this file.
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i8(<vscale x 4 x i8> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i8* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter_,u32,offset,_u32)(pg, base, offsets, data);
}

void test_svst1b_scatter_u64offset_u64(svbool_t pg, uint8_t *base, svuint64_t offsets, svuint64_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u64offset_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i8(<vscale x 2 x i8> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i8* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter_,u64,offset,_u64)(pg, base, offsets, data);
}

void test_svst1b_scatter_u32base_offset_s32(svbool_t pg, svuint32_t bases, int64_t offset, svint32_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u32base_offset_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i8.nxv4i32(<vscale x 4 x i8> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter,_u32base,_offset,_s32)(pg, bases, offset, data);
}

void test_svst1b_scatter_u64base_offset_s64(svbool_t pg, svuint64_t bases, int64_t offset, svint64_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u64base_offset_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i8.nxv2i64(<vscale x 2 x i8> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter,_u64base,_offset,_s64)(pg, bases, offset, data);
}

void test_svst1b_scatter_u32base_offset_u32(svbool_t pg, svuint32_t bases, int64_t offset, svuint32_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u32base_offset_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i8.nxv4i32(<vscale x 4 x i8> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter,_u32base,_offset,_u32)(pg, bases, offset, data);
}

void test_svst1b_scatter_u64base_offset_u64(svbool_t pg, svuint64_t bases, int64_t offset, svuint64_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u64base_offset_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i8.nxv2i64(<vscale x 2 x i8> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter,_u64base,_offset,_u64)(pg, bases, offset, data);
}
