1 // REQUIRES: aarch64-registered-target
2 // RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - -emit-llvm %s 2>&1 | FileCheck %s
3 // RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - -emit-llvm %s 2>&1 | FileCheck %s
4 // RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - %s >/dev/null
5 #include <arm_sve.h>
6
7 #ifdef SVE_OVERLOADED_FORMS
8 // A simple used,unused... macro, long enough to represent any SVE builtin.
9 #define SVE_ACLE_FUNC(A1,A2_UNUSED,A3,A4_UNUSED) A1##A3
10 #else
11 #define SVE_ACLE_FUNC(A1,A2,A3,A4) A1##A2##A3##A4
12 #endif
13
void test_svst1b_s16(svbool_t pg, int8_t *base, svint16_t data)
{
  // CHECK-LABEL: test_svst1b_s16
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 8 x i16> %data to <vscale x 8 x i8>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv8i8(<vscale x 8 x i8> %[[DATA]], <vscale x 8 x i1> %[[PG]], i8* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b,_s16,,)(pg, base, data);
}
23
void test_svst1b_s32(svbool_t pg, int8_t *base, svint32_t data)
{
  // CHECK-LABEL: test_svst1b_s32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i8(<vscale x 4 x i8> %[[DATA]], <vscale x 4 x i1> %[[PG]], i8* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b,_s32,,)(pg, base, data);
}
33
void test_svst1b_s64(svbool_t pg, int8_t *base, svint64_t data)
{
  // CHECK-LABEL: test_svst1b_s64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i8(<vscale x 2 x i8> %[[DATA]], <vscale x 2 x i1> %[[PG]], i8* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b,_s64,,)(pg, base, data);
}
43
void test_svst1b_u16(svbool_t pg, uint8_t *base, svuint16_t data)
{
  // CHECK-LABEL: test_svst1b_u16
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 8 x i16> %data to <vscale x 8 x i8>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv8i8(<vscale x 8 x i8> %[[DATA]], <vscale x 8 x i1> %[[PG]], i8* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b,_u16,,)(pg, base, data);
}
53
void test_svst1b_u32(svbool_t pg, uint8_t *base, svuint32_t data)
{
  // CHECK-LABEL: test_svst1b_u32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i8(<vscale x 4 x i8> %[[DATA]], <vscale x 4 x i1> %[[PG]], i8* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b,_u32,,)(pg, base, data);
}
63
void test_svst1b_u64(svbool_t pg, uint8_t *base, svuint64_t data)
{
  // CHECK-LABEL: test_svst1b_u64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i8(<vscale x 2 x i8> %[[DATA]], <vscale x 2 x i1> %[[PG]], i8* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b,_u64,,)(pg, base, data);
}
73
void test_svst1b_vnum_s16(svbool_t pg, int8_t *base, int64_t vnum, svint16_t data)
{
  // CHECK-LABEL: test_svst1b_vnum_s16
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i8* %base to <vscale x 8 x i8>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 8 x i16> %data to <vscale x 8 x i8>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 8 x i8>, <vscale x 8 x i8>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv8i8(<vscale x 8 x i8> %[[DATA]], <vscale x 8 x i1> %[[PG]], i8* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_vnum,_s16,,)(pg, base, vnum, data);
}
85
void test_svst1b_vnum_s32(svbool_t pg, int8_t *base, int64_t vnum, svint32_t data)
{
  // CHECK-LABEL: test_svst1b_vnum_s32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i8* %base to <vscale x 4 x i8>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 4 x i8>, <vscale x 4 x i8>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i8(<vscale x 4 x i8> %[[DATA]], <vscale x 4 x i1> %[[PG]], i8* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_vnum,_s32,,)(pg, base, vnum, data);
}
97
void test_svst1b_vnum_s64(svbool_t pg, int8_t *base, int64_t vnum, svint64_t data)
{
  // CHECK-LABEL: test_svst1b_vnum_s64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i8* %base to <vscale x 2 x i8>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 2 x i8>, <vscale x 2 x i8>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i8(<vscale x 2 x i8> %[[DATA]], <vscale x 2 x i1> %[[PG]], i8* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_vnum,_s64,,)(pg, base, vnum, data);
}
109
void test_svst1b_vnum_u16(svbool_t pg, uint8_t *base, int64_t vnum, svuint16_t data)
{
  // CHECK-LABEL: test_svst1b_vnum_u16
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i8* %base to <vscale x 8 x i8>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 8 x i16> %data to <vscale x 8 x i8>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 8 x i8>, <vscale x 8 x i8>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv8i8(<vscale x 8 x i8> %[[DATA]], <vscale x 8 x i1> %[[PG]], i8* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_vnum,_u16,,)(pg, base, vnum, data);
}
121
void test_svst1b_vnum_u32(svbool_t pg, uint8_t *base, int64_t vnum, svuint32_t data)
{
  // CHECK-LABEL: test_svst1b_vnum_u32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i8* %base to <vscale x 4 x i8>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 4 x i8>, <vscale x 4 x i8>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i8(<vscale x 4 x i8> %[[DATA]], <vscale x 4 x i1> %[[PG]], i8* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_vnum,_u32,,)(pg, base, vnum, data);
}
133
void test_svst1b_vnum_u64(svbool_t pg, uint8_t *base, int64_t vnum, svuint64_t data)
{
  // CHECK-LABEL: test_svst1b_vnum_u64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i8* %base to <vscale x 2 x i8>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 2 x i8>, <vscale x 2 x i8>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i8(<vscale x 2 x i8> %[[DATA]], <vscale x 2 x i1> %[[PG]], i8* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_vnum,_u64,,)(pg, base, vnum, data);
}
145
void test_svst1b_scatter_u32base_s32(svbool_t pg, svuint32_t bases, svint32_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u32base_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i8.nxv4i32(<vscale x 4 x i8> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter,_u32base,,_s32)(pg, bases, data);
}
155
void test_svst1b_scatter_u64base_s64(svbool_t pg, svuint64_t bases, svint64_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u64base_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i8.nxv2i64(<vscale x 2 x i8> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter,_u64base,,_s64)(pg, bases, data);
}
165
void test_svst1b_scatter_u32base_u32(svbool_t pg, svuint32_t bases, svuint32_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u32base_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i8.nxv4i32(<vscale x 4 x i8> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter,_u32base,,_u32)(pg, bases, data);
}
175
void test_svst1b_scatter_u64base_u64(svbool_t pg, svuint64_t bases, svuint64_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u64base_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i8.nxv2i64(<vscale x 2 x i8> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter,_u64base,,_u64)(pg, bases, data);
}
185
void test_svst1b_scatter_s32offset_s32(svbool_t pg, int8_t *base, svint32_t offsets, svint32_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_s32offset_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i8(<vscale x 4 x i8> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i8* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter_,s32,offset,_s32)(pg, base, offsets, data);
}
195
void test_svst1b_scatter_s64offset_s64(svbool_t pg, int8_t *base, svint64_t offsets, svint64_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_s64offset_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i8(<vscale x 2 x i8> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i8* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter_,s64,offset,_s64)(pg, base, offsets, data);
}
205
void test_svst1b_scatter_s32offset_u32(svbool_t pg, uint8_t *base, svint32_t offsets, svuint32_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_s32offset_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i8(<vscale x 4 x i8> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i8* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter_,s32,offset,_u32)(pg, base, offsets, data);
}
215
void test_svst1b_scatter_s64offset_u64(svbool_t pg, uint8_t *base, svint64_t offsets, svuint64_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_s64offset_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i8(<vscale x 2 x i8> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i8* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter_,s64,offset,_u64)(pg, base, offsets, data);
}
225
void test_svst1b_scatter_u32offset_s32(svbool_t pg, int8_t *base, svuint32_t offsets, svint32_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u32offset_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i8(<vscale x 4 x i8> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i8* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter_,u32,offset,_s32)(pg, base, offsets, data);
}
235
void test_svst1b_scatter_u64offset_s64(svbool_t pg, int8_t *base, svuint64_t offsets, svint64_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u64offset_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i8(<vscale x 2 x i8> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i8* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter_,u64,offset,_s64)(pg, base, offsets, data);
}
245
void test_svst1b_scatter_u32offset_u32(svbool_t pg, uint8_t *base, svuint32_t offsets, svuint32_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u32offset_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // Use the captured FileCheck variables rather than raw SSA names (%0/%1),
  // matching the sibling tests and staying robust against value renumbering.
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i8(<vscale x 4 x i8> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i8* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter_,u32,offset,_u32)(pg, base, offsets, data);
}
255
void test_svst1b_scatter_u64offset_u64(svbool_t pg, uint8_t *base, svuint64_t offsets, svuint64_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u64offset_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i8(<vscale x 2 x i8> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i8* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter_,u64,offset,_u64)(pg, base, offsets, data);
}
265
void test_svst1b_scatter_u32base_offset_s32(svbool_t pg, svuint32_t bases, int64_t offset, svint32_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u32base_offset_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i8.nxv4i32(<vscale x 4 x i8> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter,_u32base,_offset,_s32)(pg, bases, offset, data);
}
275
void test_svst1b_scatter_u64base_offset_s64(svbool_t pg, svuint64_t bases, int64_t offset, svint64_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u64base_offset_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i8.nxv2i64(<vscale x 2 x i8> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter,_u64base,_offset,_s64)(pg, bases, offset, data);
}
285
void test_svst1b_scatter_u32base_offset_u32(svbool_t pg, svuint32_t bases, int64_t offset, svuint32_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u32base_offset_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i8.nxv4i32(<vscale x 4 x i8> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter,_u32base,_offset,_u32)(pg, bases, offset, data);
}
295
void test_svst1b_scatter_u64base_offset_u64(svbool_t pg, svuint64_t bases, int64_t offset, svuint64_t data)
{
  // CHECK-LABEL: test_svst1b_scatter_u64base_offset_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i8.nxv2i64(<vscale x 2 x i8> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1b_scatter,_u64base,_offset,_u64)(pg, bases, offset, data);
}
305