1 // REQUIRES: aarch64-registered-target
2 // RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - -emit-llvm %s 2>&1 | FileCheck %s
3 // RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - -emit-llvm %s 2>&1 | FileCheck %s
4 // RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - %s >/dev/null
5 #include <arm_sve.h>
6 
7 #ifdef SVE_OVERLOADED_FORMS
8 // A simple used,unused... macro, long enough to represent any SVE builtin.
9 #define SVE_ACLE_FUNC(A1,A2_UNUSED,A3,A4_UNUSED) A1##A3
10 #else
11 #define SVE_ACLE_FUNC(A1,A2,A3,A4) A1##A2##A3##A4
12 #endif
13 
// svst1h: store the low 16 bits of each active 32-bit element of `data` to `base`.
void test_svst1h_s32(svbool_t pg, int16_t *base, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_s32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i16(<vscale x 4 x i16> %[[DATA]], <vscale x 4 x i1> %[[PG]], i16* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h,_s32,,)(pg, base, data);
}
23 
// svst1h: store the low 16 bits of each active 64-bit element of `data` to `base`.
void test_svst1h_s64(svbool_t pg, int16_t *base, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_s64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i16(<vscale x 2 x i16> %[[DATA]], <vscale x 2 x i1> %[[PG]], i16* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h,_s64,,)(pg, base, data);
}
33 
// Unsigned 32-bit variant; lowers to the same truncating st1 intrinsic.
void test_svst1h_u32(svbool_t pg, uint16_t *base, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_u32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i16(<vscale x 4 x i16> %[[DATA]], <vscale x 4 x i1> %[[PG]], i16* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h,_u32,,)(pg, base, data);
}
43 
// Unsigned 64-bit variant; lowers to the same truncating st1 intrinsic.
void test_svst1h_u64(svbool_t pg, uint16_t *base, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_u64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i16(<vscale x 2 x i16> %[[DATA]], <vscale x 2 x i1> %[[PG]], i16* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h,_u64,,)(pg, base, data);
}
53 
// vnum form: the store address is `base` advanced by `vnum` whole vectors.
void test_svst1h_vnum_s32(svbool_t pg, int16_t *base, int64_t vnum, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_vnum_s32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i16* %base to <vscale x 4 x i16>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 4 x i16>, <vscale x 4 x i16>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i16(<vscale x 4 x i16> %[[DATA]], <vscale x 4 x i1> %[[PG]], i16* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_vnum,_s32,,)(pg, base, vnum, data);
}
65 
// vnum form: the store address is `base` advanced by `vnum` whole vectors.
void test_svst1h_vnum_s64(svbool_t pg, int16_t *base, int64_t vnum, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_vnum_s64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i16* %base to <vscale x 2 x i16>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 2 x i16>, <vscale x 2 x i16>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i16(<vscale x 2 x i16> %[[DATA]], <vscale x 2 x i1> %[[PG]], i16* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_vnum,_s64,,)(pg, base, vnum, data);
}
77 
// vnum form, unsigned 32-bit elements.
void test_svst1h_vnum_u32(svbool_t pg, uint16_t *base, int64_t vnum, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_vnum_u32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i16* %base to <vscale x 4 x i16>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 4 x i16>, <vscale x 4 x i16>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i16(<vscale x 4 x i16> %[[DATA]], <vscale x 4 x i1> %[[PG]], i16* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_vnum,_u32,,)(pg, base, vnum, data);
}
89 
// vnum form, unsigned 64-bit elements.
void test_svst1h_vnum_u64(svbool_t pg, uint16_t *base, int64_t vnum, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_vnum_u64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i16* %base to <vscale x 2 x i16>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 2 x i16>, <vscale x 2 x i16>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i16(<vscale x 2 x i16> %[[DATA]], <vscale x 2 x i1> %[[PG]], i16* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_vnum,_u64,,)(pg, base, vnum, data);
}
101 
// Scatter to a vector of 32-bit base addresses (scalar offset 0).
void test_svst1h_scatter_u32base_s32(svbool_t pg, svuint32_t bases, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32base_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u32base,,_s32)(pg, bases, data);
}
111 
// Scatter to a vector of 64-bit base addresses (scalar offset 0).
void test_svst1h_scatter_u64base_s64(svbool_t pg, svuint64_t bases, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64base_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u64base,,_s64)(pg, bases, data);
}
121 
// Scatter to 32-bit vector bases, unsigned data.
void test_svst1h_scatter_u32base_u32(svbool_t pg, svuint32_t bases, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32base_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u32base,,_u32)(pg, bases, data);
}
131 
// Scatter to 64-bit vector bases, unsigned data.
void test_svst1h_scatter_u64base_u64(svbool_t pg, svuint64_t bases, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64base_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u64base,,_u64)(pg, bases, data);
}
141 
// Scatter with signed 32-bit per-element byte offsets (sign-extended addressing).
void test_svst1h_scatter_s32offset_s32(svbool_t pg, int16_t *base, svint32_t offsets, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_s32offset_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i16(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i16* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,s32,offset,_s32)(pg, base, offsets, data);
}
151 
// Scatter with 64-bit per-element byte offsets.
void test_svst1h_scatter_s64offset_s64(svbool_t pg, int16_t *base, svint64_t offsets, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_s64offset_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i16(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i16* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,s64,offset,_s64)(pg, base, offsets, data);
}
161 
// Scatter with signed 32-bit byte offsets, unsigned data.
void test_svst1h_scatter_s32offset_u32(svbool_t pg, uint16_t *base, svint32_t offsets, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_s32offset_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i16(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i16* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,s32,offset,_u32)(pg, base, offsets, data);
}
171 
// Scatter with 64-bit byte offsets, unsigned data.
void test_svst1h_scatter_s64offset_u64(svbool_t pg, uint16_t *base, svint64_t offsets, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_s64offset_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i16(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i16* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,s64,offset,_u64)(pg, base, offsets, data);
}
181 
// Scatter with unsigned 32-bit byte offsets (zero-extended addressing).
void test_svst1h_scatter_u32offset_s32(svbool_t pg, int16_t *base, svuint32_t offsets, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32offset_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i16(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i16* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,u32,offset,_s32)(pg, base, offsets, data);
}
191 
// Scatter with unsigned 64-bit byte offsets.
void test_svst1h_scatter_u64offset_s64(svbool_t pg, int16_t *base, svuint64_t offsets, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64offset_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i16(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i16* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,u64,offset,_s64)(pg, base, offsets, data);
}
201 
// Scatter with unsigned 32-bit byte offsets, unsigned data.
void test_svst1h_scatter_u32offset_u32(svbool_t pg, uint16_t *base, svuint32_t offsets, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32offset_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i16(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i16* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,u32,offset,_u32)(pg, base, offsets, data);
}
211 
// Scatter with unsigned 64-bit byte offsets, unsigned data.
void test_svst1h_scatter_u64offset_u64(svbool_t pg, uint16_t *base, svuint64_t offsets, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64offset_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i16(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i16* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,u64,offset,_u64)(pg, base, offsets, data);
}
221 
// Scatter to vector bases plus a scalar byte offset.
void test_svst1h_scatter_u32base_offset_s32(svbool_t pg, svuint32_t bases, int64_t offset, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32base_offset_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u32base,_offset,_s32)(pg, bases, offset, data);
}
231 
// Scatter to 64-bit vector bases plus a scalar byte offset.
void test_svst1h_scatter_u64base_offset_s64(svbool_t pg, svuint64_t bases, int64_t offset, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64base_offset_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u64base,_offset,_s64)(pg, bases, offset, data);
}
241 
// Scatter to 32-bit vector bases plus a scalar byte offset, unsigned data.
void test_svst1h_scatter_u32base_offset_u32(svbool_t pg, svuint32_t bases, int64_t offset, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32base_offset_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u32base,_offset,_u32)(pg, bases, offset, data);
}
251 
// Scatter to 64-bit vector bases plus a scalar byte offset, unsigned data.
void test_svst1h_scatter_u64base_offset_u64(svbool_t pg, svuint64_t bases, int64_t offset, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64base_offset_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u64base,_offset,_u64)(pg, bases, offset, data);
}
261 
// Scatter with signed 32-bit per-element indices (scaled by element size).
void test_svst1h_scatter_s32index_s32(svbool_t pg, int16_t *base, svint32_t indices, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_s32index_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.index.nxv4i16(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i16* %base, <vscale x 4 x i32> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,s32,index,_s32)(pg, base, indices, data);
}
271 
// Scatter with 64-bit per-element indices (scaled by element size).
void test_svst1h_scatter_s64index_s64(svbool_t pg, int16_t *base, svint64_t indices, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_s64index_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i16(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i16* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,s64,index,_s64)(pg, base, indices, data);
}
281 
// Scatter with signed 32-bit indices, unsigned data.
void test_svst1h_scatter_s32index_u32(svbool_t pg, uint16_t *base, svint32_t indices, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_s32index_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.index.nxv4i16(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i16* %base, <vscale x 4 x i32> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,s32,index,_u32)(pg, base, indices, data);
}
291 
// Scatter with 64-bit indices, unsigned data.
void test_svst1h_scatter_s64index_u64(svbool_t pg, uint16_t *base, svint64_t indices, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_s64index_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i16(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i16* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,s64,index,_u64)(pg, base, indices, data);
}
301 
// Scatter with unsigned 32-bit indices (zero-extended, scaled by element size).
void test_svst1h_scatter_u32index_s32(svbool_t pg, int16_t *base, svuint32_t indices, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32index_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.index.nxv4i16(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i16* %base, <vscale x 4 x i32> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,u32,index,_s32)(pg, base, indices, data);
}
311 
// Scatter with unsigned 64-bit indices.
void test_svst1h_scatter_u64index_s64(svbool_t pg, int16_t *base, svuint64_t indices, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64index_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i16(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i16* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,u64,index,_s64)(pg, base, indices, data);
}
321 
// Scatter with unsigned 32-bit indices, unsigned data.
void test_svst1h_scatter_u32index_u32(svbool_t pg, uint16_t *base, svuint32_t indices, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32index_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.index.nxv4i16(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i16* %base, <vscale x 4 x i32> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,u32,index,_u32)(pg, base, indices, data);
}
331 
// Scatter with unsigned 64-bit indices, unsigned data.
void test_svst1h_scatter_u64index_u64(svbool_t pg, uint16_t *base, svuint64_t indices, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64index_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i16(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i16* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,u64,index,_u64)(pg, base, indices, data);
}
341 
// Scatter to vector bases plus a scalar index; index is scaled to bytes (shl by 1).
void test_svst1h_scatter_u32base_index_s32(svbool_t pg, svuint32_t bases, int64_t index, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32base_index_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]]  = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 1
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u32base,_index,_s32)(pg, bases, index, data);
}
352 
// Scatter to 64-bit vector bases plus a scalar index; index is scaled to bytes (shl by 1).
void test_svst1h_scatter_u64base_index_s64(svbool_t pg, svuint64_t bases, int64_t index, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64base_index_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]]  = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 1
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u64base,_index,_s64)(pg, bases, index, data);
}
363 
// Scatter to 32-bit vector bases plus a scalar index, unsigned data.
void test_svst1h_scatter_u32base_index_u32(svbool_t pg, svuint32_t bases, int64_t index, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32base_index_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]]  = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 1
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u32base,_index,_u32)(pg, bases, index, data);
}
374 
// Scatter to 64-bit vector bases plus a scalar index, unsigned data.
void test_svst1h_scatter_u64base_index_u64(svbool_t pg, svuint64_t bases, int64_t index, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64base_index_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]]  = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 1
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u64base,_index,_u64)(pg, bases, index, data);
}