1 // REQUIRES: aarch64-registered-target
2 // RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - -emit-llvm %s 2>&1 | FileCheck %s
3 // RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - -emit-llvm %s 2>&1 | FileCheck %s
4 // RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - %s >/dev/null
5 #include <arm_sve.h>
6
7 #ifdef SVE_OVERLOADED_FORMS
8 // A simple used,unused... macro, long enough to represent any SVE builtin.
9 #define SVE_ACLE_FUNC(A1,A2_UNUSED,A3,A4_UNUSED) A1##A3
10 #else
11 #define SVE_ACLE_FUNC(A1,A2,A3,A4) A1##A2##A3##A4
12 #endif
13
// Predicated truncating store: low 32 bits of each s64 element to int32_t*.
void test_svst1w_s64(svbool_t pg, int32_t *base, svint64_t data)
{
  // CHECK-LABEL: test_svst1w_s64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i32(<vscale x 2 x i32> %[[DATA]], <vscale x 2 x i1> %[[PG]], i32* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w,_s64,,)(pg, base, data);
}

// Predicated truncating store: low 32 bits of each u64 element to uint32_t*.
void test_svst1w_u64(svbool_t pg, uint32_t *base, svuint64_t data)
{
  // CHECK-LABEL: test_svst1w_u64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i32(<vscale x 2 x i32> %[[DATA]], <vscale x 2 x i1> %[[PG]], i32* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w,_u64,,)(pg, base, data);
}

// Truncating store with a vector-length-scaled offset (vnum).
void test_svst1w_vnum_s64(svbool_t pg, int32_t *base, int64_t vnum, svint64_t data)
{
  // CHECK-LABEL: test_svst1w_vnum_s64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i32* %base to <vscale x 2 x i32>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 2 x i32>, <vscale x 2 x i32>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i32(<vscale x 2 x i32> %[[DATA]], <vscale x 2 x i1> %[[PG]], i32* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w_vnum,_s64,,)(pg, base, vnum, data);
}

// Truncating store with a vector-length-scaled offset (vnum), unsigned data.
void test_svst1w_vnum_u64(svbool_t pg, uint32_t *base, int64_t vnum, svuint64_t data)
{
  // CHECK-LABEL: test_svst1w_vnum_u64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i32* %base to <vscale x 2 x i32>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 2 x i32>, <vscale x 2 x i32>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i32(<vscale x 2 x i32> %[[DATA]], <vscale x 2 x i1> %[[PG]], i32* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w_vnum,_u64,,)(pg, base, vnum, data);
}

// Truncating scatter store to vector of base addresses (offset 0).
void test_svst1w_scatter_u64base_s64(svbool_t pg, svuint64_t bases, svint64_t data)
{
  // CHECK-LABEL: test_svst1w_scatter_u64base_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i32.nxv2i64(<vscale x 2 x i32> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w_scatter,_u64base,,_s64)(pg, bases, data);
}

// Truncating scatter store to vector of base addresses (offset 0), unsigned data.
void test_svst1w_scatter_u64base_u64(svbool_t pg, svuint64_t bases, svuint64_t data)
{
  // CHECK-LABEL: test_svst1w_scatter_u64base_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i32.nxv2i64(<vscale x 2 x i32> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w_scatter,_u64base,,_u64)(pg, bases, data);
}

// Truncating scatter store: scalar base plus vector of signed byte offsets.
void test_svst1w_scatter_s64offset_s64(svbool_t pg, int32_t *base, svint64_t offsets, svint64_t data)
{
  // CHECK-LABEL: test_svst1w_scatter_s64offset_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i32(<vscale x 2 x i32> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i32* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w_scatter_,s64,offset,_s64)(pg, base, offsets, data);
}

// Truncating scatter store: scalar base plus signed byte offsets, unsigned data.
void test_svst1w_scatter_s64offset_u64(svbool_t pg, uint32_t *base, svint64_t offsets, svuint64_t data)
{
  // CHECK-LABEL: test_svst1w_scatter_s64offset_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i32(<vscale x 2 x i32> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i32* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w_scatter_,s64,offset,_u64)(pg, base, offsets, data);
}

// Truncating scatter store: scalar base plus vector of unsigned byte offsets.
void test_svst1w_scatter_u64offset_s64(svbool_t pg, int32_t *base, svuint64_t offsets, svint64_t data)
{
  // CHECK-LABEL: test_svst1w_scatter_u64offset_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i32(<vscale x 2 x i32> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i32* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w_scatter_,u64,offset,_s64)(pg, base, offsets, data);
}

// Truncating scatter store: scalar base plus unsigned byte offsets, unsigned data.
void test_svst1w_scatter_u64offset_u64(svbool_t pg, uint32_t *base, svuint64_t offsets, svuint64_t data)
{
  // CHECK-LABEL: test_svst1w_scatter_u64offset_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i32(<vscale x 2 x i32> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i32* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w_scatter_,u64,offset,_u64)(pg, base, offsets, data);
}

// Truncating scatter store: vector bases plus a scalar byte offset.
void test_svst1w_scatter_u64base_offset_s64(svbool_t pg, svuint64_t bases, int64_t offset, svint64_t data)
{
  // CHECK-LABEL: test_svst1w_scatter_u64base_offset_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i32.nxv2i64(<vscale x 2 x i32> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w_scatter,_u64base,_offset,_s64)(pg, bases, offset, data);
}

// Truncating scatter store: vector bases plus a scalar byte offset, unsigned data.
void test_svst1w_scatter_u64base_offset_u64(svbool_t pg, svuint64_t bases, int64_t offset, svuint64_t data)
{
  // CHECK-LABEL: test_svst1w_scatter_u64base_offset_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i32.nxv2i64(<vscale x 2 x i32> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w_scatter,_u64base,_offset,_u64)(pg, bases, offset, data);
}

// Truncating scatter store: scalar base plus vector of signed element indices.
void test_svst1w_scatter_s64index_s64(svbool_t pg, int32_t *base, svint64_t indices, svint64_t data)
{
  // CHECK-LABEL: test_svst1w_scatter_s64index_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i32(<vscale x 2 x i32> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i32* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w_scatter_,s64,index,_s64)(pg, base, indices, data);
}

// Truncating scatter store: scalar base plus signed element indices, unsigned data.
// NOTE: the CHECK on the intrinsic call previously matched the raw SSA names
// %0/%1 instead of the captured %[[TRUNC]]/%[[PG]] variables used by every
// sibling test; use the captures so the match is robust to renumbering.
void test_svst1w_scatter_s64index_u64(svbool_t pg, uint32_t *base, svint64_t indices, svuint64_t data)
{
  // CHECK-LABEL: test_svst1w_scatter_s64index_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i32(<vscale x 2 x i32> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i32* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w_scatter_,s64,index,_u64)(pg, base, indices, data);
}

// Truncating scatter store: scalar base plus vector of unsigned element indices.
void test_svst1w_scatter_u64index_s64(svbool_t pg, int32_t *base, svuint64_t indices, svint64_t data)
{
  // CHECK-LABEL: test_svst1w_scatter_u64index_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i32(<vscale x 2 x i32> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i32* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w_scatter_,u64,index,_s64)(pg, base, indices, data);
}

// Truncating scatter store: scalar base plus unsigned element indices, unsigned data.
void test_svst1w_scatter_u64index_u64(svbool_t pg, uint32_t *base, svuint64_t indices, svuint64_t data)
{
  // CHECK-LABEL: test_svst1w_scatter_u64index_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i32(<vscale x 2 x i32> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i32* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w_scatter_,u64,index,_u64)(pg, base, indices, data);
}

// Truncating scatter store: vector bases plus a scalar element index
// (index is scaled to a byte offset: shl by 2 for 32-bit elements).
void test_svst1w_scatter_u64base_index_s64(svbool_t pg, svuint64_t bases, int64_t index, svint64_t data)
{
  // CHECK-LABEL: test_svst1w_scatter_u64base_index_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 2
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i32.nxv2i64(<vscale x 2 x i32> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w_scatter,_u64base,_index,_s64)(pg, bases, index, data);
}

// Truncating scatter store: vector bases plus a scalar element index,
// unsigned data (index scaled to a byte offset: shl by 2).
void test_svst1w_scatter_u64base_index_u64(svbool_t pg, svuint64_t bases, int64_t index, svuint64_t data)
{
  // CHECK-LABEL: test_svst1w_scatter_u64base_index_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 2
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i32.nxv2i64(<vscale x 2 x i32> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1w_scatter,_u64base,_index,_u64)(pg, bases, index, data);
}
