// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve2 -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - %s | FileCheck %s
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve2 -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - -x c++ %s | FileCheck %s
// RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve2 -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - %s | FileCheck %s
// RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve2 -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - -x c++ %s | FileCheck %s
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -fsyntax-only -verify -verify-ignore-unexpected=error %s
// RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -fsyntax-only -verify=overload -verify-ignore-unexpected=error %s
7
8 #include <arm_sve.h>
9
10 #ifdef SVE_OVERLOADED_FORMS
11 // A simple used,unused... macro, long enough to represent any SVE builtin.
12 #define SVE_ACLE_FUNC(A1, A2_UNUSED, A3, A4_UNUSED) A1##A3
13 #else
14 #define SVE_ACLE_FUNC(A1, A2, A3, A4) A1##A2##A3##A4
15 #endif
16
test_svldnt1_gather_u32base_s32(svbool_t pg,svuint32_t bases)17 svint32_t test_svldnt1_gather_u32base_s32(svbool_t pg, svuint32_t bases) {
18 // CHECK-LABEL: test_svldnt1_gather_u32base_s32
19 // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
20 // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 0)
21 // CHECK: ret <vscale x 4 x i32> [[LOAD]]
22 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_s32'}}
23 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_s32'}}
24 return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _s32, )(pg, bases);
25 }
26
test_svldnt1_gather_u64base_s64(svbool_t pg,svuint64_t bases)27 svint64_t test_svldnt1_gather_u64base_s64(svbool_t pg, svuint64_t bases) {
28 // CHECK-LABEL: test_svldnt1_gather_u64base_s64
29 // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
30 // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 0)
31 // CHECK: ret <vscale x 2 x i64> [[LOAD]]
32 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_s64'}}
33 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_s64'}}
34 return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _s64, )(pg, bases);
35 }
36
svuint32_t test_svldnt1_gather_u32base_u32(svbool_t pg, svuint32_t bases) {
  // CHECK-LABEL: test_svldnt1_gather_u32base_u32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: ret <vscale x 4 x i32> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_u32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_u32'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _u32, )(pg, bases);
}
46
svuint64_t test_svldnt1_gather_u64base_u64(svbool_t pg, svuint64_t bases) {
  // CHECK-LABEL: test_svldnt1_gather_u64base_u64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_u64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_u64'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _u64, )(pg, bases);
}
56
test_svldnt1_gather_u32base_f32(svbool_t pg,svuint32_t bases)57 svfloat32_t test_svldnt1_gather_u32base_f32(svbool_t pg, svuint32_t bases) {
58 // CHECK-LABEL: test_svldnt1_gather_u32base_f32
59 // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
60 // CHECK: [[LOAD:%.*]] = call <vscale x 4 x float> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4f32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 0)
61 // CHECK: ret <vscale x 4 x float> [[LOAD]]
62 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_f32'}}
63 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_f32'}}
64 return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _f32, )(pg, bases);
65 }
66
test_svldnt1_gather_u64base_f64(svbool_t pg,svuint64_t bases)67 svfloat64_t test_svldnt1_gather_u64base_f64(svbool_t pg, svuint64_t bases) {
68 // CHECK-LABEL: test_svldnt1_gather_u64base_f64
69 // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
70 // CHECK: [[LOAD:%.*]] = call <vscale x 2 x double> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2f64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 0)
71 // CHECK: ret <vscale x 2 x double> [[LOAD]]
72 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_f64'}}
73 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_f64'}}
74 return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _f64, )(pg, bases);
75 }
76
svint64_t test_svldnt1_gather_s64offset_s64(svbool_t pg, const int64_t *base, svint64_t offsets) {
  // CHECK-LABEL: test_svldnt1_gather_s64offset_s64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.nxv2i64(<vscale x 2 x i1> [[PG]], i64* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_s64offset_s64'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, s64, offset, _s64)(pg, base, offsets);
}
86
test_svldnt1_gather_s64offset_u64(svbool_t pg,const uint64_t * base,svint64_t offsets)87 svuint64_t test_svldnt1_gather_s64offset_u64(svbool_t pg, const uint64_t *base, svint64_t offsets) {
88 // CHECK-LABEL: test_svldnt1_gather_s64offset_u64
89 // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
90 // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.nxv2i64(<vscale x 2 x i1> [[PG]], i64* %base, <vscale x 2 x i64> %offsets)
91 // CHECK: ret <vscale x 2 x i64> [[LOAD]]
92 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
93 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_s64offset_u64'}}
94 return SVE_ACLE_FUNC(svldnt1_gather_, s64, offset, _u64)(pg, base, offsets);
95 }
96
test_svldnt1_gather_s64offset_f64(svbool_t pg,const float64_t * base,svint64_t offsets)97 svfloat64_t test_svldnt1_gather_s64offset_f64(svbool_t pg, const float64_t *base, svint64_t offsets) {
98 // CHECK-LABEL: test_svldnt1_gather_s64offset_f64
99 // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
100 // CHECK: [[LOAD:%.*]] = call <vscale x 2 x double> @llvm.aarch64.sve.ldnt1.gather.nxv2f64(<vscale x 2 x i1> [[PG]], double* %base, <vscale x 2 x i64> %offsets)
101 // CHECK: ret <vscale x 2 x double> [[LOAD]]
102 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
103 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_s64offset_f64'}}
104 return SVE_ACLE_FUNC(svldnt1_gather_, s64, offset, _f64)(pg, base, offsets);
105 }
106
test_svldnt1_gather_u32offset_s32(svbool_t pg,const int32_t * base,svuint32_t offsets)107 svint32_t test_svldnt1_gather_u32offset_s32(svbool_t pg, const int32_t *base, svuint32_t offsets) {
108 // CHECK-LABEL: test_svldnt1_gather_u32offset_s32
109 // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
110 // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.uxtw.nxv4i32(<vscale x 4 x i1> [[PG]], i32* %base, <vscale x 4 x i32> %offsets)
111 // CHECK: ret <vscale x 4 x i32> [[LOAD]]
112 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
113 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32offset_s32'}}
114 return SVE_ACLE_FUNC(svldnt1_gather_, u32, offset, _s32)(pg, base, offsets);
115 }
116
test_svldnt1_gather_u64offset_s64(svbool_t pg,const int64_t * base,svuint64_t offsets)117 svint64_t test_svldnt1_gather_u64offset_s64(svbool_t pg, const int64_t *base, svuint64_t offsets) {
118 // CHECK-LABEL: test_svldnt1_gather_u64offset_s64
119 // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
120 // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.nxv2i64(<vscale x 2 x i1> [[PG]], i64* %base, <vscale x 2 x i64> %offsets)
121 // CHECK: ret <vscale x 2 x i64> [[LOAD]]
122 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
123 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64offset_s64'}}
124 return SVE_ACLE_FUNC(svldnt1_gather_, u64, offset, _s64)(pg, base, offsets);
125 }
126
svuint32_t test_svldnt1_gather_u32offset_u32(svbool_t pg, const uint32_t *base, svuint32_t offsets) {
  // CHECK-LABEL: test_svldnt1_gather_u32offset_u32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.uxtw.nxv4i32(<vscale x 4 x i1> [[PG]], i32* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret <vscale x 4 x i32> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32offset_u32'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, u32, offset, _u32)(pg, base, offsets);
}
136
svuint64_t test_svldnt1_gather_u64offset_u64(svbool_t pg, const uint64_t *base, svuint64_t offsets) {
  // CHECK-LABEL: test_svldnt1_gather_u64offset_u64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.nxv2i64(<vscale x 2 x i1> [[PG]], i64* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64offset_u64'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, u64, offset, _u64)(pg, base, offsets);
}
146
test_svldnt1_gather_u32offset_f32(svbool_t pg,const float32_t * base,svuint32_t offsets)147 svfloat32_t test_svldnt1_gather_u32offset_f32(svbool_t pg, const float32_t *base, svuint32_t offsets) {
148 // CHECK-LABEL: test_svldnt1_gather_u32offset_f32
149 // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
150 // CHECK: [[LOAD:%.*]] = call <vscale x 4 x float> @llvm.aarch64.sve.ldnt1.gather.uxtw.nxv4f32(<vscale x 4 x i1> [[PG]], float* %base, <vscale x 4 x i32> %offsets)
151 // CHECK: ret <vscale x 4 x float> [[LOAD]]
152 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
153 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32offset_f32'}}
154 return SVE_ACLE_FUNC(svldnt1_gather_, u32, offset, _f32)(pg, base, offsets);
155 }
156
test_svldnt1_gather_u64offset_f64(svbool_t pg,const float64_t * base,svuint64_t offsets)157 svfloat64_t test_svldnt1_gather_u64offset_f64(svbool_t pg, const float64_t *base, svuint64_t offsets) {
158 // CHECK-LABEL: test_svldnt1_gather_u64offset_f64
159 // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
160 // CHECK: [[LOAD:%.*]] = call <vscale x 2 x double> @llvm.aarch64.sve.ldnt1.gather.nxv2f64(<vscale x 2 x i1> [[PG]], double* %base, <vscale x 2 x i64> %offsets)
161 // CHECK: ret <vscale x 2 x double> [[LOAD]]
162 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
163 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64offset_f64'}}
164 return SVE_ACLE_FUNC(svldnt1_gather_, u64, offset, _f64)(pg, base, offsets);
165 }
166
test_svldnt1_gather_u32base_offset_s32(svbool_t pg,svuint32_t bases,int64_t offset)167 svint32_t test_svldnt1_gather_u32base_offset_s32(svbool_t pg, svuint32_t bases, int64_t offset) {
168 // CHECK-LABEL: test_svldnt1_gather_u32base_offset_s32
169 // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
170 // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 %offset)
171 // CHECK: ret <vscale x 4 x i32> [[LOAD]]
172 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset_s32'}}
173 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_offset_s32'}}
174 return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _offset_s32, )(pg, bases, offset);
175 }
176
test_svldnt1_gather_u64base_offset_s64(svbool_t pg,svuint64_t bases,int64_t offset)177 svint64_t test_svldnt1_gather_u64base_offset_s64(svbool_t pg, svuint64_t bases, int64_t offset) {
178 // CHECK-LABEL: test_svldnt1_gather_u64base_offset_s64
179 // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
180 // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 %offset)
181 // CHECK: ret <vscale x 2 x i64> [[LOAD]]
182 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset_s64'}}
183 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_offset_s64'}}
184 return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _offset_s64, )(pg, bases, offset);
185 }
186
svuint32_t test_svldnt1_gather_u32base_offset_u32(svbool_t pg, svuint32_t bases, int64_t offset) {
  // CHECK-LABEL: test_svldnt1_gather_u32base_offset_u32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: ret <vscale x 4 x i32> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset_u32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_offset_u32'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _offset_u32, )(pg, bases, offset);
}
196
svuint64_t test_svldnt1_gather_u64base_offset_u64(svbool_t pg, svuint64_t bases, int64_t offset) {
  // CHECK-LABEL: test_svldnt1_gather_u64base_offset_u64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset_u64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_offset_u64'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _offset_u64, )(pg, bases, offset);
}
206
test_svldnt1_gather_u32base_offset_f32(svbool_t pg,svuint32_t bases,int64_t offset)207 svfloat32_t test_svldnt1_gather_u32base_offset_f32(svbool_t pg, svuint32_t bases, int64_t offset) {
208 // CHECK-LABEL: test_svldnt1_gather_u32base_offset_f32
209 // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
210 // CHECK: [[LOAD:%.*]] = call <vscale x 4 x float> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4f32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 %offset)
211 // CHECK: ret <vscale x 4 x float> [[LOAD]]
212 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset_f32'}}
213 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_offset_f32'}}
214 return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _offset_f32, )(pg, bases, offset);
215 }
216
test_svldnt1_gather_u64base_offset_f64(svbool_t pg,svuint64_t bases,int64_t offset)217 svfloat64_t test_svldnt1_gather_u64base_offset_f64(svbool_t pg, svuint64_t bases, int64_t offset) {
218 // CHECK-LABEL: test_svldnt1_gather_u64base_offset_f64
219 // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
220 // CHECK: [[LOAD:%.*]] = call <vscale x 2 x double> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2f64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 %offset)
221 // CHECK: ret <vscale x 2 x double> [[LOAD]]
222 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset_f64'}}
223 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_offset_f64'}}
224 return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _offset_f64, )(pg, bases, offset);
225 }
226
svint64_t test_svldnt1_gather_s64index_s64(svbool_t pg, const int64_t *base, svint64_t indices) {
  // CHECK-LABEL: test_svldnt1_gather_s64index_s64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.index.nxv2i64(<vscale x 2 x i1> [[PG]], i64* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_s64index_s64'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, s64, index, _s64)(pg, base, indices);
}
236
test_svldnt1_gather_s64index_u64(svbool_t pg,const uint64_t * base,svint64_t indices)237 svuint64_t test_svldnt1_gather_s64index_u64(svbool_t pg, const uint64_t *base, svint64_t indices) {
238 // CHECK-LABEL: test_svldnt1_gather_s64index_u64
239 // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
240 // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.index.nxv2i64(<vscale x 2 x i1> [[PG]], i64* %base, <vscale x 2 x i64> %indices)
241 // CHECK: ret <vscale x 2 x i64> [[LOAD]]
242 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index'}}
243 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_s64index_u64'}}
244 return SVE_ACLE_FUNC(svldnt1_gather_, s64, index, _u64)(pg, base, indices);
245 }
246
test_svldnt1_gather_s64index_f64(svbool_t pg,const float64_t * base,svint64_t indices)247 svfloat64_t test_svldnt1_gather_s64index_f64(svbool_t pg, const float64_t *base, svint64_t indices) {
248 // CHECK-LABEL: test_svldnt1_gather_s64index_f64
249 // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
250 // CHECK: [[LOAD:%.*]] = call <vscale x 2 x double> @llvm.aarch64.sve.ldnt1.gather.index.nxv2f64(<vscale x 2 x i1> [[PG]], double* %base, <vscale x 2 x i64> %indices)
251 // CHECK: ret <vscale x 2 x double> [[LOAD]]
252 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index'}}
253 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_s64index_f64'}}
254 return SVE_ACLE_FUNC(svldnt1_gather_, s64, index, _f64)(pg, base, indices);
255 }
256
test_svldnt1_gather_u64index_s64(svbool_t pg,const int64_t * base,svuint64_t indices)257 svint64_t test_svldnt1_gather_u64index_s64(svbool_t pg, const int64_t *base, svuint64_t indices) {
258 // CHECK-LABEL: test_svldnt1_gather_u64index_s64
259 // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
260 // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.index.nxv2i64(<vscale x 2 x i1> [[PG]], i64* %base, <vscale x 2 x i64> %indices)
261 // CHECK: ret <vscale x 2 x i64> [[LOAD]]
262 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index'}}
263 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64index_s64'}}
264 return SVE_ACLE_FUNC(svldnt1_gather_, u64, index, _s64)(pg, base, indices);
265 }
266
svuint64_t test_svldnt1_gather_u64index_u64(svbool_t pg, const uint64_t *base, svuint64_t indices) {
  // CHECK-LABEL: test_svldnt1_gather_u64index_u64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.index.nxv2i64(<vscale x 2 x i1> [[PG]], i64* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64index_u64'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, u64, index, _u64)(pg, base, indices);
}
276
test_svldnt1_gather_u64index_f64(svbool_t pg,const float64_t * base,svuint64_t indices)277 svfloat64_t test_svldnt1_gather_u64index_f64(svbool_t pg, const float64_t *base, svuint64_t indices) {
278 // CHECK-LABEL: test_svldnt1_gather_u64index_f64
279 // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
280 // CHECK: [[LOAD:%.*]] = call <vscale x 2 x double> @llvm.aarch64.sve.ldnt1.gather.index.nxv2f64(<vscale x 2 x i1> [[PG]], double* %base, <vscale x 2 x i64> %indices)
281 // CHECK: ret <vscale x 2 x double> [[LOAD]]
282 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index'}}
283 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64index_f64'}}
284 return SVE_ACLE_FUNC(svldnt1_gather_, u64, index, _f64)(pg, base, indices);
285 }
286
test_svldnt1_gather_u32base_index_s32(svbool_t pg,svuint32_t bases,int64_t index)287 svint32_t test_svldnt1_gather_u32base_index_s32(svbool_t pg, svuint32_t bases, int64_t index) {
288 // CHECK-LABEL: test_svldnt1_gather_u32base_index_s32
289 // CHECK-DAG: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
290 // CHECK-DAG: [[SHL:%.*]] = shl i64 %index, 2
291 // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 [[SHL]])
292 // CHECK: ret <vscale x 4 x i32> [[LOAD]]
293 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index_s32'}}
294 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_index_s32'}}
295 return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _index_s32, )(pg, bases, index);
296 }
297
test_svldnt1_gather_u64base_index_s64(svbool_t pg,svuint64_t bases,int64_t index)298 svint64_t test_svldnt1_gather_u64base_index_s64(svbool_t pg, svuint64_t bases, int64_t index) {
299 // CHECK-LABEL: test_svldnt1_gather_u64base_index_s64
300 // CHECK-DAG: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
301 // CHECK-DAG: [[SHL:%.*]] = shl i64 %index, 3
302 // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 [[SHL]])
303 // CHECK: ret <vscale x 2 x i64> [[LOAD]]
304 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index_s64'}}
305 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_index_s64'}}
306 return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _index_s64, )(pg, bases, index);
307 }
308
svuint32_t test_svldnt1_gather_u32base_index_u32(svbool_t pg, svuint32_t bases, int64_t index) {
  // CHECK-LABEL: test_svldnt1_gather_u32base_index_u32
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: [[SHL:%.*]] = shl i64 %index, 2
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 [[SHL]])
  // CHECK: ret <vscale x 4 x i32> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index_u32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_index_u32'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _index_u32, )(pg, bases, index);
}
319
svuint64_t test_svldnt1_gather_u64base_index_u64(svbool_t pg, svuint64_t bases, int64_t index) {
  // CHECK-LABEL: test_svldnt1_gather_u64base_index_u64
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: [[SHL:%.*]] = shl i64 %index, 3
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 [[SHL]])
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index_u64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_index_u64'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _index_u64, )(pg, bases, index);
}
330
test_svldnt1_gather_u32base_index_f32(svbool_t pg,svuint32_t bases,int64_t index)331 svfloat32_t test_svldnt1_gather_u32base_index_f32(svbool_t pg, svuint32_t bases, int64_t index) {
332 // CHECK-LABEL: test_svldnt1_gather_u32base_index_f32
333 // CHECK-DAG: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
334 // CHECK-DAG: [[SHL:%.*]] = shl i64 %index, 2
335 // CHECK: [[LOAD:%.*]] = call <vscale x 4 x float> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4f32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 [[SHL]])
336 // CHECK: ret <vscale x 4 x float> [[LOAD]]
337 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index_f32'}}
338 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_index_f32'}}
339 return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _index_f32, )(pg, bases, index);
340 }
341
test_svldnt1_gather_u64base_index_f64(svbool_t pg,svuint64_t bases,int64_t index)342 svfloat64_t test_svldnt1_gather_u64base_index_f64(svbool_t pg, svuint64_t bases, int64_t index) {
343 // CHECK-LABEL: test_svldnt1_gather_u64base_index_f64
344 // CHECK-DAG: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
345 // CHECK-DAG: [[SHL:%.*]] = shl i64 %index, 3
346 // CHECK: [[LOAD:%.*]] = call <vscale x 2 x double> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2f64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 [[SHL]])
347 // CHECK: ret <vscale x 2 x double> [[LOAD]]
348 // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index_f64'}}
349 // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_index_f64'}}
350 return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _index_f64, )(pg, bases, index);
351 }
352