// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve2 -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - %s | FileCheck %s
// RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve2 -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - %s | FileCheck %s
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -fsyntax-only -verify -verify-ignore-unexpected=error %s
// RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -fsyntax-only -verify=overload -verify-ignore-unexpected=error %s

#include <arm_sve.h>

#ifdef SVE_OVERLOADED_FORMS
// A simple used,unused... macro, long enough to represent any SVE builtin.
#define SVE_ACLE_FUNC(A1, A2_UNUSED, A3, A4_UNUSED) A1##A3
#else
#define SVE_ACLE_FUNC(A1, A2, A3, A4) A1##A2##A3##A4
#endif

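// For example, SVE_ACLE_FUNC(svldnt1_gather, _u32base, _s32, ) expands to the
// fully mangled name svldnt1_gather_u32base_s32 by default, and to the
// overloaded name svldnt1_gather_s32 when SVE_OVERLOADED_FORMS is defined.
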
svint32_t test_svldnt1_gather_u32base_s32(svbool_t pg, svuint32_t bases) {
  // CHECK-LABEL: test_svldnt1_gather_u32base_s32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: ret <vscale x 4 x i32> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_s32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_s32'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _s32, )(pg, bases);
}

svint64_t test_svldnt1_gather_u64base_s64(svbool_t pg, svuint64_t bases) {
  // CHECK-LABEL: test_svldnt1_gather_u64base_s64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_s64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_s64'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _s64, )(pg, bases);
}

svuint32_t test_svldnt1_gather_u32base_u32(svbool_t pg, svuint32_t bases) {
  // CHECK-LABEL: test_svldnt1_gather_u32base_u32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: ret <vscale x 4 x i32> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_u32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_u32'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _u32, )(pg, bases);
}

svuint64_t test_svldnt1_gather_u64base_u64(svbool_t pg, svuint64_t bases) {
  // CHECK-LABEL: test_svldnt1_gather_u64base_u64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_u64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_u64'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _u64, )(pg, bases);
}

svfloat32_t test_svldnt1_gather_u32base_f32(svbool_t pg, svuint32_t bases) {
  // CHECK-LABEL: test_svldnt1_gather_u32base_f32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x float> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4f32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: ret <vscale x 4 x float> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_f32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_f32'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _f32, )(pg, bases);
}

svfloat64_t test_svldnt1_gather_u64base_f64(svbool_t pg, svuint64_t bases) {
  // CHECK-LABEL: test_svldnt1_gather_u64base_f64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x double> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2f64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret <vscale x 2 x double> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_f64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_f64'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _f64, )(pg, bases);
}

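// The tests below cover the forms that take a scalar base pointer and a
// vector of unscaled (byte) offsets.
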
svint64_t test_svldnt1_gather_s64offset_s64(svbool_t pg, const int64_t *base, svint64_t offsets) {
  // CHECK-LABEL: test_svldnt1_gather_s64offset_s64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.nxv2i64(<vscale x 2 x i1> [[PG]], i64* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_s64offset_s64'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, s64, offset, _s64)(pg, base, offsets);
}

svuint64_t test_svldnt1_gather_s64offset_u64(svbool_t pg, const uint64_t *base, svint64_t offsets) {
  // CHECK-LABEL: test_svldnt1_gather_s64offset_u64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.nxv2i64(<vscale x 2 x i1> [[PG]], i64* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_s64offset_u64'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, s64, offset, _u64)(pg, base, offsets);
}

svfloat64_t test_svldnt1_gather_s64offset_f64(svbool_t pg, const float64_t *base, svint64_t offsets) {
  // CHECK-LABEL: test_svldnt1_gather_s64offset_f64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x double> @llvm.aarch64.sve.ldnt1.gather.nxv2f64(<vscale x 2 x i1> [[PG]], double* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret <vscale x 2 x double> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_s64offset_f64'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, s64, offset, _f64)(pg, base, offsets);
}

svint32_t test_svldnt1_gather_u32offset_s32(svbool_t pg, const int32_t *base, svuint32_t offsets) {
  // CHECK-LABEL: test_svldnt1_gather_u32offset_s32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.uxtw.nxv4i32(<vscale x 4 x i1> [[PG]], i32* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret <vscale x 4 x i32> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32offset_s32'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, u32, offset, _s32)(pg, base, offsets);
}

svint64_t test_svldnt1_gather_u64offset_s64(svbool_t pg, const int64_t *base, svuint64_t offsets) {
  // CHECK-LABEL: test_svldnt1_gather_u64offset_s64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.nxv2i64(<vscale x 2 x i1> [[PG]], i64* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64offset_s64'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, u64, offset, _s64)(pg, base, offsets);
}

svuint32_t test_svldnt1_gather_u32offset_u32(svbool_t pg, const uint32_t *base, svuint32_t offsets) {
  // CHECK-LABEL: test_svldnt1_gather_u32offset_u32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.uxtw.nxv4i32(<vscale x 4 x i1> [[PG]], i32* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret <vscale x 4 x i32> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32offset_u32'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, u32, offset, _u32)(pg, base, offsets);
}

svuint64_t test_svldnt1_gather_u64offset_u64(svbool_t pg, const uint64_t *base, svuint64_t offsets) {
  // CHECK-LABEL: test_svldnt1_gather_u64offset_u64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.nxv2i64(<vscale x 2 x i1> [[PG]], i64* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64offset_u64'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, u64, offset, _u64)(pg, base, offsets);
}

svfloat32_t test_svldnt1_gather_u32offset_f32(svbool_t pg, const float32_t *base, svuint32_t offsets) {
  // CHECK-LABEL: test_svldnt1_gather_u32offset_f32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x float> @llvm.aarch64.sve.ldnt1.gather.uxtw.nxv4f32(<vscale x 4 x i1> [[PG]], float* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret <vscale x 4 x float> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32offset_f32'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, u32, offset, _f32)(pg, base, offsets);
}

svfloat64_t test_svldnt1_gather_u64offset_f64(svbool_t pg, const float64_t *base, svuint64_t offsets) {
  // CHECK-LABEL: test_svldnt1_gather_u64offset_f64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x double> @llvm.aarch64.sve.ldnt1.gather.nxv2f64(<vscale x 2 x i1> [[PG]], double* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret <vscale x 2 x double> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64offset_f64'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, u64, offset, _f64)(pg, base, offsets);
}

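// The tests below cover the forms that take a vector of base addresses and a
// scalar byte offset.
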
svint32_t test_svldnt1_gather_u32base_offset_s32(svbool_t pg, svuint32_t bases, int64_t offset) {
  // CHECK-LABEL: test_svldnt1_gather_u32base_offset_s32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: ret <vscale x 4 x i32> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset_s32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_offset_s32'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _offset_s32, )(pg, bases, offset);
}

svint64_t test_svldnt1_gather_u64base_offset_s64(svbool_t pg, svuint64_t bases, int64_t offset) {
  // CHECK-LABEL: test_svldnt1_gather_u64base_offset_s64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset_s64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_offset_s64'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _offset_s64, )(pg, bases, offset);
}

svuint32_t test_svldnt1_gather_u32base_offset_u32(svbool_t pg, svuint32_t bases, int64_t offset) {
  // CHECK-LABEL: test_svldnt1_gather_u32base_offset_u32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: ret <vscale x 4 x i32> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset_u32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_offset_u32'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _offset_u32, )(pg, bases, offset);
}

svuint64_t test_svldnt1_gather_u64base_offset_u64(svbool_t pg, svuint64_t bases, int64_t offset) {
  // CHECK-LABEL: test_svldnt1_gather_u64base_offset_u64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset_u64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_offset_u64'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _offset_u64, )(pg, bases, offset);
}

svfloat32_t test_svldnt1_gather_u32base_offset_f32(svbool_t pg, svuint32_t bases, int64_t offset) {
  // CHECK-LABEL: test_svldnt1_gather_u32base_offset_f32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x float> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4f32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: ret <vscale x 4 x float> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset_f32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_offset_f32'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _offset_f32, )(pg, bases, offset);
}

svfloat64_t test_svldnt1_gather_u64base_offset_f64(svbool_t pg, svuint64_t bases, int64_t offset) {
  // CHECK-LABEL: test_svldnt1_gather_u64base_offset_f64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x double> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2f64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret <vscale x 2 x double> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_offset_f64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_offset_f64'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _offset_f64, )(pg, bases, offset);
}

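// The tests below cover the forms that take a scalar base pointer and a
// vector of indices, which are scaled by the element size.
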
svint64_t test_svldnt1_gather_s64index_s64(svbool_t pg, const int64_t *base, svint64_t indices) {
  // CHECK-LABEL: test_svldnt1_gather_s64index_s64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.index.nxv2i64(<vscale x 2 x i1> [[PG]], i64* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_s64index_s64'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, s64, index, _s64)(pg, base, indices);
}

svuint64_t test_svldnt1_gather_s64index_u64(svbool_t pg, const uint64_t *base, svint64_t indices) {
  // CHECK-LABEL: test_svldnt1_gather_s64index_u64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.index.nxv2i64(<vscale x 2 x i1> [[PG]], i64* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_s64index_u64'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, s64, index, _u64)(pg, base, indices);
}

svfloat64_t test_svldnt1_gather_s64index_f64(svbool_t pg, const float64_t *base, svint64_t indices) {
  // CHECK-LABEL: test_svldnt1_gather_s64index_f64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x double> @llvm.aarch64.sve.ldnt1.gather.index.nxv2f64(<vscale x 2 x i1> [[PG]], double* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret <vscale x 2 x double> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_s64index_f64'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, s64, index, _f64)(pg, base, indices);
}

svint64_t test_svldnt1_gather_u64index_s64(svbool_t pg, const int64_t *base, svuint64_t indices) {
  // CHECK-LABEL: test_svldnt1_gather_u64index_s64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.index.nxv2i64(<vscale x 2 x i1> [[PG]], i64* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64index_s64'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, u64, index, _s64)(pg, base, indices);
}

svuint64_t test_svldnt1_gather_u64index_u64(svbool_t pg, const uint64_t *base, svuint64_t indices) {
  // CHECK-LABEL: test_svldnt1_gather_u64index_u64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.index.nxv2i64(<vscale x 2 x i1> [[PG]], i64* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64index_u64'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, u64, index, _u64)(pg, base, indices);
}

svfloat64_t test_svldnt1_gather_u64index_f64(svbool_t pg, const float64_t *base, svuint64_t indices) {
  // CHECK-LABEL: test_svldnt1_gather_u64index_f64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x double> @llvm.aarch64.sve.ldnt1.gather.index.nxv2f64(<vscale x 2 x i1> [[PG]], double* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret <vscale x 2 x double> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64index_f64'}}
  return SVE_ACLE_FUNC(svldnt1_gather_, u64, index, _f64)(pg, base, indices);
}

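// The tests below cover the forms that take a vector of base addresses and a
// scalar index; the index is converted to a byte offset by shifting left by
// 2 (32-bit elements) or 3 (64-bit elements).
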
svint32_t test_svldnt1_gather_u32base_index_s32(svbool_t pg, svuint32_t bases, int64_t index) {
  // CHECK-LABEL: test_svldnt1_gather_u32base_index_s32
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: [[SHL:%.*]] = shl i64 %index, 2
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 [[SHL]])
  // CHECK: ret <vscale x 4 x i32> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index_s32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_index_s32'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _index_s32, )(pg, bases, index);
}

svint64_t test_svldnt1_gather_u64base_index_s64(svbool_t pg, svuint64_t bases, int64_t index) {
  // CHECK-LABEL: test_svldnt1_gather_u64base_index_s64
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: [[SHL:%.*]] = shl i64 %index, 3
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 [[SHL]])
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index_s64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_index_s64'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _index_s64, )(pg, bases, index);
}

svuint32_t test_svldnt1_gather_u32base_index_u32(svbool_t pg, svuint32_t bases, int64_t index) {
  // CHECK-LABEL: test_svldnt1_gather_u32base_index_u32
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: [[SHL:%.*]] = shl i64 %index, 2
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 [[SHL]])
  // CHECK: ret <vscale x 4 x i32> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index_u32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_index_u32'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _index_u32, )(pg, bases, index);
}

svuint64_t test_svldnt1_gather_u64base_index_u64(svbool_t pg, svuint64_t bases, int64_t index) {
  // CHECK-LABEL: test_svldnt1_gather_u64base_index_u64
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: [[SHL:%.*]] = shl i64 %index, 3
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 [[SHL]])
  // CHECK: ret <vscale x 2 x i64> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index_u64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_index_u64'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _index_u64, )(pg, bases, index);
}

svfloat32_t test_svldnt1_gather_u32base_index_f32(svbool_t pg, svuint32_t bases, int64_t index) {
  // CHECK-LABEL: test_svldnt1_gather_u32base_index_f32
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: [[SHL:%.*]] = shl i64 %index, 2
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x float> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4f32.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 [[SHL]])
  // CHECK: ret <vscale x 4 x float> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index_f32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u32base_index_f32'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u32base, _index_f32, )(pg, bases, index);
}

svfloat64_t test_svldnt1_gather_u64base_index_f64(svbool_t pg, svuint64_t bases, int64_t index) {
  // CHECK-LABEL: test_svldnt1_gather_u64base_index_f64
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: [[SHL:%.*]] = shl i64 %index, 3
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x double> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2f64.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 [[SHL]])
  // CHECK: ret <vscale x 2 x double> [[LOAD]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1_gather_index_f64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1_gather_u64base_index_f64'}}
  return SVE_ACLE_FUNC(svldnt1_gather, _u64base, _index_f64, )(pg, bases, index);
}