1 // RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve2 -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - %s | FileCheck %s
2 // RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve2 -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - -x c++ %s | FileCheck %s
3 // RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve2 -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - %s | FileCheck %s
4 // RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve2 -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - -x c++ %s | FileCheck %s
5 // RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -fsyntax-only -verify -verify-ignore-unexpected=error %s
6 // RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -fsyntax-only -verify=overload -verify-ignore-unexpected=error %s
7
8 #include <arm_sve.h>
9
10 #ifdef SVE_OVERLOADED_FORMS
11 // A simple used,unused... macro, long enough to represent any SVE builtin.
12 #define SVE_ACLE_FUNC(A1, A2_UNUSED, A3, A4_UNUSED) A1##A3
13 #else
14 #define SVE_ACLE_FUNC(A1, A2, A3, A4) A1##A2##A3##A4
15 #endif
16
// Vector-base (u32 bases) gather of i16 elements, sign-extended to svint32_t; zero scalar offset.
svint32_t test_svldnt1sh_gather_u32base_s32(svbool_t pg, svuint32_t bases) {
  // CHECK-LABEL: test_svldnt1sh_gather_u32base_s32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i16> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 4 x i16> [[LOAD]] to <vscale x 4 x i32>
  // CHECK: ret <vscale x 4 x i32> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_s32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u32base_s32'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather, _u32base, _s32, )(pg, bases);
}
27
// Vector-base (u64 bases) gather of i16 elements, sign-extended to svint64_t; zero scalar offset.
svint64_t test_svldnt1sh_gather_u64base_s64(svbool_t pg, svuint64_t bases) {
  // CHECK-LABEL: test_svldnt1sh_gather_u64base_s64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i16> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 2 x i16> [[LOAD]] to <vscale x 2 x i64>
  // CHECK: ret <vscale x 2 x i64> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_s64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u64base_s64'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather, _u64base, _s64, )(pg, bases);
}
38
// Same as the _s32 variant but returning svuint32_t; ldnt1sh still sign-extends the loaded i16s.
svuint32_t test_svldnt1sh_gather_u32base_u32(svbool_t pg, svuint32_t bases) {
  // CHECK-LABEL: test_svldnt1sh_gather_u32base_u32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i16> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 4 x i16> [[LOAD]] to <vscale x 4 x i32>
  // CHECK: ret <vscale x 4 x i32> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_u32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u32base_u32'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather, _u32base, _u32, )(pg, bases);
}
49
// Same as the _s64 variant but returning svuint64_t; ldnt1sh still sign-extends the loaded i16s.
svuint64_t test_svldnt1sh_gather_u64base_u64(svbool_t pg, svuint64_t bases) {
  // CHECK-LABEL: test_svldnt1sh_gather_u64base_u64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i16> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 2 x i16> [[LOAD]] to <vscale x 2 x i64>
  // CHECK: ret <vscale x 2 x i64> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_u64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u64base_u64'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather, _u64base, _u64, )(pg, bases);
}
60
// Scalar base + s64 vector of byte offsets; i16 elements sign-extended to svint64_t.
svint64_t test_svldnt1sh_gather_s64offset_s64(svbool_t pg, const int16_t *base, svint64_t offsets) {
  // CHECK-LABEL: test_svldnt1sh_gather_s64offset_s64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i16> @llvm.aarch64.sve.ldnt1.gather.nxv2i16(<vscale x 2 x i1> [[PG]], i16* %base, <vscale x 2 x i64> %offsets)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 2 x i16> [[LOAD]] to <vscale x 2 x i64>
  // CHECK: ret <vscale x 2 x i64> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_offset_s64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_s64offset_s64'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather_, s64, offset_s64, )(pg, base, offsets);
}
71
// Scalar base + s64 vector of byte offsets; unsigned result type, same sign-extending load.
svuint64_t test_svldnt1sh_gather_s64offset_u64(svbool_t pg, const int16_t *base, svint64_t offsets) {
  // CHECK-LABEL: test_svldnt1sh_gather_s64offset_u64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i16> @llvm.aarch64.sve.ldnt1.gather.nxv2i16(<vscale x 2 x i1> [[PG]], i16* %base, <vscale x 2 x i64> %offsets)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 2 x i16> [[LOAD]] to <vscale x 2 x i64>
  // CHECK: ret <vscale x 2 x i64> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_offset_u64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_s64offset_u64'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather_, s64, offset_u64, )(pg, base, offsets);
}
82
// Scalar base + u32 offsets (uxtw addressing form in the IR); i16 sign-extended to svint32_t.
svint32_t test_svldnt1sh_gather_u32offset_s32(svbool_t pg, const int16_t *base, svuint32_t offsets) {
  // CHECK-LABEL: test_svldnt1sh_gather_u32offset_s32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i16> @llvm.aarch64.sve.ldnt1.gather.uxtw.nxv4i16(<vscale x 4 x i1> [[PG]], i16* %base, <vscale x 4 x i32> %offsets)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 4 x i16> [[LOAD]] to <vscale x 4 x i32>
  // CHECK: ret <vscale x 4 x i32> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_offset_s32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u32offset_s32'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather_, u32, offset_s32, )(pg, base, offsets);
}
93
// Scalar base + u64 offsets; i16 elements sign-extended to svint64_t.
svint64_t test_svldnt1sh_gather_u64offset_s64(svbool_t pg, const int16_t *base, svuint64_t offsets) {
  // CHECK-LABEL: test_svldnt1sh_gather_u64offset_s64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i16> @llvm.aarch64.sve.ldnt1.gather.nxv2i16(<vscale x 2 x i1> [[PG]], i16* %base, <vscale x 2 x i64> %offsets)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 2 x i16> [[LOAD]] to <vscale x 2 x i64>
  // CHECK: ret <vscale x 2 x i64> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_offset_s64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u64offset_s64'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather_, u64, offset_s64, )(pg, base, offsets);
}
104
// Scalar base + u32 offsets (uxtw form); unsigned result, same sign-extending load.
svuint32_t test_svldnt1sh_gather_u32offset_u32(svbool_t pg, const int16_t *base, svuint32_t offsets) {
  // CHECK-LABEL: test_svldnt1sh_gather_u32offset_u32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i16> @llvm.aarch64.sve.ldnt1.gather.uxtw.nxv4i16(<vscale x 4 x i1> [[PG]], i16* %base, <vscale x 4 x i32> %offsets)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 4 x i16> [[LOAD]] to <vscale x 4 x i32>
  // CHECK: ret <vscale x 4 x i32> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_offset_u32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u32offset_u32'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather_, u32, offset_u32, )(pg, base, offsets);
}
115
// Scalar base + u64 offsets; unsigned result, same sign-extending load.
svuint64_t test_svldnt1sh_gather_u64offset_u64(svbool_t pg, const int16_t *base, svuint64_t offsets) {
  // CHECK-LABEL: test_svldnt1sh_gather_u64offset_u64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i16> @llvm.aarch64.sve.ldnt1.gather.nxv2i16(<vscale x 2 x i1> [[PG]], i16* %base, <vscale x 2 x i64> %offsets)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 2 x i16> [[LOAD]] to <vscale x 2 x i64>
  // CHECK: ret <vscale x 2 x i64> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_offset_u64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u64offset_u64'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather_, u64, offset_u64, )(pg, base, offsets);
}
126
// Vector bases + scalar byte offset; i16 elements sign-extended to svint32_t.
svint32_t test_svldnt1sh_gather_u32base_offset_s32(svbool_t pg, svuint32_t bases, int64_t offset) {
  // CHECK-LABEL: test_svldnt1sh_gather_u32base_offset_s32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i16> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 4 x i16> [[LOAD]] to <vscale x 4 x i32>
  // CHECK: ret <vscale x 4 x i32> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_offset_s32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u32base_offset_s32'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather, _u32base, _offset_s32, )(pg, bases, offset);
}
137
// Vector bases + scalar byte offset; i16 elements sign-extended to svint64_t.
svint64_t test_svldnt1sh_gather_u64base_offset_s64(svbool_t pg, svuint64_t bases, int64_t offset) {
  // CHECK-LABEL: test_svldnt1sh_gather_u64base_offset_s64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i16> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 2 x i16> [[LOAD]] to <vscale x 2 x i64>
  // CHECK: ret <vscale x 2 x i64> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_offset_s64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u64base_offset_s64'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather, _u64base, _offset_s64, )(pg, bases, offset);
}
148
// Vector bases + scalar byte offset; unsigned result, same sign-extending load.
svuint32_t test_svldnt1sh_gather_u32base_offset_u32(svbool_t pg, svuint32_t bases, int64_t offset) {
  // CHECK-LABEL: test_svldnt1sh_gather_u32base_offset_u32
  // CHECK: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i16> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 4 x i16> [[LOAD]] to <vscale x 4 x i32>
  // CHECK: ret <vscale x 4 x i32> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_offset_u32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u32base_offset_u32'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather, _u32base, _offset_u32, )(pg, bases, offset);
}
159
// Vector bases + scalar byte offset; unsigned 64-bit result, same sign-extending load.
svuint64_t test_svldnt1sh_gather_u64base_offset_u64(svbool_t pg, svuint64_t bases, int64_t offset) {
  // CHECK-LABEL: test_svldnt1sh_gather_u64base_offset_u64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i16> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 2 x i16> [[LOAD]] to <vscale x 2 x i64>
  // CHECK: ret <vscale x 2 x i64> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_offset_u64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u64base_offset_u64'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather, _u64base, _offset_u64, )(pg, bases, offset);
}
170
// Scalar base + s64 element indices (scaled form: ldnt1.gather.index); i16 sign-extended to svint64_t.
svint64_t test_svldnt1sh_gather_s64index_s64(svbool_t pg, const int16_t *base, svint64_t indices) {
  // CHECK-LABEL: test_svldnt1sh_gather_s64index_s64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i16> @llvm.aarch64.sve.ldnt1.gather.index.nxv2i16(<vscale x 2 x i1> [[PG]], i16* %base, <vscale x 2 x i64> %indices)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 2 x i16> [[LOAD]] to <vscale x 2 x i64>
  // CHECK: ret <vscale x 2 x i64> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_index_s64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_s64index_s64'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather_, s64, index_s64, )(pg, base, indices);
}
181
// Scalar base + s64 element indices; unsigned result, same sign-extending indexed load.
svuint64_t test_svldnt1sh_gather_s64index_u64(svbool_t pg, const int16_t *base, svint64_t indices) {
  // CHECK-LABEL: test_svldnt1sh_gather_s64index_u64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i16> @llvm.aarch64.sve.ldnt1.gather.index.nxv2i16(<vscale x 2 x i1> [[PG]], i16* %base, <vscale x 2 x i64> %indices)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 2 x i16> [[LOAD]] to <vscale x 2 x i64>
  // CHECK: ret <vscale x 2 x i64> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_index_u64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_s64index_u64'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather_, s64, index_u64, )(pg, base, indices);
}
192
// Scalar base + u64 element indices; i16 sign-extended to svint64_t.
svint64_t test_svldnt1sh_gather_u64index_s64(svbool_t pg, const int16_t *base, svuint64_t indices) {
  // CHECK-LABEL: test_svldnt1sh_gather_u64index_s64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i16> @llvm.aarch64.sve.ldnt1.gather.index.nxv2i16(<vscale x 2 x i1> [[PG]], i16* %base, <vscale x 2 x i64> %indices)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 2 x i16> [[LOAD]] to <vscale x 2 x i64>
  // CHECK: ret <vscale x 2 x i64> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_index_s64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u64index_s64'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather_, u64, index_s64, )(pg, base, indices);
}
203
// Scalar base + u64 element indices; unsigned result, same sign-extending indexed load.
svuint64_t test_svldnt1sh_gather_u64index_u64(svbool_t pg, const int16_t *base, svuint64_t indices) {
  // CHECK-LABEL: test_svldnt1sh_gather_u64index_u64
  // CHECK: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i16> @llvm.aarch64.sve.ldnt1.gather.index.nxv2i16(<vscale x 2 x i1> [[PG]], i16* %base, <vscale x 2 x i64> %indices)
  // CHECK: [[SEXT:%.*]] = sext <vscale x 2 x i16> [[LOAD]] to <vscale x 2 x i64>
  // CHECK: ret <vscale x 2 x i64> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_index_u64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u64index_u64'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather_, u64, index_u64, )(pg, base, indices);
}
214
// Vector bases + scalar element index: index is scaled to bytes (shl by 1 for i16) before the gather.
svint32_t test_svldnt1sh_gather_u32base_index_s32(svbool_t pg, svuint32_t bases, int64_t index) {
  // CHECK-LABEL: test_svldnt1sh_gather_u32base_index_s32
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: [[SHL:%.*]] = shl i64 %index, 1
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i16> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 [[SHL]])
  // CHECK: [[SEXT:%.*]] = sext <vscale x 4 x i16> [[LOAD]] to <vscale x 4 x i32>
  // CHECK: ret <vscale x 4 x i32> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_index_s32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u32base_index_s32'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather, _u32base, _index_s32, )(pg, bases, index);
}
226
// Vector bases + scalar element index, scaled to bytes (shl by 1); i16 sign-extended to svint64_t.
svint64_t test_svldnt1sh_gather_u64base_index_s64(svbool_t pg, svuint64_t bases, int64_t index) {
  // CHECK-LABEL: test_svldnt1sh_gather_u64base_index_s64
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: [[SHL:%.*]] = shl i64 %index, 1
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i16> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 [[SHL]])
  // CHECK: [[SEXT:%.*]] = sext <vscale x 2 x i16> [[LOAD]] to <vscale x 2 x i64>
  // CHECK: ret <vscale x 2 x i64> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_index_s64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u64base_index_s64'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather, _u64base, _index_s64, )(pg, bases, index);
}
238
// Vector bases + scalar element index, scaled to bytes; unsigned result, same sign-extending load.
svuint32_t test_svldnt1sh_gather_u32base_index_u32(svbool_t pg, svuint32_t bases, int64_t index) {
  // CHECK-LABEL: test_svldnt1sh_gather_u32base_index_u32
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: [[SHL:%.*]] = shl i64 %index, 1
  // CHECK: [[LOAD:%.*]] = call <vscale x 4 x i16> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i1> [[PG]], <vscale x 4 x i32> %bases, i64 [[SHL]])
  // CHECK: [[SEXT:%.*]] = sext <vscale x 4 x i16> [[LOAD]] to <vscale x 4 x i32>
  // CHECK: ret <vscale x 4 x i32> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_index_u32'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u32base_index_u32'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather, _u32base, _index_u32, )(pg, bases, index);
}
250
// Vector bases + scalar element index, scaled to bytes; unsigned 64-bit result, same sign-extending load.
svuint64_t test_svldnt1sh_gather_u64base_index_u64(svbool_t pg, svuint64_t bases, int64_t index) {
  // CHECK-LABEL: test_svldnt1sh_gather_u64base_index_u64
  // CHECK-DAG: [[PG:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: [[SHL:%.*]] = shl i64 %index, 1
  // CHECK: [[LOAD:%.*]] = call <vscale x 2 x i16> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i1> [[PG]], <vscale x 2 x i64> %bases, i64 [[SHL]])
  // CHECK: [[SEXT:%.*]] = sext <vscale x 2 x i16> [[LOAD]] to <vscale x 2 x i64>
  // CHECK: ret <vscale x 2 x i64> [[SEXT]]
  // overload-warning@+2 {{implicit declaration of function 'svldnt1sh_gather_index_u64'}}
  // expected-warning@+1 {{implicit declaration of function 'svldnt1sh_gather_u64base_index_u64'}}
  return SVE_ACLE_FUNC(svldnt1sh_gather, _u64base, _index_u64, )(pg, bases, index);
}
262