// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d -target-feature +experimental-v \
// RUN:   -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>

8 //
9 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf8(
10 // CHECK-RV64-NEXT: entry:
11 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
12 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
13 // CHECK-RV64-NEXT: ret void
14 //
test_vsoxei8_v_i8mf8(int8_t * base,vuint8mf8_t bindex,vint8mf8_t value,size_t vl)15 void test_vsoxei8_v_i8mf8(int8_t *base, vuint8mf8_t bindex, vint8mf8_t value,
16 size_t vl) {
17 return vsoxei8_v_i8mf8(base, bindex, value, vl);
18 }
19
20 //
21 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf4(
22 // CHECK-RV64-NEXT: entry:
23 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
24 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
25 // CHECK-RV64-NEXT: ret void
26 //
test_vsoxei8_v_i8mf4(int8_t * base,vuint8mf4_t bindex,vint8mf4_t value,size_t vl)27 void test_vsoxei8_v_i8mf4(int8_t *base, vuint8mf4_t bindex, vint8mf4_t value,
28 size_t vl) {
29 return vsoxei8_v_i8mf4(base, bindex, value, vl);
30 }
31
32 //
33 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf2(
34 // CHECK-RV64-NEXT: entry:
35 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
36 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
37 // CHECK-RV64-NEXT: ret void
38 //
test_vsoxei8_v_i8mf2(int8_t * base,vuint8mf2_t bindex,vint8mf2_t value,size_t vl)39 void test_vsoxei8_v_i8mf2(int8_t *base, vuint8mf2_t bindex, vint8mf2_t value,
40 size_t vl) {
41 return vsoxei8_v_i8mf2(base, bindex, value, vl);
42 }
43
44 //
45 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8m1(
46 // CHECK-RV64-NEXT: entry:
47 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
48 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
49 // CHECK-RV64-NEXT: ret void
50 //
test_vsoxei8_v_i8m1(int8_t * base,vuint8m1_t bindex,vint8m1_t value,size_t vl)51 void test_vsoxei8_v_i8m1(int8_t *base, vuint8m1_t bindex, vint8m1_t value,
52 size_t vl) {
53 return vsoxei8_v_i8m1(base, bindex, value, vl);
54 }
55
56 //
57 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8m2(
58 // CHECK-RV64-NEXT: entry:
59 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
60 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
61 // CHECK-RV64-NEXT: ret void
62 //
test_vsoxei8_v_i8m2(int8_t * base,vuint8m2_t bindex,vint8m2_t value,size_t vl)63 void test_vsoxei8_v_i8m2(int8_t *base, vuint8m2_t bindex, vint8m2_t value,
64 size_t vl) {
65 return vsoxei8_v_i8m2(base, bindex, value, vl);
66 }
67
68 //
69 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8m4(
70 // CHECK-RV64-NEXT: entry:
71 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
72 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
73 // CHECK-RV64-NEXT: ret void
74 //
test_vsoxei8_v_i8m4(int8_t * base,vuint8m4_t bindex,vint8m4_t value,size_t vl)75 void test_vsoxei8_v_i8m4(int8_t *base, vuint8m4_t bindex, vint8m4_t value,
76 size_t vl) {
77 return vsoxei8_v_i8m4(base, bindex, value, vl);
78 }
79
80 //
81 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8m8(
82 // CHECK-RV64-NEXT: entry:
83 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
84 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
85 // CHECK-RV64-NEXT: ret void
86 //
test_vsoxei8_v_i8m8(int8_t * base,vuint8m8_t bindex,vint8m8_t value,size_t vl)87 void test_vsoxei8_v_i8m8(int8_t *base, vuint8m8_t bindex, vint8m8_t value,
88 size_t vl) {
89 return vsoxei8_v_i8m8(base, bindex, value, vl);
90 }
91
92 //
93 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf8(
94 // CHECK-RV64-NEXT: entry:
95 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
96 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
97 // CHECK-RV64-NEXT: ret void
98 //
test_vsoxei16_v_i8mf8(int8_t * base,vuint16mf4_t bindex,vint8mf8_t value,size_t vl)99 void test_vsoxei16_v_i8mf8(int8_t *base, vuint16mf4_t bindex, vint8mf8_t value,
100 size_t vl) {
101 return vsoxei16_v_i8mf8(base, bindex, value, vl);
102 }
103
104 //
105 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf4(
106 // CHECK-RV64-NEXT: entry:
107 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
108 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
109 // CHECK-RV64-NEXT: ret void
110 //
test_vsoxei16_v_i8mf4(int8_t * base,vuint16mf2_t bindex,vint8mf4_t value,size_t vl)111 void test_vsoxei16_v_i8mf4(int8_t *base, vuint16mf2_t bindex, vint8mf4_t value,
112 size_t vl) {
113 return vsoxei16_v_i8mf4(base, bindex, value, vl);
114 }
115
116 //
117 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf2(
118 // CHECK-RV64-NEXT: entry:
119 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
120 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
121 // CHECK-RV64-NEXT: ret void
122 //
test_vsoxei16_v_i8mf2(int8_t * base,vuint16m1_t bindex,vint8mf2_t value,size_t vl)123 void test_vsoxei16_v_i8mf2(int8_t *base, vuint16m1_t bindex, vint8mf2_t value,
124 size_t vl) {
125 return vsoxei16_v_i8mf2(base, bindex, value, vl);
126 }
127
128 //
129 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8m1(
130 // CHECK-RV64-NEXT: entry:
131 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
132 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
133 // CHECK-RV64-NEXT: ret void
134 //
test_vsoxei16_v_i8m1(int8_t * base,vuint16m2_t bindex,vint8m1_t value,size_t vl)135 void test_vsoxei16_v_i8m1(int8_t *base, vuint16m2_t bindex, vint8m1_t value,
136 size_t vl) {
137 return vsoxei16_v_i8m1(base, bindex, value, vl);
138 }
139
140 //
141 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8m2(
142 // CHECK-RV64-NEXT: entry:
143 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
144 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
145 // CHECK-RV64-NEXT: ret void
146 //
test_vsoxei16_v_i8m2(int8_t * base,vuint16m4_t bindex,vint8m2_t value,size_t vl)147 void test_vsoxei16_v_i8m2(int8_t *base, vuint16m4_t bindex, vint8m2_t value,
148 size_t vl) {
149 return vsoxei16_v_i8m2(base, bindex, value, vl);
150 }
151
152 //
153 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8m4(
154 // CHECK-RV64-NEXT: entry:
155 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
156 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
157 // CHECK-RV64-NEXT: ret void
158 //
test_vsoxei16_v_i8m4(int8_t * base,vuint16m8_t bindex,vint8m4_t value,size_t vl)159 void test_vsoxei16_v_i8m4(int8_t *base, vuint16m8_t bindex, vint8m4_t value,
160 size_t vl) {
161 return vsoxei16_v_i8m4(base, bindex, value, vl);
162 }
163
164 //
165 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf8(
166 // CHECK-RV64-NEXT: entry:
167 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
168 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
169 // CHECK-RV64-NEXT: ret void
170 //
test_vsoxei32_v_i8mf8(int8_t * base,vuint32mf2_t bindex,vint8mf8_t value,size_t vl)171 void test_vsoxei32_v_i8mf8(int8_t *base, vuint32mf2_t bindex, vint8mf8_t value,
172 size_t vl) {
173 return vsoxei32_v_i8mf8(base, bindex, value, vl);
174 }
175
176 //
177 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf4(
178 // CHECK-RV64-NEXT: entry:
179 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
180 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
181 // CHECK-RV64-NEXT: ret void
182 //
test_vsoxei32_v_i8mf4(int8_t * base,vuint32m1_t bindex,vint8mf4_t value,size_t vl)183 void test_vsoxei32_v_i8mf4(int8_t *base, vuint32m1_t bindex, vint8mf4_t value,
184 size_t vl) {
185 return vsoxei32_v_i8mf4(base, bindex, value, vl);
186 }
187
188 //
189 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf2(
190 // CHECK-RV64-NEXT: entry:
191 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
192 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
193 // CHECK-RV64-NEXT: ret void
194 //
test_vsoxei32_v_i8mf2(int8_t * base,vuint32m2_t bindex,vint8mf2_t value,size_t vl)195 void test_vsoxei32_v_i8mf2(int8_t *base, vuint32m2_t bindex, vint8mf2_t value,
196 size_t vl) {
197 return vsoxei32_v_i8mf2(base, bindex, value, vl);
198 }
199
200 //
201 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8m1(
202 // CHECK-RV64-NEXT: entry:
203 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
204 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
205 // CHECK-RV64-NEXT: ret void
206 //
test_vsoxei32_v_i8m1(int8_t * base,vuint32m4_t bindex,vint8m1_t value,size_t vl)207 void test_vsoxei32_v_i8m1(int8_t *base, vuint32m4_t bindex, vint8m1_t value,
208 size_t vl) {
209 return vsoxei32_v_i8m1(base, bindex, value, vl);
210 }
211
212 //
213 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8m2(
214 // CHECK-RV64-NEXT: entry:
215 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
216 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
217 // CHECK-RV64-NEXT: ret void
218 //
test_vsoxei32_v_i8m2(int8_t * base,vuint32m8_t bindex,vint8m2_t value,size_t vl)219 void test_vsoxei32_v_i8m2(int8_t *base, vuint32m8_t bindex, vint8m2_t value,
220 size_t vl) {
221 return vsoxei32_v_i8m2(base, bindex, value, vl);
222 }
223
224 //
225 // CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf8(
226 // CHECK-RV64-NEXT: entry:
227 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
228 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
229 // CHECK-RV64-NEXT: ret void
230 //
test_vsoxei64_v_i8mf8(int8_t * base,vuint64m1_t bindex,vint8mf8_t value,size_t vl)231 void test_vsoxei64_v_i8mf8(int8_t *base, vuint64m1_t bindex, vint8mf8_t value,
232 size_t vl) {
233 return vsoxei64_v_i8mf8(base, bindex, value, vl);
234 }
235
236 //
237 // CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf4(
238 // CHECK-RV64-NEXT: entry:
239 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
240 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
241 // CHECK-RV64-NEXT: ret void
242 //
test_vsoxei64_v_i8mf4(int8_t * base,vuint64m2_t bindex,vint8mf4_t value,size_t vl)243 void test_vsoxei64_v_i8mf4(int8_t *base, vuint64m2_t bindex, vint8mf4_t value,
244 size_t vl) {
245 return vsoxei64_v_i8mf4(base, bindex, value, vl);
246 }
247
248 //
249 // CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf2(
250 // CHECK-RV64-NEXT: entry:
251 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
252 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
253 // CHECK-RV64-NEXT: ret void
254 //
test_vsoxei64_v_i8mf2(int8_t * base,vuint64m4_t bindex,vint8mf2_t value,size_t vl)255 void test_vsoxei64_v_i8mf2(int8_t *base, vuint64m4_t bindex, vint8mf2_t value,
256 size_t vl) {
257 return vsoxei64_v_i8mf2(base, bindex, value, vl);
258 }
259
260 //
261 // CHECK-RV64-LABEL: @test_vsoxei64_v_i8m1(
262 // CHECK-RV64-NEXT: entry:
263 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
264 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
265 // CHECK-RV64-NEXT: ret void
266 //
test_vsoxei64_v_i8m1(int8_t * base,vuint64m8_t bindex,vint8m1_t value,size_t vl)267 void test_vsoxei64_v_i8m1(int8_t *base, vuint64m8_t bindex, vint8m1_t value,
268 size_t vl) {
269 return vsoxei64_v_i8m1(base, bindex, value, vl);
270 }
271
272 //
273 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16mf4(
274 // CHECK-RV64-NEXT: entry:
275 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
276 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
277 // CHECK-RV64-NEXT: ret void
278 //
test_vsoxei8_v_i16mf4(int16_t * base,vuint8mf8_t bindex,vint16mf4_t value,size_t vl)279 void test_vsoxei8_v_i16mf4(int16_t *base, vuint8mf8_t bindex, vint16mf4_t value,
280 size_t vl) {
281 return vsoxei8_v_i16mf4(base, bindex, value, vl);
282 }
283
284 //
285 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16mf2(
286 // CHECK-RV64-NEXT: entry:
287 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
288 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
289 // CHECK-RV64-NEXT: ret void
290 //
test_vsoxei8_v_i16mf2(int16_t * base,vuint8mf4_t bindex,vint16mf2_t value,size_t vl)291 void test_vsoxei8_v_i16mf2(int16_t *base, vuint8mf4_t bindex, vint16mf2_t value,
292 size_t vl) {
293 return vsoxei8_v_i16mf2(base, bindex, value, vl);
294 }
295
296 //
297 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16m1(
298 // CHECK-RV64-NEXT: entry:
299 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
300 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
301 // CHECK-RV64-NEXT: ret void
302 //
test_vsoxei8_v_i16m1(int16_t * base,vuint8mf2_t bindex,vint16m1_t value,size_t vl)303 void test_vsoxei8_v_i16m1(int16_t *base, vuint8mf2_t bindex, vint16m1_t value,
304 size_t vl) {
305 return vsoxei8_v_i16m1(base, bindex, value, vl);
306 }
307
308 //
309 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16m2(
310 // CHECK-RV64-NEXT: entry:
311 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
312 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
313 // CHECK-RV64-NEXT: ret void
314 //
test_vsoxei8_v_i16m2(int16_t * base,vuint8m1_t bindex,vint16m2_t value,size_t vl)315 void test_vsoxei8_v_i16m2(int16_t *base, vuint8m1_t bindex, vint16m2_t value,
316 size_t vl) {
317 return vsoxei8_v_i16m2(base, bindex, value, vl);
318 }
319
320 //
321 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16m4(
322 // CHECK-RV64-NEXT: entry:
323 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
324 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i8.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
325 // CHECK-RV64-NEXT: ret void
326 //
test_vsoxei8_v_i16m4(int16_t * base,vuint8m2_t bindex,vint16m4_t value,size_t vl)327 void test_vsoxei8_v_i16m4(int16_t *base, vuint8m2_t bindex, vint16m4_t value,
328 size_t vl) {
329 return vsoxei8_v_i16m4(base, bindex, value, vl);
330 }
331
332 //
333 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16m8(
334 // CHECK-RV64-NEXT: entry:
335 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
336 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i8.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
337 // CHECK-RV64-NEXT: ret void
338 //
test_vsoxei8_v_i16m8(int16_t * base,vuint8m4_t bindex,vint16m8_t value,size_t vl)339 void test_vsoxei8_v_i16m8(int16_t *base, vuint8m4_t bindex, vint16m8_t value,
340 size_t vl) {
341 return vsoxei8_v_i16m8(base, bindex, value, vl);
342 }
343
344 //
345 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16mf4(
346 // CHECK-RV64-NEXT: entry:
347 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
348 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
349 // CHECK-RV64-NEXT: ret void
350 //
test_vsoxei16_v_i16mf4(int16_t * base,vuint16mf4_t bindex,vint16mf4_t value,size_t vl)351 void test_vsoxei16_v_i16mf4(int16_t *base, vuint16mf4_t bindex,
352 vint16mf4_t value, size_t vl) {
353 return vsoxei16_v_i16mf4(base, bindex, value, vl);
354 }
355
356 //
357 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16mf2(
358 // CHECK-RV64-NEXT: entry:
359 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
360 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
361 // CHECK-RV64-NEXT: ret void
362 //
test_vsoxei16_v_i16mf2(int16_t * base,vuint16mf2_t bindex,vint16mf2_t value,size_t vl)363 void test_vsoxei16_v_i16mf2(int16_t *base, vuint16mf2_t bindex,
364 vint16mf2_t value, size_t vl) {
365 return vsoxei16_v_i16mf2(base, bindex, value, vl);
366 }
367
368 //
369 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16m1(
370 // CHECK-RV64-NEXT: entry:
371 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
372 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
373 // CHECK-RV64-NEXT: ret void
374 //
test_vsoxei16_v_i16m1(int16_t * base,vuint16m1_t bindex,vint16m1_t value,size_t vl)375 void test_vsoxei16_v_i16m1(int16_t *base, vuint16m1_t bindex, vint16m1_t value,
376 size_t vl) {
377 return vsoxei16_v_i16m1(base, bindex, value, vl);
378 }
379
380 //
381 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16m2(
382 // CHECK-RV64-NEXT: entry:
383 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
384 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
385 // CHECK-RV64-NEXT: ret void
386 //
test_vsoxei16_v_i16m2(int16_t * base,vuint16m2_t bindex,vint16m2_t value,size_t vl)387 void test_vsoxei16_v_i16m2(int16_t *base, vuint16m2_t bindex, vint16m2_t value,
388 size_t vl) {
389 return vsoxei16_v_i16m2(base, bindex, value, vl);
390 }
391
392 //
393 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16m4(
394 // CHECK-RV64-NEXT: entry:
395 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
396 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
397 // CHECK-RV64-NEXT: ret void
398 //
test_vsoxei16_v_i16m4(int16_t * base,vuint16m4_t bindex,vint16m4_t value,size_t vl)399 void test_vsoxei16_v_i16m4(int16_t *base, vuint16m4_t bindex, vint16m4_t value,
400 size_t vl) {
401 return vsoxei16_v_i16m4(base, bindex, value, vl);
402 }
403
404 //
405 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16m8(
406 // CHECK-RV64-NEXT: entry:
407 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
408 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
409 // CHECK-RV64-NEXT: ret void
410 //
test_vsoxei16_v_i16m8(int16_t * base,vuint16m8_t bindex,vint16m8_t value,size_t vl)411 void test_vsoxei16_v_i16m8(int16_t *base, vuint16m8_t bindex, vint16m8_t value,
412 size_t vl) {
413 return vsoxei16_v_i16m8(base, bindex, value, vl);
414 }
415
416 //
417 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16mf4(
418 // CHECK-RV64-NEXT: entry:
419 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
420 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
421 // CHECK-RV64-NEXT: ret void
422 //
test_vsoxei32_v_i16mf4(int16_t * base,vuint32mf2_t bindex,vint16mf4_t value,size_t vl)423 void test_vsoxei32_v_i16mf4(int16_t *base, vuint32mf2_t bindex,
424 vint16mf4_t value, size_t vl) {
425 return vsoxei32_v_i16mf4(base, bindex, value, vl);
426 }
427
428 //
429 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16mf2(
430 // CHECK-RV64-NEXT: entry:
431 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
432 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
433 // CHECK-RV64-NEXT: ret void
434 //
test_vsoxei32_v_i16mf2(int16_t * base,vuint32m1_t bindex,vint16mf2_t value,size_t vl)435 void test_vsoxei32_v_i16mf2(int16_t *base, vuint32m1_t bindex,
436 vint16mf2_t value, size_t vl) {
437 return vsoxei32_v_i16mf2(base, bindex, value, vl);
438 }
439
440 //
441 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16m1(
442 // CHECK-RV64-NEXT: entry:
443 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
444 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
445 // CHECK-RV64-NEXT: ret void
446 //
test_vsoxei32_v_i16m1(int16_t * base,vuint32m2_t bindex,vint16m1_t value,size_t vl)447 void test_vsoxei32_v_i16m1(int16_t *base, vuint32m2_t bindex, vint16m1_t value,
448 size_t vl) {
449 return vsoxei32_v_i16m1(base, bindex, value, vl);
450 }
451
452 //
453 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16m2(
454 // CHECK-RV64-NEXT: entry:
455 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
456 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
457 // CHECK-RV64-NEXT: ret void
458 //
test_vsoxei32_v_i16m2(int16_t * base,vuint32m4_t bindex,vint16m2_t value,size_t vl)459 void test_vsoxei32_v_i16m2(int16_t *base, vuint32m4_t bindex, vint16m2_t value,
460 size_t vl) {
461 return vsoxei32_v_i16m2(base, bindex, value, vl);
462 }
463
464 //
465 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16m4(
466 // CHECK-RV64-NEXT: entry:
467 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
468 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i32.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
469 // CHECK-RV64-NEXT: ret void
470 //
test_vsoxei32_v_i16m4(int16_t * base,vuint32m8_t bindex,vint16m4_t value,size_t vl)471 void test_vsoxei32_v_i16m4(int16_t *base, vuint32m8_t bindex, vint16m4_t value,
472 size_t vl) {
473 return vsoxei32_v_i16m4(base, bindex, value, vl);
474 }
475
476 //
477 // CHECK-RV64-LABEL: @test_vsoxei64_v_i16mf4(
478 // CHECK-RV64-NEXT: entry:
479 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
480 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
481 // CHECK-RV64-NEXT: ret void
482 //
test_vsoxei64_v_i16mf4(int16_t * base,vuint64m1_t bindex,vint16mf4_t value,size_t vl)483 void test_vsoxei64_v_i16mf4(int16_t *base, vuint64m1_t bindex,
484 vint16mf4_t value, size_t vl) {
485 return vsoxei64_v_i16mf4(base, bindex, value, vl);
486 }
487
488 //
489 // CHECK-RV64-LABEL: @test_vsoxei64_v_i16mf2(
490 // CHECK-RV64-NEXT: entry:
491 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
492 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
493 // CHECK-RV64-NEXT: ret void
494 //
test_vsoxei64_v_i16mf2(int16_t * base,vuint64m2_t bindex,vint16mf2_t value,size_t vl)495 void test_vsoxei64_v_i16mf2(int16_t *base, vuint64m2_t bindex,
496 vint16mf2_t value, size_t vl) {
497 return vsoxei64_v_i16mf2(base, bindex, value, vl);
498 }
499
500 //
501 // CHECK-RV64-LABEL: @test_vsoxei64_v_i16m1(
502 // CHECK-RV64-NEXT: entry:
503 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
504 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
505 // CHECK-RV64-NEXT: ret void
506 //
test_vsoxei64_v_i16m1(int16_t * base,vuint64m4_t bindex,vint16m1_t value,size_t vl)507 void test_vsoxei64_v_i16m1(int16_t *base, vuint64m4_t bindex, vint16m1_t value,
508 size_t vl) {
509 return vsoxei64_v_i16m1(base, bindex, value, vl);
510 }
511
512 //
513 // CHECK-RV64-LABEL: @test_vsoxei64_v_i16m2(
514 // CHECK-RV64-NEXT: entry:
515 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
516 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
517 // CHECK-RV64-NEXT: ret void
518 //
test_vsoxei64_v_i16m2(int16_t * base,vuint64m8_t bindex,vint16m2_t value,size_t vl)519 void test_vsoxei64_v_i16m2(int16_t *base, vuint64m8_t bindex, vint16m2_t value,
520 size_t vl) {
521 return vsoxei64_v_i16m2(base, bindex, value, vl);
522 }
523
524 //
525 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32mf2(
526 // CHECK-RV64-NEXT: entry:
527 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
528 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
529 // CHECK-RV64-NEXT: ret void
530 //
test_vsoxei8_v_i32mf2(int32_t * base,vuint8mf8_t bindex,vint32mf2_t value,size_t vl)531 void test_vsoxei8_v_i32mf2(int32_t *base, vuint8mf8_t bindex, vint32mf2_t value,
532 size_t vl) {
533 return vsoxei8_v_i32mf2(base, bindex, value, vl);
534 }
535
536 //
537 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32m1(
538 // CHECK-RV64-NEXT: entry:
539 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
540 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
541 // CHECK-RV64-NEXT: ret void
542 //
test_vsoxei8_v_i32m1(int32_t * base,vuint8mf4_t bindex,vint32m1_t value,size_t vl)543 void test_vsoxei8_v_i32m1(int32_t *base, vuint8mf4_t bindex, vint32m1_t value,
544 size_t vl) {
545 return vsoxei8_v_i32m1(base, bindex, value, vl);
546 }
547
548 //
549 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32m2(
550 // CHECK-RV64-NEXT: entry:
551 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
552 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
553 // CHECK-RV64-NEXT: ret void
554 //
test_vsoxei8_v_i32m2(int32_t * base,vuint8mf2_t bindex,vint32m2_t value,size_t vl)555 void test_vsoxei8_v_i32m2(int32_t *base, vuint8mf2_t bindex, vint32m2_t value,
556 size_t vl) {
557 return vsoxei8_v_i32m2(base, bindex, value, vl);
558 }
559
560 //
561 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32m4(
562 // CHECK-RV64-NEXT: entry:
563 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
564 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i8.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
565 // CHECK-RV64-NEXT: ret void
566 //
test_vsoxei8_v_i32m4(int32_t * base,vuint8m1_t bindex,vint32m4_t value,size_t vl)567 void test_vsoxei8_v_i32m4(int32_t *base, vuint8m1_t bindex, vint32m4_t value,
568 size_t vl) {
569 return vsoxei8_v_i32m4(base, bindex, value, vl);
570 }
571
572 //
573 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32m8(
574 // CHECK-RV64-NEXT: entry:
575 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
576 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i8.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
577 // CHECK-RV64-NEXT: ret void
578 //
test_vsoxei8_v_i32m8(int32_t * base,vuint8m2_t bindex,vint32m8_t value,size_t vl)579 void test_vsoxei8_v_i32m8(int32_t *base, vuint8m2_t bindex, vint32m8_t value,
580 size_t vl) {
581 return vsoxei8_v_i32m8(base, bindex, value, vl);
582 }
583
584 //
585 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32mf2(
586 // CHECK-RV64-NEXT: entry:
587 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
588 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
589 // CHECK-RV64-NEXT: ret void
590 //
test_vsoxei16_v_i32mf2(int32_t * base,vuint16mf4_t bindex,vint32mf2_t value,size_t vl)591 void test_vsoxei16_v_i32mf2(int32_t *base, vuint16mf4_t bindex,
592 vint32mf2_t value, size_t vl) {
593 return vsoxei16_v_i32mf2(base, bindex, value, vl);
594 }
595
596 //
597 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32m1(
598 // CHECK-RV64-NEXT: entry:
599 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
600 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
601 // CHECK-RV64-NEXT: ret void
602 //
test_vsoxei16_v_i32m1(int32_t * base,vuint16mf2_t bindex,vint32m1_t value,size_t vl)603 void test_vsoxei16_v_i32m1(int32_t *base, vuint16mf2_t bindex, vint32m1_t value,
604 size_t vl) {
605 return vsoxei16_v_i32m1(base, bindex, value, vl);
606 }
607
608 //
609 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32m2(
610 // CHECK-RV64-NEXT: entry:
611 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
612 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
613 // CHECK-RV64-NEXT: ret void
614 //
test_vsoxei16_v_i32m2(int32_t * base,vuint16m1_t bindex,vint32m2_t value,size_t vl)615 void test_vsoxei16_v_i32m2(int32_t *base, vuint16m1_t bindex, vint32m2_t value,
616 size_t vl) {
617 return vsoxei16_v_i32m2(base, bindex, value, vl);
618 }
619
620 //
621 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32m4(
622 // CHECK-RV64-NEXT: entry:
623 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
624 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
625 // CHECK-RV64-NEXT: ret void
626 //
test_vsoxei16_v_i32m4(int32_t * base,vuint16m2_t bindex,vint32m4_t value,size_t vl)627 void test_vsoxei16_v_i32m4(int32_t *base, vuint16m2_t bindex, vint32m4_t value,
628 size_t vl) {
629 return vsoxei16_v_i32m4(base, bindex, value, vl);
630 }
631
632 //
633 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32m8(
634 // CHECK-RV64-NEXT: entry:
635 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
636 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i16.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
637 // CHECK-RV64-NEXT: ret void
638 //
test_vsoxei16_v_i32m8(int32_t * base,vuint16m4_t bindex,vint32m8_t value,size_t vl)639 void test_vsoxei16_v_i32m8(int32_t *base, vuint16m4_t bindex, vint32m8_t value,
640 size_t vl) {
641 return vsoxei16_v_i32m8(base, bindex, value, vl);
642 }
643
644 //
645 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32mf2(
646 // CHECK-RV64-NEXT: entry:
647 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
648 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
649 // CHECK-RV64-NEXT: ret void
650 //
test_vsoxei32_v_i32mf2(int32_t * base,vuint32mf2_t bindex,vint32mf2_t value,size_t vl)651 void test_vsoxei32_v_i32mf2(int32_t *base, vuint32mf2_t bindex,
652 vint32mf2_t value, size_t vl) {
653 return vsoxei32_v_i32mf2(base, bindex, value, vl);
654 }
655
656 //
657 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32m1(
658 // CHECK-RV64-NEXT: entry:
659 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
660 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
661 // CHECK-RV64-NEXT: ret void
662 //
test_vsoxei32_v_i32m1(int32_t * base,vuint32m1_t bindex,vint32m1_t value,size_t vl)663 void test_vsoxei32_v_i32m1(int32_t *base, vuint32m1_t bindex, vint32m1_t value,
664 size_t vl) {
665 return vsoxei32_v_i32m1(base, bindex, value, vl);
666 }
667
668 //
669 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32m2(
670 // CHECK-RV64-NEXT: entry:
671 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
672 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
673 // CHECK-RV64-NEXT: ret void
674 //
test_vsoxei32_v_i32m2(int32_t * base,vuint32m2_t bindex,vint32m2_t value,size_t vl)675 void test_vsoxei32_v_i32m2(int32_t *base, vuint32m2_t bindex, vint32m2_t value,
676 size_t vl) {
677 return vsoxei32_v_i32m2(base, bindex, value, vl);
678 }
679
680 //
681 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32m4(
682 // CHECK-RV64-NEXT: entry:
683 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
684 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
685 // CHECK-RV64-NEXT: ret void
686 //
test_vsoxei32_v_i32m4(int32_t * base,vuint32m4_t bindex,vint32m4_t value,size_t vl)687 void test_vsoxei32_v_i32m4(int32_t *base, vuint32m4_t bindex, vint32m4_t value,
688 size_t vl) {
689 return vsoxei32_v_i32m4(base, bindex, value, vl);
690 }
691
692 //
693 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32m8(
694 // CHECK-RV64-NEXT: entry:
695 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
696 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
697 // CHECK-RV64-NEXT: ret void
698 //
test_vsoxei32_v_i32m8(int32_t * base,vuint32m8_t bindex,vint32m8_t value,size_t vl)699 void test_vsoxei32_v_i32m8(int32_t *base, vuint32m8_t bindex, vint32m8_t value,
700 size_t vl) {
701 return vsoxei32_v_i32m8(base, bindex, value, vl);
702 }
703
704 //
705 // CHECK-RV64-LABEL: @test_vsoxei64_v_i32mf2(
706 // CHECK-RV64-NEXT: entry:
707 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
708 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
709 // CHECK-RV64-NEXT: ret void
710 //
test_vsoxei64_v_i32mf2(int32_t * base,vuint64m1_t bindex,vint32mf2_t value,size_t vl)711 void test_vsoxei64_v_i32mf2(int32_t *base, vuint64m1_t bindex,
712 vint32mf2_t value, size_t vl) {
713 return vsoxei64_v_i32mf2(base, bindex, value, vl);
714 }
715
716 //
717 // CHECK-RV64-LABEL: @test_vsoxei64_v_i32m1(
718 // CHECK-RV64-NEXT: entry:
719 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
720 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
721 // CHECK-RV64-NEXT: ret void
722 //
test_vsoxei64_v_i32m1(int32_t * base,vuint64m2_t bindex,vint32m1_t value,size_t vl)723 void test_vsoxei64_v_i32m1(int32_t *base, vuint64m2_t bindex, vint32m1_t value,
724 size_t vl) {
725 return vsoxei64_v_i32m1(base, bindex, value, vl);
726 }
727
728 //
729 // CHECK-RV64-LABEL: @test_vsoxei64_v_i32m2(
730 // CHECK-RV64-NEXT: entry:
731 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
732 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
733 // CHECK-RV64-NEXT: ret void
734 //
test_vsoxei64_v_i32m2(int32_t * base,vuint64m4_t bindex,vint32m2_t value,size_t vl)735 void test_vsoxei64_v_i32m2(int32_t *base, vuint64m4_t bindex, vint32m2_t value,
736 size_t vl) {
737 return vsoxei64_v_i32m2(base, bindex, value, vl);
738 }
739
740 //
741 // CHECK-RV64-LABEL: @test_vsoxei64_v_i32m4(
742 // CHECK-RV64-NEXT: entry:
743 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
744 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
745 // CHECK-RV64-NEXT: ret void
746 //
test_vsoxei64_v_i32m4(int32_t * base,vuint64m8_t bindex,vint32m4_t value,size_t vl)747 void test_vsoxei64_v_i32m4(int32_t *base, vuint64m8_t bindex, vint32m4_t value,
748 size_t vl) {
749 return vsoxei64_v_i32m4(base, bindex, value, vl);
750 }
751
752 //
753 // CHECK-RV64-LABEL: @test_vsoxei8_v_i64m1(
754 // CHECK-RV64-NEXT: entry:
755 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
756 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
757 // CHECK-RV64-NEXT: ret void
758 //
test_vsoxei8_v_i64m1(int64_t * base,vuint8mf8_t bindex,vint64m1_t value,size_t vl)759 void test_vsoxei8_v_i64m1(int64_t *base, vuint8mf8_t bindex, vint64m1_t value,
760 size_t vl) {
761 return vsoxei8_v_i64m1(base, bindex, value, vl);
762 }
763
764 //
765 // CHECK-RV64-LABEL: @test_vsoxei8_v_i64m2(
766 // CHECK-RV64-NEXT: entry:
767 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
768 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
769 // CHECK-RV64-NEXT: ret void
770 //
test_vsoxei8_v_i64m2(int64_t * base,vuint8mf4_t bindex,vint64m2_t value,size_t vl)771 void test_vsoxei8_v_i64m2(int64_t *base, vuint8mf4_t bindex, vint64m2_t value,
772 size_t vl) {
773 return vsoxei8_v_i64m2(base, bindex, value, vl);
774 }
775
776 //
777 // CHECK-RV64-LABEL: @test_vsoxei8_v_i64m4(
778 // CHECK-RV64-NEXT: entry:
779 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
780 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
781 // CHECK-RV64-NEXT: ret void
782 //
test_vsoxei8_v_i64m4(int64_t * base,vuint8mf2_t bindex,vint64m4_t value,size_t vl)783 void test_vsoxei8_v_i64m4(int64_t *base, vuint8mf2_t bindex, vint64m4_t value,
784 size_t vl) {
785 return vsoxei8_v_i64m4(base, bindex, value, vl);
786 }
787
788 //
789 // CHECK-RV64-LABEL: @test_vsoxei8_v_i64m8(
790 // CHECK-RV64-NEXT: entry:
791 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
792 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i8.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
793 // CHECK-RV64-NEXT: ret void
794 //
test_vsoxei8_v_i64m8(int64_t * base,vuint8m1_t bindex,vint64m8_t value,size_t vl)795 void test_vsoxei8_v_i64m8(int64_t *base, vuint8m1_t bindex, vint64m8_t value,
796 size_t vl) {
797 return vsoxei8_v_i64m8(base, bindex, value, vl);
798 }
799
800 //
801 // CHECK-RV64-LABEL: @test_vsoxei16_v_i64m1(
802 // CHECK-RV64-NEXT: entry:
803 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
804 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
805 // CHECK-RV64-NEXT: ret void
806 //
test_vsoxei16_v_i64m1(int64_t * base,vuint16mf4_t bindex,vint64m1_t value,size_t vl)807 void test_vsoxei16_v_i64m1(int64_t *base, vuint16mf4_t bindex, vint64m1_t value,
808 size_t vl) {
809 return vsoxei16_v_i64m1(base, bindex, value, vl);
810 }
811
812 //
813 // CHECK-RV64-LABEL: @test_vsoxei16_v_i64m2(
814 // CHECK-RV64-NEXT: entry:
815 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
816 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
817 // CHECK-RV64-NEXT: ret void
818 //
test_vsoxei16_v_i64m2(int64_t * base,vuint16mf2_t bindex,vint64m2_t value,size_t vl)819 void test_vsoxei16_v_i64m2(int64_t *base, vuint16mf2_t bindex, vint64m2_t value,
820 size_t vl) {
821 return vsoxei16_v_i64m2(base, bindex, value, vl);
822 }
823
824 //
825 // CHECK-RV64-LABEL: @test_vsoxei16_v_i64m4(
826 // CHECK-RV64-NEXT: entry:
827 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
828 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
829 // CHECK-RV64-NEXT: ret void
830 //
test_vsoxei16_v_i64m4(int64_t * base,vuint16m1_t bindex,vint64m4_t value,size_t vl)831 void test_vsoxei16_v_i64m4(int64_t *base, vuint16m1_t bindex, vint64m4_t value,
832 size_t vl) {
833 return vsoxei16_v_i64m4(base, bindex, value, vl);
834 }
835
836 //
837 // CHECK-RV64-LABEL: @test_vsoxei16_v_i64m8(
838 // CHECK-RV64-NEXT: entry:
839 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
840 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i16.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
841 // CHECK-RV64-NEXT: ret void
842 //
test_vsoxei16_v_i64m8(int64_t * base,vuint16m2_t bindex,vint64m8_t value,size_t vl)843 void test_vsoxei16_v_i64m8(int64_t *base, vuint16m2_t bindex, vint64m8_t value,
844 size_t vl) {
845 return vsoxei16_v_i64m8(base, bindex, value, vl);
846 }
847
848 //
849 // CHECK-RV64-LABEL: @test_vsoxei32_v_i64m1(
850 // CHECK-RV64-NEXT: entry:
851 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
852 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
853 // CHECK-RV64-NEXT: ret void
854 //
test_vsoxei32_v_i64m1(int64_t * base,vuint32mf2_t bindex,vint64m1_t value,size_t vl)855 void test_vsoxei32_v_i64m1(int64_t *base, vuint32mf2_t bindex, vint64m1_t value,
856 size_t vl) {
857 return vsoxei32_v_i64m1(base, bindex, value, vl);
858 }
859
860 //
861 // CHECK-RV64-LABEL: @test_vsoxei32_v_i64m2(
862 // CHECK-RV64-NEXT: entry:
863 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
864 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
865 // CHECK-RV64-NEXT: ret void
866 //
test_vsoxei32_v_i64m2(int64_t * base,vuint32m1_t bindex,vint64m2_t value,size_t vl)867 void test_vsoxei32_v_i64m2(int64_t *base, vuint32m1_t bindex, vint64m2_t value,
868 size_t vl) {
869 return vsoxei32_v_i64m2(base, bindex, value, vl);
870 }
871
872 //
873 // CHECK-RV64-LABEL: @test_vsoxei32_v_i64m4(
874 // CHECK-RV64-NEXT: entry:
875 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
876 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
877 // CHECK-RV64-NEXT: ret void
878 //
test_vsoxei32_v_i64m4(int64_t * base,vuint32m2_t bindex,vint64m4_t value,size_t vl)879 void test_vsoxei32_v_i64m4(int64_t *base, vuint32m2_t bindex, vint64m4_t value,
880 size_t vl) {
881 return vsoxei32_v_i64m4(base, bindex, value, vl);
882 }
883
884 //
885 // CHECK-RV64-LABEL: @test_vsoxei32_v_i64m8(
886 // CHECK-RV64-NEXT: entry:
887 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
888 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i32.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
889 // CHECK-RV64-NEXT: ret void
890 //
test_vsoxei32_v_i64m8(int64_t * base,vuint32m4_t bindex,vint64m8_t value,size_t vl)891 void test_vsoxei32_v_i64m8(int64_t *base, vuint32m4_t bindex, vint64m8_t value,
892 size_t vl) {
893 return vsoxei32_v_i64m8(base, bindex, value, vl);
894 }
895
896 //
897 // CHECK-RV64-LABEL: @test_vsoxei64_v_i64m1(
898 // CHECK-RV64-NEXT: entry:
899 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
900 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
901 // CHECK-RV64-NEXT: ret void
902 //
test_vsoxei64_v_i64m1(int64_t * base,vuint64m1_t bindex,vint64m1_t value,size_t vl)903 void test_vsoxei64_v_i64m1(int64_t *base, vuint64m1_t bindex, vint64m1_t value,
904 size_t vl) {
905 return vsoxei64_v_i64m1(base, bindex, value, vl);
906 }
907
908 //
909 // CHECK-RV64-LABEL: @test_vsoxei64_v_i64m2(
910 // CHECK-RV64-NEXT: entry:
911 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
912 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
913 // CHECK-RV64-NEXT: ret void
914 //
test_vsoxei64_v_i64m2(int64_t * base,vuint64m2_t bindex,vint64m2_t value,size_t vl)915 void test_vsoxei64_v_i64m2(int64_t *base, vuint64m2_t bindex, vint64m2_t value,
916 size_t vl) {
917 return vsoxei64_v_i64m2(base, bindex, value, vl);
918 }
919
920 //
921 // CHECK-RV64-LABEL: @test_vsoxei64_v_i64m4(
922 // CHECK-RV64-NEXT: entry:
923 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
924 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
925 // CHECK-RV64-NEXT: ret void
926 //
test_vsoxei64_v_i64m4(int64_t * base,vuint64m4_t bindex,vint64m4_t value,size_t vl)927 void test_vsoxei64_v_i64m4(int64_t *base, vuint64m4_t bindex, vint64m4_t value,
928 size_t vl) {
929 return vsoxei64_v_i64m4(base, bindex, value, vl);
930 }
931
932 //
933 // CHECK-RV64-LABEL: @test_vsoxei64_v_i64m8(
934 // CHECK-RV64-NEXT: entry:
935 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
936 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
937 // CHECK-RV64-NEXT: ret void
938 //
test_vsoxei64_v_i64m8(int64_t * base,vuint64m8_t bindex,vint64m8_t value,size_t vl)939 void test_vsoxei64_v_i64m8(int64_t *base, vuint64m8_t bindex, vint64m8_t value,
940 size_t vl) {
941 return vsoxei64_v_i64m8(base, bindex, value, vl);
942 }
943
944 //
945 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf8(
946 // CHECK-RV64-NEXT: entry:
947 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
948 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
949 // CHECK-RV64-NEXT: ret void
950 //
test_vsoxei8_v_u8mf8(uint8_t * base,vuint8mf8_t bindex,vuint8mf8_t value,size_t vl)951 void test_vsoxei8_v_u8mf8(uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t value,
952 size_t vl) {
953 return vsoxei8_v_u8mf8(base, bindex, value, vl);
954 }
955
956 //
957 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf4(
958 // CHECK-RV64-NEXT: entry:
959 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
960 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
961 // CHECK-RV64-NEXT: ret void
962 //
test_vsoxei8_v_u8mf4(uint8_t * base,vuint8mf4_t bindex,vuint8mf4_t value,size_t vl)963 void test_vsoxei8_v_u8mf4(uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t value,
964 size_t vl) {
965 return vsoxei8_v_u8mf4(base, bindex, value, vl);
966 }
967
968 //
969 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf2(
970 // CHECK-RV64-NEXT: entry:
971 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
972 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
973 // CHECK-RV64-NEXT: ret void
974 //
test_vsoxei8_v_u8mf2(uint8_t * base,vuint8mf2_t bindex,vuint8mf2_t value,size_t vl)975 void test_vsoxei8_v_u8mf2(uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t value,
976 size_t vl) {
977 return vsoxei8_v_u8mf2(base, bindex, value, vl);
978 }
979
980 //
981 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8m1(
982 // CHECK-RV64-NEXT: entry:
983 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
984 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
985 // CHECK-RV64-NEXT: ret void
986 //
test_vsoxei8_v_u8m1(uint8_t * base,vuint8m1_t bindex,vuint8m1_t value,size_t vl)987 void test_vsoxei8_v_u8m1(uint8_t *base, vuint8m1_t bindex, vuint8m1_t value,
988 size_t vl) {
989 return vsoxei8_v_u8m1(base, bindex, value, vl);
990 }
991
992 //
993 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8m2(
994 // CHECK-RV64-NEXT: entry:
995 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
996 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
997 // CHECK-RV64-NEXT: ret void
998 //
test_vsoxei8_v_u8m2(uint8_t * base,vuint8m2_t bindex,vuint8m2_t value,size_t vl)999 void test_vsoxei8_v_u8m2(uint8_t *base, vuint8m2_t bindex, vuint8m2_t value,
1000 size_t vl) {
1001 return vsoxei8_v_u8m2(base, bindex, value, vl);
1002 }
1003
1004 //
1005 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8m4(
1006 // CHECK-RV64-NEXT: entry:
1007 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
1008 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1009 // CHECK-RV64-NEXT: ret void
1010 //
test_vsoxei8_v_u8m4(uint8_t * base,vuint8m4_t bindex,vuint8m4_t value,size_t vl)1011 void test_vsoxei8_v_u8m4(uint8_t *base, vuint8m4_t bindex, vuint8m4_t value,
1012 size_t vl) {
1013 return vsoxei8_v_u8m4(base, bindex, value, vl);
1014 }
1015
1016 //
1017 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8m8(
1018 // CHECK-RV64-NEXT: entry:
1019 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
1020 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1021 // CHECK-RV64-NEXT: ret void
1022 //
test_vsoxei8_v_u8m8(uint8_t * base,vuint8m8_t bindex,vuint8m8_t value,size_t vl)1023 void test_vsoxei8_v_u8m8(uint8_t *base, vuint8m8_t bindex, vuint8m8_t value,
1024 size_t vl) {
1025 return vsoxei8_v_u8m8(base, bindex, value, vl);
1026 }
1027
1028 //
1029 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf8(
1030 // CHECK-RV64-NEXT: entry:
1031 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
1032 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1033 // CHECK-RV64-NEXT: ret void
1034 //
test_vsoxei16_v_u8mf8(uint8_t * base,vuint16mf4_t bindex,vuint8mf8_t value,size_t vl)1035 void test_vsoxei16_v_u8mf8(uint8_t *base, vuint16mf4_t bindex,
1036 vuint8mf8_t value, size_t vl) {
1037 return vsoxei16_v_u8mf8(base, bindex, value, vl);
1038 }
1039
1040 //
1041 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf4(
1042 // CHECK-RV64-NEXT: entry:
1043 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
1044 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1045 // CHECK-RV64-NEXT: ret void
1046 //
test_vsoxei16_v_u8mf4(uint8_t * base,vuint16mf2_t bindex,vuint8mf4_t value,size_t vl)1047 void test_vsoxei16_v_u8mf4(uint8_t *base, vuint16mf2_t bindex,
1048 vuint8mf4_t value, size_t vl) {
1049 return vsoxei16_v_u8mf4(base, bindex, value, vl);
1050 }
1051
1052 //
1053 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf2(
1054 // CHECK-RV64-NEXT: entry:
1055 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
1056 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1057 // CHECK-RV64-NEXT: ret void
1058 //
test_vsoxei16_v_u8mf2(uint8_t * base,vuint16m1_t bindex,vuint8mf2_t value,size_t vl)1059 void test_vsoxei16_v_u8mf2(uint8_t *base, vuint16m1_t bindex, vuint8mf2_t value,
1060 size_t vl) {
1061 return vsoxei16_v_u8mf2(base, bindex, value, vl);
1062 }
1063
1064 //
1065 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8m1(
1066 // CHECK-RV64-NEXT: entry:
1067 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
1068 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1069 // CHECK-RV64-NEXT: ret void
1070 //
test_vsoxei16_v_u8m1(uint8_t * base,vuint16m2_t bindex,vuint8m1_t value,size_t vl)1071 void test_vsoxei16_v_u8m1(uint8_t *base, vuint16m2_t bindex, vuint8m1_t value,
1072 size_t vl) {
1073 return vsoxei16_v_u8m1(base, bindex, value, vl);
1074 }
1075
1076 //
1077 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8m2(
1078 // CHECK-RV64-NEXT: entry:
1079 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
1080 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1081 // CHECK-RV64-NEXT: ret void
1082 //
test_vsoxei16_v_u8m2(uint8_t * base,vuint16m4_t bindex,vuint8m2_t value,size_t vl)1083 void test_vsoxei16_v_u8m2(uint8_t *base, vuint16m4_t bindex, vuint8m2_t value,
1084 size_t vl) {
1085 return vsoxei16_v_u8m2(base, bindex, value, vl);
1086 }
1087
1088 //
1089 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8m4(
1090 // CHECK-RV64-NEXT: entry:
1091 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
1092 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1093 // CHECK-RV64-NEXT: ret void
1094 //
test_vsoxei16_v_u8m4(uint8_t * base,vuint16m8_t bindex,vuint8m4_t value,size_t vl)1095 void test_vsoxei16_v_u8m4(uint8_t *base, vuint16m8_t bindex, vuint8m4_t value,
1096 size_t vl) {
1097 return vsoxei16_v_u8m4(base, bindex, value, vl);
1098 }
1099
1100 //
1101 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf8(
1102 // CHECK-RV64-NEXT: entry:
1103 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
1104 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1105 // CHECK-RV64-NEXT: ret void
1106 //
test_vsoxei32_v_u8mf8(uint8_t * base,vuint32mf2_t bindex,vuint8mf8_t value,size_t vl)1107 void test_vsoxei32_v_u8mf8(uint8_t *base, vuint32mf2_t bindex,
1108 vuint8mf8_t value, size_t vl) {
1109 return vsoxei32_v_u8mf8(base, bindex, value, vl);
1110 }
1111
1112 //
1113 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf4(
1114 // CHECK-RV64-NEXT: entry:
1115 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
1116 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1117 // CHECK-RV64-NEXT: ret void
1118 //
test_vsoxei32_v_u8mf4(uint8_t * base,vuint32m1_t bindex,vuint8mf4_t value,size_t vl)1119 void test_vsoxei32_v_u8mf4(uint8_t *base, vuint32m1_t bindex, vuint8mf4_t value,
1120 size_t vl) {
1121 return vsoxei32_v_u8mf4(base, bindex, value, vl);
1122 }
1123
1124 //
1125 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf2(
1126 // CHECK-RV64-NEXT: entry:
1127 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
1128 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1129 // CHECK-RV64-NEXT: ret void
1130 //
test_vsoxei32_v_u8mf2(uint8_t * base,vuint32m2_t bindex,vuint8mf2_t value,size_t vl)1131 void test_vsoxei32_v_u8mf2(uint8_t *base, vuint32m2_t bindex, vuint8mf2_t value,
1132 size_t vl) {
1133 return vsoxei32_v_u8mf2(base, bindex, value, vl);
1134 }
1135
1136 //
1137 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8m1(
1138 // CHECK-RV64-NEXT: entry:
1139 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
1140 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1141 // CHECK-RV64-NEXT: ret void
1142 //
test_vsoxei32_v_u8m1(uint8_t * base,vuint32m4_t bindex,vuint8m1_t value,size_t vl)1143 void test_vsoxei32_v_u8m1(uint8_t *base, vuint32m4_t bindex, vuint8m1_t value,
1144 size_t vl) {
1145 return vsoxei32_v_u8m1(base, bindex, value, vl);
1146 }
1147
1148 //
1149 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8m2(
1150 // CHECK-RV64-NEXT: entry:
1151 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
1152 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1153 // CHECK-RV64-NEXT: ret void
1154 //
test_vsoxei32_v_u8m2(uint8_t * base,vuint32m8_t bindex,vuint8m2_t value,size_t vl)1155 void test_vsoxei32_v_u8m2(uint8_t *base, vuint32m8_t bindex, vuint8m2_t value,
1156 size_t vl) {
1157 return vsoxei32_v_u8m2(base, bindex, value, vl);
1158 }
1159
1160 //
1161 // CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf8(
1162 // CHECK-RV64-NEXT: entry:
1163 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
1164 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1165 // CHECK-RV64-NEXT: ret void
1166 //
test_vsoxei64_v_u8mf8(uint8_t * base,vuint64m1_t bindex,vuint8mf8_t value,size_t vl)1167 void test_vsoxei64_v_u8mf8(uint8_t *base, vuint64m1_t bindex, vuint8mf8_t value,
1168 size_t vl) {
1169 return vsoxei64_v_u8mf8(base, bindex, value, vl);
1170 }
1171
1172 //
1173 // CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf4(
1174 // CHECK-RV64-NEXT: entry:
1175 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
1176 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1177 // CHECK-RV64-NEXT: ret void
1178 //
test_vsoxei64_v_u8mf4(uint8_t * base,vuint64m2_t bindex,vuint8mf4_t value,size_t vl)1179 void test_vsoxei64_v_u8mf4(uint8_t *base, vuint64m2_t bindex, vuint8mf4_t value,
1180 size_t vl) {
1181 return vsoxei64_v_u8mf4(base, bindex, value, vl);
1182 }
1183
1184 //
1185 // CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf2(
1186 // CHECK-RV64-NEXT: entry:
1187 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
1188 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1189 // CHECK-RV64-NEXT: ret void
1190 //
// Ordered indexed store of u8mf2 data using 64-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei64_v_u8mf2(uint8_t *base, vuint64m4_t bindex, vuint8mf2_t value,
                           size_t vl) {
  return vsoxei64_v_u8mf2(base, bindex, value, vl);
}
1195
1196 //
1197 // CHECK-RV64-LABEL: @test_vsoxei64_v_u8m1(
1198 // CHECK-RV64-NEXT: entry:
1199 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
1200 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1201 // CHECK-RV64-NEXT: ret void
1202 //
// Ordered indexed store of u8m1 data using 64-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei64_v_u8m1(uint8_t *base, vuint64m8_t bindex, vuint8m1_t value,
                          size_t vl) {
  return vsoxei64_v_u8m1(base, bindex, value, vl);
}
1207
1208 //
1209 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16mf4(
1210 // CHECK-RV64-NEXT: entry:
1211 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
1212 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1213 // CHECK-RV64-NEXT: ret void
1214 //
// Ordered indexed store of u16mf4 data using 8-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei8_v_u16mf4(uint16_t *base, vuint8mf8_t bindex,
                           vuint16mf4_t value, size_t vl) {
  return vsoxei8_v_u16mf4(base, bindex, value, vl);
}
1219
1220 //
1221 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16mf2(
1222 // CHECK-RV64-NEXT: entry:
1223 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
1224 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1225 // CHECK-RV64-NEXT: ret void
1226 //
// Ordered indexed store of u16mf2 data using 8-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei8_v_u16mf2(uint16_t *base, vuint8mf4_t bindex,
                           vuint16mf2_t value, size_t vl) {
  return vsoxei8_v_u16mf2(base, bindex, value, vl);
}
1231
1232 //
1233 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16m1(
1234 // CHECK-RV64-NEXT: entry:
1235 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
1236 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1237 // CHECK-RV64-NEXT: ret void
1238 //
// Ordered indexed store of u16m1 data using 8-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei8_v_u16m1(uint16_t *base, vuint8mf2_t bindex, vuint16m1_t value,
                          size_t vl) {
  return vsoxei8_v_u16m1(base, bindex, value, vl);
}
1243
1244 //
1245 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16m2(
1246 // CHECK-RV64-NEXT: entry:
1247 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
1248 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1249 // CHECK-RV64-NEXT: ret void
1250 //
// Ordered indexed store of u16m2 data using 8-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei8_v_u16m2(uint16_t *base, vuint8m1_t bindex, vuint16m2_t value,
                          size_t vl) {
  return vsoxei8_v_u16m2(base, bindex, value, vl);
}
1255
1256 //
1257 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16m4(
1258 // CHECK-RV64-NEXT: entry:
1259 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
1260 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i8.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1261 // CHECK-RV64-NEXT: ret void
1262 //
// Ordered indexed store of u16m4 data using 8-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei8_v_u16m4(uint16_t *base, vuint8m2_t bindex, vuint16m4_t value,
                          size_t vl) {
  return vsoxei8_v_u16m4(base, bindex, value, vl);
}
1267
1268 //
1269 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16m8(
1270 // CHECK-RV64-NEXT: entry:
1271 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
1272 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i8.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1273 // CHECK-RV64-NEXT: ret void
1274 //
// Ordered indexed store of u16m8 data using 8-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei8_v_u16m8(uint16_t *base, vuint8m4_t bindex, vuint16m8_t value,
                          size_t vl) {
  return vsoxei8_v_u16m8(base, bindex, value, vl);
}
1279
1280 //
1281 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16mf4(
1282 // CHECK-RV64-NEXT: entry:
1283 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
1284 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1285 // CHECK-RV64-NEXT: ret void
1286 //
// Ordered indexed store of u16mf4 data using 16-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei16_v_u16mf4(uint16_t *base, vuint16mf4_t bindex,
                            vuint16mf4_t value, size_t vl) {
  return vsoxei16_v_u16mf4(base, bindex, value, vl);
}
1291
1292 //
1293 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16mf2(
1294 // CHECK-RV64-NEXT: entry:
1295 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
1296 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1297 // CHECK-RV64-NEXT: ret void
1298 //
// Ordered indexed store of u16mf2 data using 16-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei16_v_u16mf2(uint16_t *base, vuint16mf2_t bindex,
                            vuint16mf2_t value, size_t vl) {
  return vsoxei16_v_u16mf2(base, bindex, value, vl);
}
1303
1304 //
1305 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16m1(
1306 // CHECK-RV64-NEXT: entry:
1307 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
1308 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1309 // CHECK-RV64-NEXT: ret void
1310 //
// Ordered indexed store of u16m1 data using 16-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei16_v_u16m1(uint16_t *base, vuint16m1_t bindex,
                           vuint16m1_t value, size_t vl) {
  return vsoxei16_v_u16m1(base, bindex, value, vl);
}
1315
1316 //
1317 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16m2(
1318 // CHECK-RV64-NEXT: entry:
1319 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
1320 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1321 // CHECK-RV64-NEXT: ret void
1322 //
// Ordered indexed store of u16m2 data using 16-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei16_v_u16m2(uint16_t *base, vuint16m2_t bindex,
                           vuint16m2_t value, size_t vl) {
  return vsoxei16_v_u16m2(base, bindex, value, vl);
}
1327
1328 //
1329 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16m4(
1330 // CHECK-RV64-NEXT: entry:
1331 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
1332 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1333 // CHECK-RV64-NEXT: ret void
1334 //
// Ordered indexed store of u16m4 data using 16-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei16_v_u16m4(uint16_t *base, vuint16m4_t bindex,
                           vuint16m4_t value, size_t vl) {
  return vsoxei16_v_u16m4(base, bindex, value, vl);
}
1339
1340 //
1341 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16m8(
1342 // CHECK-RV64-NEXT: entry:
1343 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
1344 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1345 // CHECK-RV64-NEXT: ret void
1346 //
// Ordered indexed store of u16m8 data using 16-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei16_v_u16m8(uint16_t *base, vuint16m8_t bindex,
                           vuint16m8_t value, size_t vl) {
  return vsoxei16_v_u16m8(base, bindex, value, vl);
}
1351
1352 //
1353 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16mf4(
1354 // CHECK-RV64-NEXT: entry:
1355 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
1356 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1357 // CHECK-RV64-NEXT: ret void
1358 //
// Ordered indexed store of u16mf4 data using 32-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei32_v_u16mf4(uint16_t *base, vuint32mf2_t bindex,
                            vuint16mf4_t value, size_t vl) {
  return vsoxei32_v_u16mf4(base, bindex, value, vl);
}
1363
1364 //
1365 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16mf2(
1366 // CHECK-RV64-NEXT: entry:
1367 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
1368 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1369 // CHECK-RV64-NEXT: ret void
1370 //
// Ordered indexed store of u16mf2 data using 32-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei32_v_u16mf2(uint16_t *base, vuint32m1_t bindex,
                            vuint16mf2_t value, size_t vl) {
  return vsoxei32_v_u16mf2(base, bindex, value, vl);
}
1375
1376 //
1377 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16m1(
1378 // CHECK-RV64-NEXT: entry:
1379 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
1380 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1381 // CHECK-RV64-NEXT: ret void
1382 //
// Ordered indexed store of u16m1 data using 32-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei32_v_u16m1(uint16_t *base, vuint32m2_t bindex,
                           vuint16m1_t value, size_t vl) {
  return vsoxei32_v_u16m1(base, bindex, value, vl);
}
1387
1388 //
1389 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16m2(
1390 // CHECK-RV64-NEXT: entry:
1391 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
1392 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1393 // CHECK-RV64-NEXT: ret void
1394 //
// Ordered indexed store of u16m2 data using 32-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei32_v_u16m2(uint16_t *base, vuint32m4_t bindex,
                           vuint16m2_t value, size_t vl) {
  return vsoxei32_v_u16m2(base, bindex, value, vl);
}
1399
1400 //
1401 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16m4(
1402 // CHECK-RV64-NEXT: entry:
1403 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
1404 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i32.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1405 // CHECK-RV64-NEXT: ret void
1406 //
// Ordered indexed store of u16m4 data using 32-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei32_v_u16m4(uint16_t *base, vuint32m8_t bindex,
                           vuint16m4_t value, size_t vl) {
  return vsoxei32_v_u16m4(base, bindex, value, vl);
}
1411
1412 //
1413 // CHECK-RV64-LABEL: @test_vsoxei64_v_u16mf4(
1414 // CHECK-RV64-NEXT: entry:
1415 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
1416 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1417 // CHECK-RV64-NEXT: ret void
1418 //
// Ordered indexed store of u16mf4 data using 64-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei64_v_u16mf4(uint16_t *base, vuint64m1_t bindex,
                            vuint16mf4_t value, size_t vl) {
  return vsoxei64_v_u16mf4(base, bindex, value, vl);
}
1423
1424 //
1425 // CHECK-RV64-LABEL: @test_vsoxei64_v_u16mf2(
1426 // CHECK-RV64-NEXT: entry:
1427 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
1428 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1429 // CHECK-RV64-NEXT: ret void
1430 //
// Ordered indexed store of u16mf2 data using 64-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei64_v_u16mf2(uint16_t *base, vuint64m2_t bindex,
                            vuint16mf2_t value, size_t vl) {
  return vsoxei64_v_u16mf2(base, bindex, value, vl);
}
1435
1436 //
1437 // CHECK-RV64-LABEL: @test_vsoxei64_v_u16m1(
1438 // CHECK-RV64-NEXT: entry:
1439 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
1440 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1441 // CHECK-RV64-NEXT: ret void
1442 //
// Ordered indexed store of u16m1 data using 64-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei64_v_u16m1(uint16_t *base, vuint64m4_t bindex,
                           vuint16m1_t value, size_t vl) {
  return vsoxei64_v_u16m1(base, bindex, value, vl);
}
1447
1448 //
1449 // CHECK-RV64-LABEL: @test_vsoxei64_v_u16m2(
1450 // CHECK-RV64-NEXT: entry:
1451 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
1452 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1453 // CHECK-RV64-NEXT: ret void
1454 //
// Ordered indexed store of u16m2 data using 64-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei64_v_u16m2(uint16_t *base, vuint64m8_t bindex,
                           vuint16m2_t value, size_t vl) {
  return vsoxei64_v_u16m2(base, bindex, value, vl);
}
1459
1460 //
1461 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32mf2(
1462 // CHECK-RV64-NEXT: entry:
1463 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
1464 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1465 // CHECK-RV64-NEXT: ret void
1466 //
// Ordered indexed store of u32mf2 data using 8-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei8_v_u32mf2(uint32_t *base, vuint8mf8_t bindex,
                           vuint32mf2_t value, size_t vl) {
  return vsoxei8_v_u32mf2(base, bindex, value, vl);
}
1471
1472 //
1473 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32m1(
1474 // CHECK-RV64-NEXT: entry:
1475 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
1476 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1477 // CHECK-RV64-NEXT: ret void
1478 //
// Ordered indexed store of u32m1 data using 8-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei8_v_u32m1(uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value,
                          size_t vl) {
  return vsoxei8_v_u32m1(base, bindex, value, vl);
}
1483
1484 //
1485 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32m2(
1486 // CHECK-RV64-NEXT: entry:
1487 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
1488 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1489 // CHECK-RV64-NEXT: ret void
1490 //
// Ordered indexed store of u32m2 data using 8-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei8_v_u32m2(uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value,
                          size_t vl) {
  return vsoxei8_v_u32m2(base, bindex, value, vl);
}
1495
1496 //
1497 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32m4(
1498 // CHECK-RV64-NEXT: entry:
1499 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
1500 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i8.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1501 // CHECK-RV64-NEXT: ret void
1502 //
// Ordered indexed store of u32m4 data using 8-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei8_v_u32m4(uint32_t *base, vuint8m1_t bindex, vuint32m4_t value,
                          size_t vl) {
  return vsoxei8_v_u32m4(base, bindex, value, vl);
}
1507
1508 //
1509 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32m8(
1510 // CHECK-RV64-NEXT: entry:
1511 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
1512 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i8.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1513 // CHECK-RV64-NEXT: ret void
1514 //
// Ordered indexed store of u32m8 data using 8-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei8_v_u32m8(uint32_t *base, vuint8m2_t bindex, vuint32m8_t value,
                          size_t vl) {
  return vsoxei8_v_u32m8(base, bindex, value, vl);
}
1519
1520 //
1521 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32mf2(
1522 // CHECK-RV64-NEXT: entry:
1523 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
1524 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1525 // CHECK-RV64-NEXT: ret void
1526 //
// Ordered indexed store of u32mf2 data using 16-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei16_v_u32mf2(uint32_t *base, vuint16mf4_t bindex,
                            vuint32mf2_t value, size_t vl) {
  return vsoxei16_v_u32mf2(base, bindex, value, vl);
}
1531
1532 //
1533 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32m1(
1534 // CHECK-RV64-NEXT: entry:
1535 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
1536 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1537 // CHECK-RV64-NEXT: ret void
1538 //
// Ordered indexed store of u32m1 data using 16-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei16_v_u32m1(uint32_t *base, vuint16mf2_t bindex,
                           vuint32m1_t value, size_t vl) {
  return vsoxei16_v_u32m1(base, bindex, value, vl);
}
1543
1544 //
1545 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32m2(
1546 // CHECK-RV64-NEXT: entry:
1547 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
1548 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1549 // CHECK-RV64-NEXT: ret void
1550 //
// Ordered indexed store of u32m2 data using 16-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei16_v_u32m2(uint32_t *base, vuint16m1_t bindex,
                           vuint32m2_t value, size_t vl) {
  return vsoxei16_v_u32m2(base, bindex, value, vl);
}
1555
1556 //
1557 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32m4(
1558 // CHECK-RV64-NEXT: entry:
1559 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
1560 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1561 // CHECK-RV64-NEXT: ret void
1562 //
// Ordered indexed store of u32m4 data using 16-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei16_v_u32m4(uint32_t *base, vuint16m2_t bindex,
                           vuint32m4_t value, size_t vl) {
  return vsoxei16_v_u32m4(base, bindex, value, vl);
}
1567
1568 //
1569 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32m8(
1570 // CHECK-RV64-NEXT: entry:
1571 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
1572 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i16.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1573 // CHECK-RV64-NEXT: ret void
1574 //
// Ordered indexed store of u32m8 data using 16-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei16_v_u32m8(uint32_t *base, vuint16m4_t bindex,
                           vuint32m8_t value, size_t vl) {
  return vsoxei16_v_u32m8(base, bindex, value, vl);
}
1579
1580 //
1581 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32mf2(
1582 // CHECK-RV64-NEXT: entry:
1583 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
1584 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1585 // CHECK-RV64-NEXT: ret void
1586 //
// Ordered indexed store of u32mf2 data using 32-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei32_v_u32mf2(uint32_t *base, vuint32mf2_t bindex,
                            vuint32mf2_t value, size_t vl) {
  return vsoxei32_v_u32mf2(base, bindex, value, vl);
}
1591
1592 //
1593 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32m1(
1594 // CHECK-RV64-NEXT: entry:
1595 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
1596 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1597 // CHECK-RV64-NEXT: ret void
1598 //
// Ordered indexed store of u32m1 data using 32-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei32_v_u32m1(uint32_t *base, vuint32m1_t bindex,
                           vuint32m1_t value, size_t vl) {
  return vsoxei32_v_u32m1(base, bindex, value, vl);
}
1603
1604 //
1605 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32m2(
1606 // CHECK-RV64-NEXT: entry:
1607 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
1608 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1609 // CHECK-RV64-NEXT: ret void
1610 //
// Ordered indexed store of u32m2 data using 32-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei32_v_u32m2(uint32_t *base, vuint32m2_t bindex,
                           vuint32m2_t value, size_t vl) {
  return vsoxei32_v_u32m2(base, bindex, value, vl);
}
1615
1616 //
1617 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32m4(
1618 // CHECK-RV64-NEXT: entry:
1619 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
1620 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1621 // CHECK-RV64-NEXT: ret void
1622 //
// Ordered indexed store of u32m4 data using 32-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei32_v_u32m4(uint32_t *base, vuint32m4_t bindex,
                           vuint32m4_t value, size_t vl) {
  return vsoxei32_v_u32m4(base, bindex, value, vl);
}
1627
1628 //
1629 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32m8(
1630 // CHECK-RV64-NEXT: entry:
1631 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
1632 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1633 // CHECK-RV64-NEXT: ret void
1634 //
// Ordered indexed store of u32m8 data using 32-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei32_v_u32m8(uint32_t *base, vuint32m8_t bindex,
                           vuint32m8_t value, size_t vl) {
  return vsoxei32_v_u32m8(base, bindex, value, vl);
}
1639
1640 //
1641 // CHECK-RV64-LABEL: @test_vsoxei64_v_u32mf2(
1642 // CHECK-RV64-NEXT: entry:
1643 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
1644 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1645 // CHECK-RV64-NEXT: ret void
1646 //
// Ordered indexed store of u32mf2 data using 64-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei64_v_u32mf2(uint32_t *base, vuint64m1_t bindex,
                            vuint32mf2_t value, size_t vl) {
  return vsoxei64_v_u32mf2(base, bindex, value, vl);
}
1651
1652 //
1653 // CHECK-RV64-LABEL: @test_vsoxei64_v_u32m1(
1654 // CHECK-RV64-NEXT: entry:
1655 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
1656 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1657 // CHECK-RV64-NEXT: ret void
1658 //
// Ordered indexed store of u32m1 data using 64-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei64_v_u32m1(uint32_t *base, vuint64m2_t bindex,
                           vuint32m1_t value, size_t vl) {
  return vsoxei64_v_u32m1(base, bindex, value, vl);
}
1663
1664 //
1665 // CHECK-RV64-LABEL: @test_vsoxei64_v_u32m2(
1666 // CHECK-RV64-NEXT: entry:
1667 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
1668 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1669 // CHECK-RV64-NEXT: ret void
1670 //
// Ordered indexed store of u32m2 data using 64-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei64_v_u32m2(uint32_t *base, vuint64m4_t bindex,
                           vuint32m2_t value, size_t vl) {
  return vsoxei64_v_u32m2(base, bindex, value, vl);
}
1675
1676 //
1677 // CHECK-RV64-LABEL: @test_vsoxei64_v_u32m4(
1678 // CHECK-RV64-NEXT: entry:
1679 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
1680 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1681 // CHECK-RV64-NEXT: ret void
1682 //
// Ordered indexed store of u32m4 data using 64-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei64_v_u32m4(uint32_t *base, vuint64m8_t bindex,
                           vuint32m4_t value, size_t vl) {
  return vsoxei64_v_u32m4(base, bindex, value, vl);
}
1687
1688 //
1689 // CHECK-RV64-LABEL: @test_vsoxei8_v_u64m1(
1690 // CHECK-RV64-NEXT: entry:
1691 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
1692 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1693 // CHECK-RV64-NEXT: ret void
1694 //
// Ordered indexed store of u64m1 data using 8-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei8_v_u64m1(uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value,
                          size_t vl) {
  return vsoxei8_v_u64m1(base, bindex, value, vl);
}
1699
1700 //
1701 // CHECK-RV64-LABEL: @test_vsoxei8_v_u64m2(
1702 // CHECK-RV64-NEXT: entry:
1703 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
1704 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1705 // CHECK-RV64-NEXT: ret void
1706 //
// Ordered indexed store of u64m2 data using 8-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei8_v_u64m2(uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value,
                          size_t vl) {
  return vsoxei8_v_u64m2(base, bindex, value, vl);
}
1711
1712 //
1713 // CHECK-RV64-LABEL: @test_vsoxei8_v_u64m4(
1714 // CHECK-RV64-NEXT: entry:
1715 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
1716 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1717 // CHECK-RV64-NEXT: ret void
1718 //
// Ordered indexed store of u64m4 data using 8-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei8_v_u64m4(uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value,
                          size_t vl) {
  return vsoxei8_v_u64m4(base, bindex, value, vl);
}
1723
1724 //
1725 // CHECK-RV64-LABEL: @test_vsoxei8_v_u64m8(
1726 // CHECK-RV64-NEXT: entry:
1727 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
1728 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i8.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1729 // CHECK-RV64-NEXT: ret void
1730 //
// Ordered indexed store of u64m8 data using 8-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei8_v_u64m8(uint64_t *base, vuint8m1_t bindex, vuint64m8_t value,
                          size_t vl) {
  return vsoxei8_v_u64m8(base, bindex, value, vl);
}
1735
1736 //
1737 // CHECK-RV64-LABEL: @test_vsoxei16_v_u64m1(
1738 // CHECK-RV64-NEXT: entry:
1739 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
1740 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1741 // CHECK-RV64-NEXT: ret void
1742 //
// Ordered indexed store of u64m1 data using 16-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei16_v_u64m1(uint64_t *base, vuint16mf4_t bindex,
                           vuint64m1_t value, size_t vl) {
  return vsoxei16_v_u64m1(base, bindex, value, vl);
}
1747
1748 //
1749 // CHECK-RV64-LABEL: @test_vsoxei16_v_u64m2(
1750 // CHECK-RV64-NEXT: entry:
1751 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
1752 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1753 // CHECK-RV64-NEXT: ret void
1754 //
// Ordered indexed store of u64m2 data using 16-bit indices (IR pinned by the
// autogenerated CHECK lines above).
void test_vsoxei16_v_u64m2(uint64_t *base, vuint16mf2_t bindex,
                           vuint64m2_t value, size_t vl) {
  return vsoxei16_v_u64m2(base, bindex, value, vl);
}
1759
1760 //
1761 // CHECK-RV64-LABEL: @test_vsoxei16_v_u64m4(
1762 // CHECK-RV64-NEXT: entry:
1763 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
1764 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1765 // CHECK-RV64-NEXT: ret void
1766 //
// Indexed-ordered store (vsoxei): u64m4 data, 16-bit index vector.
void test_vsoxei16_v_u64m4(uint64_t *base, vuint16m1_t bindex,
                           vuint64m4_t value, size_t vl) {
  return vsoxei16_v_u64m4(base, bindex, value, vl);
}
1771
1772 //
1773 // CHECK-RV64-LABEL: @test_vsoxei16_v_u64m8(
1774 // CHECK-RV64-NEXT: entry:
1775 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
1776 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i16.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1777 // CHECK-RV64-NEXT: ret void
1778 //
// Indexed-ordered store (vsoxei): u64m8 data, 16-bit index vector.
void test_vsoxei16_v_u64m8(uint64_t *base, vuint16m2_t bindex,
                           vuint64m8_t value, size_t vl) {
  return vsoxei16_v_u64m8(base, bindex, value, vl);
}
1783
1784 //
1785 // CHECK-RV64-LABEL: @test_vsoxei32_v_u64m1(
1786 // CHECK-RV64-NEXT: entry:
1787 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
1788 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1789 // CHECK-RV64-NEXT: ret void
1790 //
// Indexed-ordered store (vsoxei): u64m1 data, 32-bit index vector.
void test_vsoxei32_v_u64m1(uint64_t *base, vuint32mf2_t bindex,
                           vuint64m1_t value, size_t vl) {
  return vsoxei32_v_u64m1(base, bindex, value, vl);
}
1795
1796 //
1797 // CHECK-RV64-LABEL: @test_vsoxei32_v_u64m2(
1798 // CHECK-RV64-NEXT: entry:
1799 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
1800 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1801 // CHECK-RV64-NEXT: ret void
1802 //
// Indexed-ordered store (vsoxei): u64m2 data, 32-bit index vector.
void test_vsoxei32_v_u64m2(uint64_t *base, vuint32m1_t bindex,
                           vuint64m2_t value, size_t vl) {
  return vsoxei32_v_u64m2(base, bindex, value, vl);
}
1807
1808 //
1809 // CHECK-RV64-LABEL: @test_vsoxei32_v_u64m4(
1810 // CHECK-RV64-NEXT: entry:
1811 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
1812 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1813 // CHECK-RV64-NEXT: ret void
1814 //
// Indexed-ordered store (vsoxei): u64m4 data, 32-bit index vector.
void test_vsoxei32_v_u64m4(uint64_t *base, vuint32m2_t bindex,
                           vuint64m4_t value, size_t vl) {
  return vsoxei32_v_u64m4(base, bindex, value, vl);
}
1819
1820 //
1821 // CHECK-RV64-LABEL: @test_vsoxei32_v_u64m8(
1822 // CHECK-RV64-NEXT: entry:
1823 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
1824 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i32.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1825 // CHECK-RV64-NEXT: ret void
1826 //
// Indexed-ordered store (vsoxei): u64m8 data, 32-bit index vector.
void test_vsoxei32_v_u64m8(uint64_t *base, vuint32m4_t bindex,
                           vuint64m8_t value, size_t vl) {
  return vsoxei32_v_u64m8(base, bindex, value, vl);
}
1831
1832 //
1833 // CHECK-RV64-LABEL: @test_vsoxei64_v_u64m1(
1834 // CHECK-RV64-NEXT: entry:
1835 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
1836 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1837 // CHECK-RV64-NEXT: ret void
1838 //
// Indexed-ordered store (vsoxei): u64m1 data, 64-bit index vector.
void test_vsoxei64_v_u64m1(uint64_t *base, vuint64m1_t bindex,
                           vuint64m1_t value, size_t vl) {
  return vsoxei64_v_u64m1(base, bindex, value, vl);
}
1843
1844 //
1845 // CHECK-RV64-LABEL: @test_vsoxei64_v_u64m2(
1846 // CHECK-RV64-NEXT: entry:
1847 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
1848 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1849 // CHECK-RV64-NEXT: ret void
1850 //
// Indexed-ordered store (vsoxei): u64m2 data, 64-bit index vector.
void test_vsoxei64_v_u64m2(uint64_t *base, vuint64m2_t bindex,
                           vuint64m2_t value, size_t vl) {
  return vsoxei64_v_u64m2(base, bindex, value, vl);
}
1855
1856 //
1857 // CHECK-RV64-LABEL: @test_vsoxei64_v_u64m4(
1858 // CHECK-RV64-NEXT: entry:
1859 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
1860 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1861 // CHECK-RV64-NEXT: ret void
1862 //
// Indexed-ordered store (vsoxei): u64m4 data, 64-bit index vector.
void test_vsoxei64_v_u64m4(uint64_t *base, vuint64m4_t bindex,
                           vuint64m4_t value, size_t vl) {
  return vsoxei64_v_u64m4(base, bindex, value, vl);
}
1867
1868 //
1869 // CHECK-RV64-LABEL: @test_vsoxei64_v_u64m8(
1870 // CHECK-RV64-NEXT: entry:
1871 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
1872 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1873 // CHECK-RV64-NEXT: ret void
1874 //
// Indexed-ordered store (vsoxei): u64m8 data, 64-bit index vector.
void test_vsoxei64_v_u64m8(uint64_t *base, vuint64m8_t bindex,
                           vuint64m8_t value, size_t vl) {
  return vsoxei64_v_u64m8(base, bindex, value, vl);
}
1879
1880 //
1881 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32mf2(
1882 // CHECK-RV64-NEXT: entry:
1883 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
1884 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1885 // CHECK-RV64-NEXT: ret void
1886 //
// Indexed-ordered store (vsoxei): f32mf2 data, 8-bit index vector.
void test_vsoxei8_v_f32mf2(float *base, vuint8mf8_t bindex, vfloat32mf2_t value,
                           size_t vl) {
  return vsoxei8_v_f32mf2(base, bindex, value, vl);
}
1891
1892 //
1893 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32m1(
1894 // CHECK-RV64-NEXT: entry:
1895 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
1896 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1897 // CHECK-RV64-NEXT: ret void
1898 //
// Indexed-ordered store (vsoxei): f32m1 data, 8-bit index vector.
void test_vsoxei8_v_f32m1(float *base, vuint8mf4_t bindex, vfloat32m1_t value,
                          size_t vl) {
  return vsoxei8_v_f32m1(base, bindex, value, vl);
}
1903
1904 //
1905 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32m2(
1906 // CHECK-RV64-NEXT: entry:
1907 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
1908 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i8.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1909 // CHECK-RV64-NEXT: ret void
1910 //
// Indexed-ordered store (vsoxei): f32m2 data, 8-bit index vector.
void test_vsoxei8_v_f32m2(float *base, vuint8mf2_t bindex, vfloat32m2_t value,
                          size_t vl) {
  return vsoxei8_v_f32m2(base, bindex, value, vl);
}
1915
1916 //
1917 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32m4(
1918 // CHECK-RV64-NEXT: entry:
1919 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
1920 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i8.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1921 // CHECK-RV64-NEXT: ret void
1922 //
// Indexed-ordered store (vsoxei): f32m4 data, 8-bit index vector.
void test_vsoxei8_v_f32m4(float *base, vuint8m1_t bindex, vfloat32m4_t value,
                          size_t vl) {
  return vsoxei8_v_f32m4(base, bindex, value, vl);
}
1927
1928 //
1929 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32m8(
1930 // CHECK-RV64-NEXT: entry:
1931 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
1932 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i8.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1933 // CHECK-RV64-NEXT: ret void
1934 //
// Indexed-ordered store (vsoxei): f32m8 data, 8-bit index vector.
void test_vsoxei8_v_f32m8(float *base, vuint8m2_t bindex, vfloat32m8_t value,
                          size_t vl) {
  return vsoxei8_v_f32m8(base, bindex, value, vl);
}
1939
1940 //
1941 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32mf2(
1942 // CHECK-RV64-NEXT: entry:
1943 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
1944 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1945 // CHECK-RV64-NEXT: ret void
1946 //
// Indexed-ordered store (vsoxei): f32mf2 data, 16-bit index vector.
void test_vsoxei16_v_f32mf2(float *base, vuint16mf4_t bindex,
                            vfloat32mf2_t value, size_t vl) {
  return vsoxei16_v_f32mf2(base, bindex, value, vl);
}
1951
1952 //
1953 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32m1(
1954 // CHECK-RV64-NEXT: entry:
1955 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
1956 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1957 // CHECK-RV64-NEXT: ret void
1958 //
// Indexed-ordered store (vsoxei): f32m1 data, 16-bit index vector.
void test_vsoxei16_v_f32m1(float *base, vuint16mf2_t bindex, vfloat32m1_t value,
                           size_t vl) {
  return vsoxei16_v_f32m1(base, bindex, value, vl);
}
1963
1964 //
1965 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32m2(
1966 // CHECK-RV64-NEXT: entry:
1967 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
1968 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i16.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1969 // CHECK-RV64-NEXT: ret void
1970 //
// Indexed-ordered store (vsoxei): f32m2 data, 16-bit index vector.
void test_vsoxei16_v_f32m2(float *base, vuint16m1_t bindex, vfloat32m2_t value,
                           size_t vl) {
  return vsoxei16_v_f32m2(base, bindex, value, vl);
}
1975
1976 //
1977 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32m4(
1978 // CHECK-RV64-NEXT: entry:
1979 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
1980 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i16.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1981 // CHECK-RV64-NEXT: ret void
1982 //
// Indexed-ordered store (vsoxei): f32m4 data, 16-bit index vector.
void test_vsoxei16_v_f32m4(float *base, vuint16m2_t bindex, vfloat32m4_t value,
                           size_t vl) {
  return vsoxei16_v_f32m4(base, bindex, value, vl);
}
1987
1988 //
1989 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32m8(
1990 // CHECK-RV64-NEXT: entry:
1991 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
1992 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i16.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1993 // CHECK-RV64-NEXT: ret void
1994 //
// Indexed-ordered store (vsoxei): f32m8 data, 16-bit index vector.
void test_vsoxei16_v_f32m8(float *base, vuint16m4_t bindex, vfloat32m8_t value,
                           size_t vl) {
  return vsoxei16_v_f32m8(base, bindex, value, vl);
}
1999
2000 //
2001 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32mf2(
2002 // CHECK-RV64-NEXT: entry:
2003 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
2004 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2005 // CHECK-RV64-NEXT: ret void
2006 //
// Indexed-ordered store (vsoxei): f32mf2 data, 32-bit index vector.
void test_vsoxei32_v_f32mf2(float *base, vuint32mf2_t bindex,
                            vfloat32mf2_t value, size_t vl) {
  return vsoxei32_v_f32mf2(base, bindex, value, vl);
}
2011
2012 //
2013 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32m1(
2014 // CHECK-RV64-NEXT: entry:
2015 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
2016 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2017 // CHECK-RV64-NEXT: ret void
2018 //
// Indexed-ordered store (vsoxei): f32m1 data, 32-bit index vector.
void test_vsoxei32_v_f32m1(float *base, vuint32m1_t bindex, vfloat32m1_t value,
                           size_t vl) {
  return vsoxei32_v_f32m1(base, bindex, value, vl);
}
2023
2024 //
2025 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32m2(
2026 // CHECK-RV64-NEXT: entry:
2027 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
2028 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i32.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2029 // CHECK-RV64-NEXT: ret void
2030 //
// Indexed-ordered store (vsoxei): f32m2 data, 32-bit index vector.
void test_vsoxei32_v_f32m2(float *base, vuint32m2_t bindex, vfloat32m2_t value,
                           size_t vl) {
  return vsoxei32_v_f32m2(base, bindex, value, vl);
}
2035
2036 //
2037 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32m4(
2038 // CHECK-RV64-NEXT: entry:
2039 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
2040 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i32.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2041 // CHECK-RV64-NEXT: ret void
2042 //
// Indexed-ordered store (vsoxei): f32m4 data, 32-bit index vector.
void test_vsoxei32_v_f32m4(float *base, vuint32m4_t bindex, vfloat32m4_t value,
                           size_t vl) {
  return vsoxei32_v_f32m4(base, bindex, value, vl);
}
2047
2048 //
2049 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32m8(
2050 // CHECK-RV64-NEXT: entry:
2051 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
2052 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i32.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2053 // CHECK-RV64-NEXT: ret void
2054 //
// Indexed-ordered store (vsoxei): f32m8 data, 32-bit index vector.
void test_vsoxei32_v_f32m8(float *base, vuint32m8_t bindex, vfloat32m8_t value,
                           size_t vl) {
  return vsoxei32_v_f32m8(base, bindex, value, vl);
}
2059
2060 //
2061 // CHECK-RV64-LABEL: @test_vsoxei64_v_f32mf2(
2062 // CHECK-RV64-NEXT: entry:
2063 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
2064 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2065 // CHECK-RV64-NEXT: ret void
2066 //
// Indexed-ordered store (vsoxei): f32mf2 data, 64-bit index vector.
void test_vsoxei64_v_f32mf2(float *base, vuint64m1_t bindex,
                            vfloat32mf2_t value, size_t vl) {
  return vsoxei64_v_f32mf2(base, bindex, value, vl);
}
2071
2072 //
2073 // CHECK-RV64-LABEL: @test_vsoxei64_v_f32m1(
2074 // CHECK-RV64-NEXT: entry:
2075 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
2076 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2077 // CHECK-RV64-NEXT: ret void
2078 //
// Indexed-ordered store (vsoxei): f32m1 data, 64-bit index vector.
void test_vsoxei64_v_f32m1(float *base, vuint64m2_t bindex, vfloat32m1_t value,
                           size_t vl) {
  return vsoxei64_v_f32m1(base, bindex, value, vl);
}
2083
2084 //
2085 // CHECK-RV64-LABEL: @test_vsoxei64_v_f32m2(
2086 // CHECK-RV64-NEXT: entry:
2087 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
2088 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i64.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2089 // CHECK-RV64-NEXT: ret void
2090 //
// Indexed-ordered store (vsoxei): f32m2 data, 64-bit index vector.
void test_vsoxei64_v_f32m2(float *base, vuint64m4_t bindex, vfloat32m2_t value,
                           size_t vl) {
  return vsoxei64_v_f32m2(base, bindex, value, vl);
}
2095
2096 //
2097 // CHECK-RV64-LABEL: @test_vsoxei64_v_f32m4(
2098 // CHECK-RV64-NEXT: entry:
2099 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
2100 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i64.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2101 // CHECK-RV64-NEXT: ret void
2102 //
// Indexed-ordered store (vsoxei): f32m4 data, 64-bit index vector (largest f32
// group usable with EEW=64 indices).
void test_vsoxei64_v_f32m4(float *base, vuint64m8_t bindex, vfloat32m4_t value,
                           size_t vl) {
  return vsoxei64_v_f32m4(base, bindex, value, vl);
}
2107
2108 //
2109 // CHECK-RV64-LABEL: @test_vsoxei8_v_f64m1(
2110 // CHECK-RV64-NEXT: entry:
2111 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
2112 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2113 // CHECK-RV64-NEXT: ret void
2114 //
// Indexed-ordered store (vsoxei): f64m1 data, 8-bit index vector.
void test_vsoxei8_v_f64m1(double *base, vuint8mf8_t bindex, vfloat64m1_t value,
                          size_t vl) {
  return vsoxei8_v_f64m1(base, bindex, value, vl);
}
2119
2120 //
2121 // CHECK-RV64-LABEL: @test_vsoxei8_v_f64m2(
2122 // CHECK-RV64-NEXT: entry:
2123 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
2124 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i8.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2125 // CHECK-RV64-NEXT: ret void
2126 //
// Indexed-ordered store (vsoxei): f64m2 data, 8-bit index vector.
void test_vsoxei8_v_f64m2(double *base, vuint8mf4_t bindex, vfloat64m2_t value,
                          size_t vl) {
  return vsoxei8_v_f64m2(base, bindex, value, vl);
}
2131
2132 //
2133 // CHECK-RV64-LABEL: @test_vsoxei8_v_f64m4(
2134 // CHECK-RV64-NEXT: entry:
2135 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
2136 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i8.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2137 // CHECK-RV64-NEXT: ret void
2138 //
// Indexed-ordered store (vsoxei): f64m4 data, 8-bit index vector.
void test_vsoxei8_v_f64m4(double *base, vuint8mf2_t bindex, vfloat64m4_t value,
                          size_t vl) {
  return vsoxei8_v_f64m4(base, bindex, value, vl);
}
2143
2144 //
2145 // CHECK-RV64-LABEL: @test_vsoxei8_v_f64m8(
2146 // CHECK-RV64-NEXT: entry:
2147 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
2148 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i8.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2149 // CHECK-RV64-NEXT: ret void
2150 //
// Indexed-ordered store (vsoxei): f64m8 data, 8-bit index vector.
void test_vsoxei8_v_f64m8(double *base, vuint8m1_t bindex, vfloat64m8_t value,
                          size_t vl) {
  return vsoxei8_v_f64m8(base, bindex, value, vl);
}
2155
2156 //
2157 // CHECK-RV64-LABEL: @test_vsoxei16_v_f64m1(
2158 // CHECK-RV64-NEXT: entry:
2159 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
2160 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2161 // CHECK-RV64-NEXT: ret void
2162 //
// Indexed-ordered store (vsoxei): f64m1 data, 16-bit index vector.
void test_vsoxei16_v_f64m1(double *base, vuint16mf4_t bindex,
                           vfloat64m1_t value, size_t vl) {
  return vsoxei16_v_f64m1(base, bindex, value, vl);
}
2167
2168 //
2169 // CHECK-RV64-LABEL: @test_vsoxei16_v_f64m2(
2170 // CHECK-RV64-NEXT: entry:
2171 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
2172 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i16.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2173 // CHECK-RV64-NEXT: ret void
2174 //
// Indexed-ordered store (vsoxei): f64m2 data, 16-bit index vector.
void test_vsoxei16_v_f64m2(double *base, vuint16mf2_t bindex,
                           vfloat64m2_t value, size_t vl) {
  return vsoxei16_v_f64m2(base, bindex, value, vl);
}
2179
2180 //
2181 // CHECK-RV64-LABEL: @test_vsoxei16_v_f64m4(
2182 // CHECK-RV64-NEXT: entry:
2183 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
2184 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i16.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2185 // CHECK-RV64-NEXT: ret void
2186 //
// Indexed-ordered store (vsoxei): f64m4 data, 16-bit index vector.
void test_vsoxei16_v_f64m4(double *base, vuint16m1_t bindex, vfloat64m4_t value,
                           size_t vl) {
  return vsoxei16_v_f64m4(base, bindex, value, vl);
}
2191
2192 //
2193 // CHECK-RV64-LABEL: @test_vsoxei16_v_f64m8(
2194 // CHECK-RV64-NEXT: entry:
2195 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
2196 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i16.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2197 // CHECK-RV64-NEXT: ret void
2198 //
// Indexed-ordered store (vsoxei): f64m8 data, 16-bit index vector.
void test_vsoxei16_v_f64m8(double *base, vuint16m2_t bindex, vfloat64m8_t value,
                           size_t vl) {
  return vsoxei16_v_f64m8(base, bindex, value, vl);
}
2203
2204 //
2205 // CHECK-RV64-LABEL: @test_vsoxei32_v_f64m1(
2206 // CHECK-RV64-NEXT: entry:
2207 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
2208 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2209 // CHECK-RV64-NEXT: ret void
2210 //
// Indexed-ordered store (vsoxei): f64m1 data, 32-bit index vector.
void test_vsoxei32_v_f64m1(double *base, vuint32mf2_t bindex,
                           vfloat64m1_t value, size_t vl) {
  return vsoxei32_v_f64m1(base, bindex, value, vl);
}
2215
2216 //
2217 // CHECK-RV64-LABEL: @test_vsoxei32_v_f64m2(
2218 // CHECK-RV64-NEXT: entry:
2219 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
2220 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i32.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2221 // CHECK-RV64-NEXT: ret void
2222 //
// Indexed-ordered store (vsoxei): f64m2 data, 32-bit index vector.
void test_vsoxei32_v_f64m2(double *base, vuint32m1_t bindex, vfloat64m2_t value,
                           size_t vl) {
  return vsoxei32_v_f64m2(base, bindex, value, vl);
}
2227
2228 //
2229 // CHECK-RV64-LABEL: @test_vsoxei32_v_f64m4(
2230 // CHECK-RV64-NEXT: entry:
2231 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
2232 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i32.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2233 // CHECK-RV64-NEXT: ret void
2234 //
// Indexed-ordered store (vsoxei): f64m4 data, 32-bit index vector.
void test_vsoxei32_v_f64m4(double *base, vuint32m2_t bindex, vfloat64m4_t value,
                           size_t vl) {
  return vsoxei32_v_f64m4(base, bindex, value, vl);
}
2239
2240 //
2241 // CHECK-RV64-LABEL: @test_vsoxei32_v_f64m8(
2242 // CHECK-RV64-NEXT: entry:
2243 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
2244 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i32.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2245 // CHECK-RV64-NEXT: ret void
2246 //
// Indexed-ordered store (vsoxei): f64m8 data, 32-bit index vector.
void test_vsoxei32_v_f64m8(double *base, vuint32m4_t bindex, vfloat64m8_t value,
                           size_t vl) {
  return vsoxei32_v_f64m8(base, bindex, value, vl);
}
2251
2252 //
2253 // CHECK-RV64-LABEL: @test_vsoxei64_v_f64m1(
2254 // CHECK-RV64-NEXT: entry:
2255 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
2256 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2257 // CHECK-RV64-NEXT: ret void
2258 //
// Indexed-ordered store (vsoxei): f64m1 data, 64-bit index vector.
void test_vsoxei64_v_f64m1(double *base, vuint64m1_t bindex, vfloat64m1_t value,
                           size_t vl) {
  return vsoxei64_v_f64m1(base, bindex, value, vl);
}
2263
2264 //
2265 // CHECK-RV64-LABEL: @test_vsoxei64_v_f64m2(
2266 // CHECK-RV64-NEXT: entry:
2267 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
2268 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i64.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2269 // CHECK-RV64-NEXT: ret void
2270 //
// Indexed-ordered store (vsoxei): f64m2 data, 64-bit index vector.
void test_vsoxei64_v_f64m2(double *base, vuint64m2_t bindex, vfloat64m2_t value,
                           size_t vl) {
  return vsoxei64_v_f64m2(base, bindex, value, vl);
}
2275
2276 //
2277 // CHECK-RV64-LABEL: @test_vsoxei64_v_f64m4(
2278 // CHECK-RV64-NEXT: entry:
2279 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
2280 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i64.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2281 // CHECK-RV64-NEXT: ret void
2282 //
// Indexed-ordered store (vsoxei): f64m4 data, 64-bit index vector.
void test_vsoxei64_v_f64m4(double *base, vuint64m4_t bindex, vfloat64m4_t value,
                           size_t vl) {
  return vsoxei64_v_f64m4(base, bindex, value, vl);
}
2287
2288 //
2289 // CHECK-RV64-LABEL: @test_vsoxei64_v_f64m8(
2290 // CHECK-RV64-NEXT: entry:
2291 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
2292 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i64.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2293 // CHECK-RV64-NEXT: ret void
2294 //
// Indexed-ordered store (vsoxei): f64m8 data, 64-bit index vector.
void test_vsoxei64_v_f64m8(double *base, vuint64m8_t bindex, vfloat64m8_t value,
                           size_t vl) {
  return vsoxei64_v_f64m8(base, bindex, value, vl);
}
2299
2300 //
2301 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf8_m(
2302 // CHECK-RV64-NEXT: entry:
2303 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
2304 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2305 // CHECK-RV64-NEXT: ret void
2306 //
// Masked indexed-ordered store (vsoxei.mask): i8mf8 data, 8-bit indices,
// vbool64_t mask operand lowered as <vscale x 1 x i1>.
void test_vsoxei8_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint8mf8_t bindex,
                            vint8mf8_t value, size_t vl) {
  return vsoxei8_v_i8mf8_m(mask, base, bindex, value, vl);
}
2311
2312 //
2313 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf4_m(
2314 // CHECK-RV64-NEXT: entry:
2315 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
2316 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2317 // CHECK-RV64-NEXT: ret void
2318 //
// Masked indexed-ordered store (vsoxei.mask): i8mf4 data, 8-bit indices.
void test_vsoxei8_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint8mf4_t bindex,
                            vint8mf4_t value, size_t vl) {
  return vsoxei8_v_i8mf4_m(mask, base, bindex, value, vl);
}
2323
2324 //
2325 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf2_m(
2326 // CHECK-RV64-NEXT: entry:
2327 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
2328 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2329 // CHECK-RV64-NEXT: ret void
2330 //
// Masked ordered indexed store of vint8mf2_t using vuint8mf2_t byte offsets.
void test_vsoxei8_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint8mf2_t bindex,
                            vint8mf2_t value, size_t vl) {
  return vsoxei8_v_i8mf2_m(mask, base, bindex, value, vl);
}
2335
2336 //
2337 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8m1_m(
2338 // CHECK-RV64-NEXT: entry:
2339 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
2340 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2341 // CHECK-RV64-NEXT: ret void
2342 //
// Masked ordered indexed store of vint8m1_t using vuint8m1_t byte offsets.
void test_vsoxei8_v_i8m1_m(vbool8_t mask, int8_t *base, vuint8m1_t bindex,
                           vint8m1_t value, size_t vl) {
  return vsoxei8_v_i8m1_m(mask, base, bindex, value, vl);
}
2347
2348 //
2349 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8m2_m(
2350 // CHECK-RV64-NEXT: entry:
2351 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
2352 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2353 // CHECK-RV64-NEXT: ret void
2354 //
// Masked ordered indexed store of vint8m2_t using vuint8m2_t byte offsets.
void test_vsoxei8_v_i8m2_m(vbool4_t mask, int8_t *base, vuint8m2_t bindex,
                           vint8m2_t value, size_t vl) {
  return vsoxei8_v_i8m2_m(mask, base, bindex, value, vl);
}
2359
2360 //
2361 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8m4_m(
2362 // CHECK-RV64-NEXT: entry:
2363 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
2364 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2365 // CHECK-RV64-NEXT: ret void
2366 //
// Masked ordered indexed store of vint8m4_t using vuint8m4_t byte offsets.
void test_vsoxei8_v_i8m4_m(vbool2_t mask, int8_t *base, vuint8m4_t bindex,
                           vint8m4_t value, size_t vl) {
  return vsoxei8_v_i8m4_m(mask, base, bindex, value, vl);
}
2371
2372 //
2373 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8m8_m(
2374 // CHECK-RV64-NEXT: entry:
2375 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
2376 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], <vscale x 64 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2377 // CHECK-RV64-NEXT: ret void
2378 //
// Masked ordered indexed store of vint8m8_t using vuint8m8_t byte offsets.
void test_vsoxei8_v_i8m8_m(vbool1_t mask, int8_t *base, vuint8m8_t bindex,
                           vint8m8_t value, size_t vl) {
  return vsoxei8_v_i8m8_m(mask, base, bindex, value, vl);
}
2383
2384 //
2385 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf8_m(
2386 // CHECK-RV64-NEXT: entry:
2387 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
2388 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2389 // CHECK-RV64-NEXT: ret void
2390 //
// Masked ordered indexed store of vint8mf8_t using 16-bit (vuint16mf4_t) offsets.
void test_vsoxei16_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint16mf4_t bindex,
                             vint8mf8_t value, size_t vl) {
  return vsoxei16_v_i8mf8_m(mask, base, bindex, value, vl);
}
2395
2396 //
2397 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf4_m(
2398 // CHECK-RV64-NEXT: entry:
2399 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
2400 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2401 // CHECK-RV64-NEXT: ret void
2402 //
// Masked ordered indexed store of vint8mf4_t using 16-bit (vuint16mf2_t) offsets.
void test_vsoxei16_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint16mf2_t bindex,
                             vint8mf4_t value, size_t vl) {
  return vsoxei16_v_i8mf4_m(mask, base, bindex, value, vl);
}
2407
2408 //
2409 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf2_m(
2410 // CHECK-RV64-NEXT: entry:
2411 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
2412 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2413 // CHECK-RV64-NEXT: ret void
2414 //
// Masked ordered indexed store of vint8mf2_t using 16-bit (vuint16m1_t) offsets.
void test_vsoxei16_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint16m1_t bindex,
                             vint8mf2_t value, size_t vl) {
  return vsoxei16_v_i8mf2_m(mask, base, bindex, value, vl);
}
2419
2420 //
2421 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8m1_m(
2422 // CHECK-RV64-NEXT: entry:
2423 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
2424 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2425 // CHECK-RV64-NEXT: ret void
2426 //
// Masked ordered indexed store of vint8m1_t using 16-bit (vuint16m2_t) offsets.
void test_vsoxei16_v_i8m1_m(vbool8_t mask, int8_t *base, vuint16m2_t bindex,
                            vint8m1_t value, size_t vl) {
  return vsoxei16_v_i8m1_m(mask, base, bindex, value, vl);
}
2431
2432 //
2433 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8m2_m(
2434 // CHECK-RV64-NEXT: entry:
2435 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
2436 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2437 // CHECK-RV64-NEXT: ret void
2438 //
// Masked ordered indexed store of vint8m2_t using 16-bit (vuint16m4_t) offsets.
void test_vsoxei16_v_i8m2_m(vbool4_t mask, int8_t *base, vuint16m4_t bindex,
                            vint8m2_t value, size_t vl) {
  return vsoxei16_v_i8m2_m(mask, base, bindex, value, vl);
}
2443
2444 //
2445 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8m4_m(
2446 // CHECK-RV64-NEXT: entry:
2447 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
2448 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2449 // CHECK-RV64-NEXT: ret void
2450 //
// Masked ordered indexed store of vint8m4_t using 16-bit (vuint16m8_t) offsets.
void test_vsoxei16_v_i8m4_m(vbool2_t mask, int8_t *base, vuint16m8_t bindex,
                            vint8m4_t value, size_t vl) {
  return vsoxei16_v_i8m4_m(mask, base, bindex, value, vl);
}
2455
2456 //
2457 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf8_m(
2458 // CHECK-RV64-NEXT: entry:
2459 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
2460 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2461 // CHECK-RV64-NEXT: ret void
2462 //
// Masked ordered indexed store of vint8mf8_t using 32-bit (vuint32mf2_t) offsets.
void test_vsoxei32_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint32mf2_t bindex,
                             vint8mf8_t value, size_t vl) {
  return vsoxei32_v_i8mf8_m(mask, base, bindex, value, vl);
}
2467
2468 //
2469 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf4_m(
2470 // CHECK-RV64-NEXT: entry:
2471 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
2472 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2473 // CHECK-RV64-NEXT: ret void
2474 //
// Masked ordered indexed store of vint8mf4_t using 32-bit (vuint32m1_t) offsets.
void test_vsoxei32_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint32m1_t bindex,
                             vint8mf4_t value, size_t vl) {
  return vsoxei32_v_i8mf4_m(mask, base, bindex, value, vl);
}
2479
2480 //
2481 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf2_m(
2482 // CHECK-RV64-NEXT: entry:
2483 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
2484 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2485 // CHECK-RV64-NEXT: ret void
2486 //
// Masked ordered indexed store of vint8mf2_t using 32-bit (vuint32m2_t) offsets.
void test_vsoxei32_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint32m2_t bindex,
                             vint8mf2_t value, size_t vl) {
  return vsoxei32_v_i8mf2_m(mask, base, bindex, value, vl);
}
2491
2492 //
2493 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8m1_m(
2494 // CHECK-RV64-NEXT: entry:
2495 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
2496 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2497 // CHECK-RV64-NEXT: ret void
2498 //
// Masked ordered indexed store of vint8m1_t using 32-bit (vuint32m4_t) offsets.
void test_vsoxei32_v_i8m1_m(vbool8_t mask, int8_t *base, vuint32m4_t bindex,
                            vint8m1_t value, size_t vl) {
  return vsoxei32_v_i8m1_m(mask, base, bindex, value, vl);
}
2503
2504 //
2505 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8m2_m(
2506 // CHECK-RV64-NEXT: entry:
2507 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
2508 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2509 // CHECK-RV64-NEXT: ret void
2510 //
// Masked ordered indexed store of vint8m2_t using 32-bit (vuint32m8_t) offsets.
void test_vsoxei32_v_i8m2_m(vbool4_t mask, int8_t *base, vuint32m8_t bindex,
                            vint8m2_t value, size_t vl) {
  return vsoxei32_v_i8m2_m(mask, base, bindex, value, vl);
}
2515
2516 //
2517 // CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf8_m(
2518 // CHECK-RV64-NEXT: entry:
2519 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
2520 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2521 // CHECK-RV64-NEXT: ret void
2522 //
// Masked ordered indexed store of vint8mf8_t using 64-bit (vuint64m1_t) offsets.
void test_vsoxei64_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint64m1_t bindex,
                             vint8mf8_t value, size_t vl) {
  return vsoxei64_v_i8mf8_m(mask, base, bindex, value, vl);
}
2527
2528 //
2529 // CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf4_m(
2530 // CHECK-RV64-NEXT: entry:
2531 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
2532 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2533 // CHECK-RV64-NEXT: ret void
2534 //
// Masked ordered indexed store of vint8mf4_t using 64-bit (vuint64m2_t) offsets.
void test_vsoxei64_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint64m2_t bindex,
                             vint8mf4_t value, size_t vl) {
  return vsoxei64_v_i8mf4_m(mask, base, bindex, value, vl);
}
2539
2540 //
2541 // CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf2_m(
2542 // CHECK-RV64-NEXT: entry:
2543 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
2544 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2545 // CHECK-RV64-NEXT: ret void
2546 //
// Masked ordered indexed store of vint8mf2_t using 64-bit (vuint64m4_t) offsets.
void test_vsoxei64_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint64m4_t bindex,
                             vint8mf2_t value, size_t vl) {
  return vsoxei64_v_i8mf2_m(mask, base, bindex, value, vl);
}
2551
2552 //
2553 // CHECK-RV64-LABEL: @test_vsoxei64_v_i8m1_m(
2554 // CHECK-RV64-NEXT: entry:
2555 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
2556 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2557 // CHECK-RV64-NEXT: ret void
2558 //
// Masked ordered indexed store of vint8m1_t using 64-bit (vuint64m8_t) offsets.
void test_vsoxei64_v_i8m1_m(vbool8_t mask, int8_t *base, vuint64m8_t bindex,
                            vint8m1_t value, size_t vl) {
  return vsoxei64_v_i8m1_m(mask, base, bindex, value, vl);
}
2563
2564 //
2565 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16mf4_m(
2566 // CHECK-RV64-NEXT: entry:
2567 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
2568 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2569 // CHECK-RV64-NEXT: ret void
2570 //
// Masked ordered indexed store of vint16mf4_t using 8-bit (vuint8mf8_t) offsets.
void test_vsoxei8_v_i16mf4_m(vbool64_t mask, int16_t *base, vuint8mf8_t bindex,
                             vint16mf4_t value, size_t vl) {
  return vsoxei8_v_i16mf4_m(mask, base, bindex, value, vl);
}
2575
2576 //
2577 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16mf2_m(
2578 // CHECK-RV64-NEXT: entry:
2579 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
2580 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2581 // CHECK-RV64-NEXT: ret void
2582 //
// Masked ordered indexed store of vint16mf2_t using 8-bit (vuint8mf4_t) offsets.
void test_vsoxei8_v_i16mf2_m(vbool32_t mask, int16_t *base, vuint8mf4_t bindex,
                             vint16mf2_t value, size_t vl) {
  return vsoxei8_v_i16mf2_m(mask, base, bindex, value, vl);
}
2587
2588 //
2589 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16m1_m(
2590 // CHECK-RV64-NEXT: entry:
2591 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
2592 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2593 // CHECK-RV64-NEXT: ret void
2594 //
// Masked ordered indexed store of vint16m1_t using 8-bit (vuint8mf2_t) offsets.
void test_vsoxei8_v_i16m1_m(vbool16_t mask, int16_t *base, vuint8mf2_t bindex,
                            vint16m1_t value, size_t vl) {
  return vsoxei8_v_i16m1_m(mask, base, bindex, value, vl);
}
2599
2600 //
2601 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16m2_m(
2602 // CHECK-RV64-NEXT: entry:
2603 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
2604 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2605 // CHECK-RV64-NEXT: ret void
2606 //
// Masked ordered indexed store of vint16m2_t using 8-bit (vuint8m1_t) offsets.
void test_vsoxei8_v_i16m2_m(vbool8_t mask, int16_t *base, vuint8m1_t bindex,
                            vint16m2_t value, size_t vl) {
  return vsoxei8_v_i16m2_m(mask, base, bindex, value, vl);
}
2611
2612 //
2613 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16m4_m(
2614 // CHECK-RV64-NEXT: entry:
2615 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
2616 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i8.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2617 // CHECK-RV64-NEXT: ret void
2618 //
// Masked ordered indexed store of vint16m4_t using 8-bit (vuint8m2_t) offsets.
void test_vsoxei8_v_i16m4_m(vbool4_t mask, int16_t *base, vuint8m2_t bindex,
                            vint16m4_t value, size_t vl) {
  return vsoxei8_v_i16m4_m(mask, base, bindex, value, vl);
}
2623
2624 //
2625 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16m8_m(
2626 // CHECK-RV64-NEXT: entry:
2627 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
2628 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i8.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2629 // CHECK-RV64-NEXT: ret void
2630 //
// Masked ordered indexed store of vint16m8_t using 8-bit (vuint8m4_t) offsets.
void test_vsoxei8_v_i16m8_m(vbool2_t mask, int16_t *base, vuint8m4_t bindex,
                            vint16m8_t value, size_t vl) {
  return vsoxei8_v_i16m8_m(mask, base, bindex, value, vl);
}
2635
2636 //
2637 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16mf4_m(
2638 // CHECK-RV64-NEXT: entry:
2639 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
2640 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2641 // CHECK-RV64-NEXT: ret void
2642 //
// Masked ordered indexed store of vint16mf4_t using vuint16mf4_t byte offsets.
void test_vsoxei16_v_i16mf4_m(vbool64_t mask, int16_t *base,
                              vuint16mf4_t bindex, vint16mf4_t value,
                              size_t vl) {
  return vsoxei16_v_i16mf4_m(mask, base, bindex, value, vl);
}
2648
2649 //
2650 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16mf2_m(
2651 // CHECK-RV64-NEXT: entry:
2652 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
2653 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2654 // CHECK-RV64-NEXT: ret void
2655 //
// Masked ordered indexed store of vint16mf2_t using vuint16mf2_t byte offsets.
void test_vsoxei16_v_i16mf2_m(vbool32_t mask, int16_t *base,
                              vuint16mf2_t bindex, vint16mf2_t value,
                              size_t vl) {
  return vsoxei16_v_i16mf2_m(mask, base, bindex, value, vl);
}
2661
2662 //
2663 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16m1_m(
2664 // CHECK-RV64-NEXT: entry:
2665 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
2666 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2667 // CHECK-RV64-NEXT: ret void
2668 //
// Masked ordered indexed store of vint16m1_t using vuint16m1_t byte offsets.
void test_vsoxei16_v_i16m1_m(vbool16_t mask, int16_t *base, vuint16m1_t bindex,
                             vint16m1_t value, size_t vl) {
  return vsoxei16_v_i16m1_m(mask, base, bindex, value, vl);
}
2673
2674 //
2675 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16m2_m(
2676 // CHECK-RV64-NEXT: entry:
2677 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
2678 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2679 // CHECK-RV64-NEXT: ret void
2680 //
// Masked ordered indexed store of vint16m2_t using vuint16m2_t byte offsets.
void test_vsoxei16_v_i16m2_m(vbool8_t mask, int16_t *base, vuint16m2_t bindex,
                             vint16m2_t value, size_t vl) {
  return vsoxei16_v_i16m2_m(mask, base, bindex, value, vl);
}
2685
2686 //
2687 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16m4_m(
2688 // CHECK-RV64-NEXT: entry:
2689 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
2690 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2691 // CHECK-RV64-NEXT: ret void
2692 //
// Masked ordered indexed store of vint16m4_t using vuint16m4_t byte offsets.
void test_vsoxei16_v_i16m4_m(vbool4_t mask, int16_t *base, vuint16m4_t bindex,
                             vint16m4_t value, size_t vl) {
  return vsoxei16_v_i16m4_m(mask, base, bindex, value, vl);
}
2697
2698 //
2699 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16m8_m(
2700 // CHECK-RV64-NEXT: entry:
2701 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
2702 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2703 // CHECK-RV64-NEXT: ret void
2704 //
// Masked ordered indexed store of vint16m8_t using vuint16m8_t byte offsets.
void test_vsoxei16_v_i16m8_m(vbool2_t mask, int16_t *base, vuint16m8_t bindex,
                             vint16m8_t value, size_t vl) {
  return vsoxei16_v_i16m8_m(mask, base, bindex, value, vl);
}
2709
2710 //
2711 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16mf4_m(
2712 // CHECK-RV64-NEXT: entry:
2713 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
2714 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2715 // CHECK-RV64-NEXT: ret void
2716 //
// Masked ordered indexed store of vint16mf4_t using 32-bit (vuint32mf2_t) offsets.
void test_vsoxei32_v_i16mf4_m(vbool64_t mask, int16_t *base,
                              vuint32mf2_t bindex, vint16mf4_t value,
                              size_t vl) {
  return vsoxei32_v_i16mf4_m(mask, base, bindex, value, vl);
}
2722
2723 //
2724 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16mf2_m(
2725 // CHECK-RV64-NEXT: entry:
2726 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
2727 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2728 // CHECK-RV64-NEXT: ret void
2729 //
// Masked ordered indexed store of vint16mf2_t using 32-bit (vuint32m1_t) offsets.
void test_vsoxei32_v_i16mf2_m(vbool32_t mask, int16_t *base, vuint32m1_t bindex,
                              vint16mf2_t value, size_t vl) {
  return vsoxei32_v_i16mf2_m(mask, base, bindex, value, vl);
}
2734
2735 //
2736 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16m1_m(
2737 // CHECK-RV64-NEXT: entry:
2738 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
2739 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2740 // CHECK-RV64-NEXT: ret void
2741 //
// Masked ordered indexed store of vint16m1_t using 32-bit (vuint32m2_t) offsets.
void test_vsoxei32_v_i16m1_m(vbool16_t mask, int16_t *base, vuint32m2_t bindex,
                             vint16m1_t value, size_t vl) {
  return vsoxei32_v_i16m1_m(mask, base, bindex, value, vl);
}
2746
2747 //
2748 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16m2_m(
2749 // CHECK-RV64-NEXT: entry:
2750 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
2751 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2752 // CHECK-RV64-NEXT: ret void
2753 //
// Masked ordered indexed store of vint16m2_t using 32-bit (vuint32m4_t) offsets.
void test_vsoxei32_v_i16m2_m(vbool8_t mask, int16_t *base, vuint32m4_t bindex,
                             vint16m2_t value, size_t vl) {
  return vsoxei32_v_i16m2_m(mask, base, bindex, value, vl);
}
2758
2759 //
2760 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16m4_m(
2761 // CHECK-RV64-NEXT: entry:
2762 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
2763 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i32.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2764 // CHECK-RV64-NEXT: ret void
2765 //
// Masked ordered indexed store of vint16m4_t using 32-bit (vuint32m8_t) offsets.
void test_vsoxei32_v_i16m4_m(vbool4_t mask, int16_t *base, vuint32m8_t bindex,
                             vint16m4_t value, size_t vl) {
  return vsoxei32_v_i16m4_m(mask, base, bindex, value, vl);
}
2770
2771 //
2772 // CHECK-RV64-LABEL: @test_vsoxei64_v_i16mf4_m(
2773 // CHECK-RV64-NEXT: entry:
2774 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
2775 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2776 // CHECK-RV64-NEXT: ret void
2777 //
// Masked ordered indexed store of vint16mf4_t using 64-bit (vuint64m1_t) offsets.
void test_vsoxei64_v_i16mf4_m(vbool64_t mask, int16_t *base, vuint64m1_t bindex,
                              vint16mf4_t value, size_t vl) {
  return vsoxei64_v_i16mf4_m(mask, base, bindex, value, vl);
}
2782
2783 //
2784 // CHECK-RV64-LABEL: @test_vsoxei64_v_i16mf2_m(
2785 // CHECK-RV64-NEXT: entry:
2786 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
2787 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2788 // CHECK-RV64-NEXT: ret void
2789 //
// Masked ordered indexed store of vint16mf2_t using 64-bit (vuint64m2_t) offsets.
void test_vsoxei64_v_i16mf2_m(vbool32_t mask, int16_t *base, vuint64m2_t bindex,
                              vint16mf2_t value, size_t vl) {
  return vsoxei64_v_i16mf2_m(mask, base, bindex, value, vl);
}
2794
2795 //
2796 // CHECK-RV64-LABEL: @test_vsoxei64_v_i16m1_m(
2797 // CHECK-RV64-NEXT: entry:
2798 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
2799 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2800 // CHECK-RV64-NEXT: ret void
2801 //
// Masked ordered indexed store of vint16m1_t using 64-bit (vuint64m4_t) offsets.
void test_vsoxei64_v_i16m1_m(vbool16_t mask, int16_t *base, vuint64m4_t bindex,
                             vint16m1_t value, size_t vl) {
  return vsoxei64_v_i16m1_m(mask, base, bindex, value, vl);
}
2806
2807 //
2808 // CHECK-RV64-LABEL: @test_vsoxei64_v_i16m2_m(
2809 // CHECK-RV64-NEXT: entry:
2810 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
2811 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2812 // CHECK-RV64-NEXT: ret void
2813 //
// Masked ordered indexed store of vint16m2_t using 64-bit (vuint64m8_t) offsets.
void test_vsoxei64_v_i16m2_m(vbool8_t mask, int16_t *base, vuint64m8_t bindex,
                             vint16m2_t value, size_t vl) {
  return vsoxei64_v_i16m2_m(mask, base, bindex, value, vl);
}
2818
2819 //
2820 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32mf2_m(
2821 // CHECK-RV64-NEXT: entry:
2822 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
2823 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2824 // CHECK-RV64-NEXT: ret void
2825 //
// Masked ordered indexed store of vint32mf2_t using 8-bit (vuint8mf8_t) offsets.
void test_vsoxei8_v_i32mf2_m(vbool64_t mask, int32_t *base, vuint8mf8_t bindex,
                             vint32mf2_t value, size_t vl) {
  return vsoxei8_v_i32mf2_m(mask, base, bindex, value, vl);
}
2830
2831 //
2832 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32m1_m(
2833 // CHECK-RV64-NEXT: entry:
2834 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
2835 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2836 // CHECK-RV64-NEXT: ret void
2837 //
// Masked ordered indexed store of vint32m1_t using 8-bit (vuint8mf4_t) offsets.
void test_vsoxei8_v_i32m1_m(vbool32_t mask, int32_t *base, vuint8mf4_t bindex,
                            vint32m1_t value, size_t vl) {
  return vsoxei8_v_i32m1_m(mask, base, bindex, value, vl);
}
2842
2843 //
2844 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32m2_m(
2845 // CHECK-RV64-NEXT: entry:
2846 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
2847 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2848 // CHECK-RV64-NEXT: ret void
2849 //
// Masked ordered indexed store of vint32m2_t using 8-bit (vuint8mf2_t) offsets.
void test_vsoxei8_v_i32m2_m(vbool16_t mask, int32_t *base, vuint8mf2_t bindex,
                            vint32m2_t value, size_t vl) {
  return vsoxei8_v_i32m2_m(mask, base, bindex, value, vl);
}
2854
2855 //
2856 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32m4_m(
2857 // CHECK-RV64-NEXT: entry:
2858 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
2859 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i8.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2860 // CHECK-RV64-NEXT: ret void
2861 //
test_vsoxei8_v_i32m4_m(vbool8_t mask,int32_t * base,vuint8m1_t bindex,vint32m4_t value,size_t vl)2862 void test_vsoxei8_v_i32m4_m(vbool8_t mask, int32_t *base, vuint8m1_t bindex,
2863 vint32m4_t value, size_t vl) {
2864 return vsoxei8_v_i32m4_m(mask, base, bindex, value, vl);
2865 }
2866
2867 //
2868 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32m8_m(
2869 // CHECK-RV64-NEXT: entry:
2870 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
2871 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i8.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2872 // CHECK-RV64-NEXT: ret void
2873 //
test_vsoxei8_v_i32m8_m(vbool4_t mask,int32_t * base,vuint8m2_t bindex,vint32m8_t value,size_t vl)2874 void test_vsoxei8_v_i32m8_m(vbool4_t mask, int32_t *base, vuint8m2_t bindex,
2875 vint32m8_t value, size_t vl) {
2876 return vsoxei8_v_i32m8_m(mask, base, bindex, value, vl);
2877 }
2878
2879 //
2880 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32mf2_m(
2881 // CHECK-RV64-NEXT: entry:
2882 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
2883 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2884 // CHECK-RV64-NEXT: ret void
2885 //
test_vsoxei16_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint16mf4_t bindex,vint32mf2_t value,size_t vl)2886 void test_vsoxei16_v_i32mf2_m(vbool64_t mask, int32_t *base,
2887 vuint16mf4_t bindex, vint32mf2_t value,
2888 size_t vl) {
2889 return vsoxei16_v_i32mf2_m(mask, base, bindex, value, vl);
2890 }
2891
2892 //
2893 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32m1_m(
2894 // CHECK-RV64-NEXT: entry:
2895 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
2896 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2897 // CHECK-RV64-NEXT: ret void
2898 //
test_vsoxei16_v_i32m1_m(vbool32_t mask,int32_t * base,vuint16mf2_t bindex,vint32m1_t value,size_t vl)2899 void test_vsoxei16_v_i32m1_m(vbool32_t mask, int32_t *base, vuint16mf2_t bindex,
2900 vint32m1_t value, size_t vl) {
2901 return vsoxei16_v_i32m1_m(mask, base, bindex, value, vl);
2902 }
2903
2904 //
2905 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32m2_m(
2906 // CHECK-RV64-NEXT: entry:
2907 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
2908 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2909 // CHECK-RV64-NEXT: ret void
2910 //
test_vsoxei16_v_i32m2_m(vbool16_t mask,int32_t * base,vuint16m1_t bindex,vint32m2_t value,size_t vl)2911 void test_vsoxei16_v_i32m2_m(vbool16_t mask, int32_t *base, vuint16m1_t bindex,
2912 vint32m2_t value, size_t vl) {
2913 return vsoxei16_v_i32m2_m(mask, base, bindex, value, vl);
2914 }
2915
2916 //
2917 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32m4_m(
2918 // CHECK-RV64-NEXT: entry:
2919 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
2920 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2921 // CHECK-RV64-NEXT: ret void
2922 //
test_vsoxei16_v_i32m4_m(vbool8_t mask,int32_t * base,vuint16m2_t bindex,vint32m4_t value,size_t vl)2923 void test_vsoxei16_v_i32m4_m(vbool8_t mask, int32_t *base, vuint16m2_t bindex,
2924 vint32m4_t value, size_t vl) {
2925 return vsoxei16_v_i32m4_m(mask, base, bindex, value, vl);
2926 }
2927
2928 //
2929 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32m8_m(
2930 // CHECK-RV64-NEXT: entry:
2931 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
2932 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i16.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2933 // CHECK-RV64-NEXT: ret void
2934 //
test_vsoxei16_v_i32m8_m(vbool4_t mask,int32_t * base,vuint16m4_t bindex,vint32m8_t value,size_t vl)2935 void test_vsoxei16_v_i32m8_m(vbool4_t mask, int32_t *base, vuint16m4_t bindex,
2936 vint32m8_t value, size_t vl) {
2937 return vsoxei16_v_i32m8_m(mask, base, bindex, value, vl);
2938 }
2939
2940 //
2941 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32mf2_m(
2942 // CHECK-RV64-NEXT: entry:
2943 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
2944 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2945 // CHECK-RV64-NEXT: ret void
2946 //
test_vsoxei32_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint32mf2_t bindex,vint32mf2_t value,size_t vl)2947 void test_vsoxei32_v_i32mf2_m(vbool64_t mask, int32_t *base,
2948 vuint32mf2_t bindex, vint32mf2_t value,
2949 size_t vl) {
2950 return vsoxei32_v_i32mf2_m(mask, base, bindex, value, vl);
2951 }
2952
2953 //
2954 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32m1_m(
2955 // CHECK-RV64-NEXT: entry:
2956 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
2957 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2958 // CHECK-RV64-NEXT: ret void
2959 //
test_vsoxei32_v_i32m1_m(vbool32_t mask,int32_t * base,vuint32m1_t bindex,vint32m1_t value,size_t vl)2960 void test_vsoxei32_v_i32m1_m(vbool32_t mask, int32_t *base, vuint32m1_t bindex,
2961 vint32m1_t value, size_t vl) {
2962 return vsoxei32_v_i32m1_m(mask, base, bindex, value, vl);
2963 }
2964
2965 //
2966 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32m2_m(
2967 // CHECK-RV64-NEXT: entry:
2968 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
2969 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2970 // CHECK-RV64-NEXT: ret void
2971 //
test_vsoxei32_v_i32m2_m(vbool16_t mask,int32_t * base,vuint32m2_t bindex,vint32m2_t value,size_t vl)2972 void test_vsoxei32_v_i32m2_m(vbool16_t mask, int32_t *base, vuint32m2_t bindex,
2973 vint32m2_t value, size_t vl) {
2974 return vsoxei32_v_i32m2_m(mask, base, bindex, value, vl);
2975 }
2976
2977 //
2978 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32m4_m(
2979 // CHECK-RV64-NEXT: entry:
2980 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
2981 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2982 // CHECK-RV64-NEXT: ret void
2983 //
test_vsoxei32_v_i32m4_m(vbool8_t mask,int32_t * base,vuint32m4_t bindex,vint32m4_t value,size_t vl)2984 void test_vsoxei32_v_i32m4_m(vbool8_t mask, int32_t *base, vuint32m4_t bindex,
2985 vint32m4_t value, size_t vl) {
2986 return vsoxei32_v_i32m4_m(mask, base, bindex, value, vl);
2987 }
2988
2989 //
2990 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32m8_m(
2991 // CHECK-RV64-NEXT: entry:
2992 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
2993 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2994 // CHECK-RV64-NEXT: ret void
2995 //
test_vsoxei32_v_i32m8_m(vbool4_t mask,int32_t * base,vuint32m8_t bindex,vint32m8_t value,size_t vl)2996 void test_vsoxei32_v_i32m8_m(vbool4_t mask, int32_t *base, vuint32m8_t bindex,
2997 vint32m8_t value, size_t vl) {
2998 return vsoxei32_v_i32m8_m(mask, base, bindex, value, vl);
2999 }
3000
3001 //
3002 // CHECK-RV64-LABEL: @test_vsoxei64_v_i32mf2_m(
3003 // CHECK-RV64-NEXT: entry:
3004 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
3005 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3006 // CHECK-RV64-NEXT: ret void
3007 //
test_vsoxei64_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint64m1_t bindex,vint32mf2_t value,size_t vl)3008 void test_vsoxei64_v_i32mf2_m(vbool64_t mask, int32_t *base, vuint64m1_t bindex,
3009 vint32mf2_t value, size_t vl) {
3010 return vsoxei64_v_i32mf2_m(mask, base, bindex, value, vl);
3011 }
3012
3013 //
3014 // CHECK-RV64-LABEL: @test_vsoxei64_v_i32m1_m(
3015 // CHECK-RV64-NEXT: entry:
3016 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
3017 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3018 // CHECK-RV64-NEXT: ret void
3019 //
test_vsoxei64_v_i32m1_m(vbool32_t mask,int32_t * base,vuint64m2_t bindex,vint32m1_t value,size_t vl)3020 void test_vsoxei64_v_i32m1_m(vbool32_t mask, int32_t *base, vuint64m2_t bindex,
3021 vint32m1_t value, size_t vl) {
3022 return vsoxei64_v_i32m1_m(mask, base, bindex, value, vl);
3023 }
3024
3025 //
3026 // CHECK-RV64-LABEL: @test_vsoxei64_v_i32m2_m(
3027 // CHECK-RV64-NEXT: entry:
3028 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
3029 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3030 // CHECK-RV64-NEXT: ret void
3031 //
test_vsoxei64_v_i32m2_m(vbool16_t mask,int32_t * base,vuint64m4_t bindex,vint32m2_t value,size_t vl)3032 void test_vsoxei64_v_i32m2_m(vbool16_t mask, int32_t *base, vuint64m4_t bindex,
3033 vint32m2_t value, size_t vl) {
3034 return vsoxei64_v_i32m2_m(mask, base, bindex, value, vl);
3035 }
3036
3037 //
3038 // CHECK-RV64-LABEL: @test_vsoxei64_v_i32m4_m(
3039 // CHECK-RV64-NEXT: entry:
3040 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
3041 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3042 // CHECK-RV64-NEXT: ret void
3043 //
test_vsoxei64_v_i32m4_m(vbool8_t mask,int32_t * base,vuint64m8_t bindex,vint32m4_t value,size_t vl)3044 void test_vsoxei64_v_i32m4_m(vbool8_t mask, int32_t *base, vuint64m8_t bindex,
3045 vint32m4_t value, size_t vl) {
3046 return vsoxei64_v_i32m4_m(mask, base, bindex, value, vl);
3047 }
3048
3049 //
3050 // CHECK-RV64-LABEL: @test_vsoxei8_v_i64m1_m(
3051 // CHECK-RV64-NEXT: entry:
3052 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
3053 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3054 // CHECK-RV64-NEXT: ret void
3055 //
test_vsoxei8_v_i64m1_m(vbool64_t mask,int64_t * base,vuint8mf8_t bindex,vint64m1_t value,size_t vl)3056 void test_vsoxei8_v_i64m1_m(vbool64_t mask, int64_t *base, vuint8mf8_t bindex,
3057 vint64m1_t value, size_t vl) {
3058 return vsoxei8_v_i64m1_m(mask, base, bindex, value, vl);
3059 }
3060
3061 //
3062 // CHECK-RV64-LABEL: @test_vsoxei8_v_i64m2_m(
3063 // CHECK-RV64-NEXT: entry:
3064 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
3065 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3066 // CHECK-RV64-NEXT: ret void
3067 //
test_vsoxei8_v_i64m2_m(vbool32_t mask,int64_t * base,vuint8mf4_t bindex,vint64m2_t value,size_t vl)3068 void test_vsoxei8_v_i64m2_m(vbool32_t mask, int64_t *base, vuint8mf4_t bindex,
3069 vint64m2_t value, size_t vl) {
3070 return vsoxei8_v_i64m2_m(mask, base, bindex, value, vl);
3071 }
3072
3073 //
3074 // CHECK-RV64-LABEL: @test_vsoxei8_v_i64m4_m(
3075 // CHECK-RV64-NEXT: entry:
3076 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
3077 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3078 // CHECK-RV64-NEXT: ret void
3079 //
test_vsoxei8_v_i64m4_m(vbool16_t mask,int64_t * base,vuint8mf2_t bindex,vint64m4_t value,size_t vl)3080 void test_vsoxei8_v_i64m4_m(vbool16_t mask, int64_t *base, vuint8mf2_t bindex,
3081 vint64m4_t value, size_t vl) {
3082 return vsoxei8_v_i64m4_m(mask, base, bindex, value, vl);
3083 }
3084
3085 //
3086 // CHECK-RV64-LABEL: @test_vsoxei8_v_i64m8_m(
3087 // CHECK-RV64-NEXT: entry:
3088 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
3089 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i8.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3090 // CHECK-RV64-NEXT: ret void
3091 //
test_vsoxei8_v_i64m8_m(vbool8_t mask,int64_t * base,vuint8m1_t bindex,vint64m8_t value,size_t vl)3092 void test_vsoxei8_v_i64m8_m(vbool8_t mask, int64_t *base, vuint8m1_t bindex,
3093 vint64m8_t value, size_t vl) {
3094 return vsoxei8_v_i64m8_m(mask, base, bindex, value, vl);
3095 }
3096
3097 //
3098 // CHECK-RV64-LABEL: @test_vsoxei16_v_i64m1_m(
3099 // CHECK-RV64-NEXT: entry:
3100 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
3101 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3102 // CHECK-RV64-NEXT: ret void
3103 //
test_vsoxei16_v_i64m1_m(vbool64_t mask,int64_t * base,vuint16mf4_t bindex,vint64m1_t value,size_t vl)3104 void test_vsoxei16_v_i64m1_m(vbool64_t mask, int64_t *base, vuint16mf4_t bindex,
3105 vint64m1_t value, size_t vl) {
3106 return vsoxei16_v_i64m1_m(mask, base, bindex, value, vl);
3107 }
3108
3109 //
3110 // CHECK-RV64-LABEL: @test_vsoxei16_v_i64m2_m(
3111 // CHECK-RV64-NEXT: entry:
3112 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
3113 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3114 // CHECK-RV64-NEXT: ret void
3115 //
test_vsoxei16_v_i64m2_m(vbool32_t mask,int64_t * base,vuint16mf2_t bindex,vint64m2_t value,size_t vl)3116 void test_vsoxei16_v_i64m2_m(vbool32_t mask, int64_t *base, vuint16mf2_t bindex,
3117 vint64m2_t value, size_t vl) {
3118 return vsoxei16_v_i64m2_m(mask, base, bindex, value, vl);
3119 }
3120
3121 //
3122 // CHECK-RV64-LABEL: @test_vsoxei16_v_i64m4_m(
3123 // CHECK-RV64-NEXT: entry:
3124 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
3125 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3126 // CHECK-RV64-NEXT: ret void
3127 //
test_vsoxei16_v_i64m4_m(vbool16_t mask,int64_t * base,vuint16m1_t bindex,vint64m4_t value,size_t vl)3128 void test_vsoxei16_v_i64m4_m(vbool16_t mask, int64_t *base, vuint16m1_t bindex,
3129 vint64m4_t value, size_t vl) {
3130 return vsoxei16_v_i64m4_m(mask, base, bindex, value, vl);
3131 }
3132
3133 //
3134 // CHECK-RV64-LABEL: @test_vsoxei16_v_i64m8_m(
3135 // CHECK-RV64-NEXT: entry:
3136 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
3137 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i16.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3138 // CHECK-RV64-NEXT: ret void
3139 //
test_vsoxei16_v_i64m8_m(vbool8_t mask,int64_t * base,vuint16m2_t bindex,vint64m8_t value,size_t vl)3140 void test_vsoxei16_v_i64m8_m(vbool8_t mask, int64_t *base, vuint16m2_t bindex,
3141 vint64m8_t value, size_t vl) {
3142 return vsoxei16_v_i64m8_m(mask, base, bindex, value, vl);
3143 }
3144
3145 //
3146 // CHECK-RV64-LABEL: @test_vsoxei32_v_i64m1_m(
3147 // CHECK-RV64-NEXT: entry:
3148 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
3149 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3150 // CHECK-RV64-NEXT: ret void
3151 //
test_vsoxei32_v_i64m1_m(vbool64_t mask,int64_t * base,vuint32mf2_t bindex,vint64m1_t value,size_t vl)3152 void test_vsoxei32_v_i64m1_m(vbool64_t mask, int64_t *base, vuint32mf2_t bindex,
3153 vint64m1_t value, size_t vl) {
3154 return vsoxei32_v_i64m1_m(mask, base, bindex, value, vl);
3155 }
3156
3157 //
3158 // CHECK-RV64-LABEL: @test_vsoxei32_v_i64m2_m(
3159 // CHECK-RV64-NEXT: entry:
3160 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
3161 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3162 // CHECK-RV64-NEXT: ret void
3163 //
test_vsoxei32_v_i64m2_m(vbool32_t mask,int64_t * base,vuint32m1_t bindex,vint64m2_t value,size_t vl)3164 void test_vsoxei32_v_i64m2_m(vbool32_t mask, int64_t *base, vuint32m1_t bindex,
3165 vint64m2_t value, size_t vl) {
3166 return vsoxei32_v_i64m2_m(mask, base, bindex, value, vl);
3167 }
3168
3169 //
3170 // CHECK-RV64-LABEL: @test_vsoxei32_v_i64m4_m(
3171 // CHECK-RV64-NEXT: entry:
3172 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
3173 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3174 // CHECK-RV64-NEXT: ret void
3175 //
test_vsoxei32_v_i64m4_m(vbool16_t mask,int64_t * base,vuint32m2_t bindex,vint64m4_t value,size_t vl)3176 void test_vsoxei32_v_i64m4_m(vbool16_t mask, int64_t *base, vuint32m2_t bindex,
3177 vint64m4_t value, size_t vl) {
3178 return vsoxei32_v_i64m4_m(mask, base, bindex, value, vl);
3179 }
3180
3181 //
3182 // CHECK-RV64-LABEL: @test_vsoxei32_v_i64m8_m(
3183 // CHECK-RV64-NEXT: entry:
3184 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
3185 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i32.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3186 // CHECK-RV64-NEXT: ret void
3187 //
test_vsoxei32_v_i64m8_m(vbool8_t mask,int64_t * base,vuint32m4_t bindex,vint64m8_t value,size_t vl)3188 void test_vsoxei32_v_i64m8_m(vbool8_t mask, int64_t *base, vuint32m4_t bindex,
3189 vint64m8_t value, size_t vl) {
3190 return vsoxei32_v_i64m8_m(mask, base, bindex, value, vl);
3191 }
3192
3193 //
3194 // CHECK-RV64-LABEL: @test_vsoxei64_v_i64m1_m(
3195 // CHECK-RV64-NEXT: entry:
3196 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
3197 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3198 // CHECK-RV64-NEXT: ret void
3199 //
test_vsoxei64_v_i64m1_m(vbool64_t mask,int64_t * base,vuint64m1_t bindex,vint64m1_t value,size_t vl)3200 void test_vsoxei64_v_i64m1_m(vbool64_t mask, int64_t *base, vuint64m1_t bindex,
3201 vint64m1_t value, size_t vl) {
3202 return vsoxei64_v_i64m1_m(mask, base, bindex, value, vl);
3203 }
3204
3205 //
3206 // CHECK-RV64-LABEL: @test_vsoxei64_v_i64m2_m(
3207 // CHECK-RV64-NEXT: entry:
3208 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
3209 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3210 // CHECK-RV64-NEXT: ret void
3211 //
test_vsoxei64_v_i64m2_m(vbool32_t mask,int64_t * base,vuint64m2_t bindex,vint64m2_t value,size_t vl)3212 void test_vsoxei64_v_i64m2_m(vbool32_t mask, int64_t *base, vuint64m2_t bindex,
3213 vint64m2_t value, size_t vl) {
3214 return vsoxei64_v_i64m2_m(mask, base, bindex, value, vl);
3215 }
3216
3217 //
3218 // CHECK-RV64-LABEL: @test_vsoxei64_v_i64m4_m(
3219 // CHECK-RV64-NEXT: entry:
3220 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
3221 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3222 // CHECK-RV64-NEXT: ret void
3223 //
test_vsoxei64_v_i64m4_m(vbool16_t mask,int64_t * base,vuint64m4_t bindex,vint64m4_t value,size_t vl)3224 void test_vsoxei64_v_i64m4_m(vbool16_t mask, int64_t *base, vuint64m4_t bindex,
3225 vint64m4_t value, size_t vl) {
3226 return vsoxei64_v_i64m4_m(mask, base, bindex, value, vl);
3227 }
3228
3229 //
3230 // CHECK-RV64-LABEL: @test_vsoxei64_v_i64m8_m(
3231 // CHECK-RV64-NEXT: entry:
3232 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
3233 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3234 // CHECK-RV64-NEXT: ret void
3235 //
test_vsoxei64_v_i64m8_m(vbool8_t mask,int64_t * base,vuint64m8_t bindex,vint64m8_t value,size_t vl)3236 void test_vsoxei64_v_i64m8_m(vbool8_t mask, int64_t *base, vuint64m8_t bindex,
3237 vint64m8_t value, size_t vl) {
3238 return vsoxei64_v_i64m8_m(mask, base, bindex, value, vl);
3239 }
3240
3241 //
3242 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf8_m(
3243 // CHECK-RV64-NEXT: entry:
3244 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
3245 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3246 // CHECK-RV64-NEXT: ret void
3247 //
test_vsoxei8_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint8mf8_t bindex,vuint8mf8_t value,size_t vl)3248 void test_vsoxei8_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint8mf8_t bindex,
3249 vuint8mf8_t value, size_t vl) {
3250 return vsoxei8_v_u8mf8_m(mask, base, bindex, value, vl);
3251 }
3252
3253 //
3254 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf4_m(
3255 // CHECK-RV64-NEXT: entry:
3256 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
3257 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3258 // CHECK-RV64-NEXT: ret void
3259 //
test_vsoxei8_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint8mf4_t bindex,vuint8mf4_t value,size_t vl)3260 void test_vsoxei8_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint8mf4_t bindex,
3261 vuint8mf4_t value, size_t vl) {
3262 return vsoxei8_v_u8mf4_m(mask, base, bindex, value, vl);
3263 }
3264
3265 //
3266 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf2_m(
3267 // CHECK-RV64-NEXT: entry:
3268 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
3269 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3270 // CHECK-RV64-NEXT: ret void
3271 //
test_vsoxei8_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint8mf2_t bindex,vuint8mf2_t value,size_t vl)3272 void test_vsoxei8_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint8mf2_t bindex,
3273 vuint8mf2_t value, size_t vl) {
3274 return vsoxei8_v_u8mf2_m(mask, base, bindex, value, vl);
3275 }
3276
3277 //
3278 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8m1_m(
3279 // CHECK-RV64-NEXT: entry:
3280 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
3281 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3282 // CHECK-RV64-NEXT: ret void
3283 //
test_vsoxei8_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint8m1_t bindex,vuint8m1_t value,size_t vl)3284 void test_vsoxei8_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint8m1_t bindex,
3285 vuint8m1_t value, size_t vl) {
3286 return vsoxei8_v_u8m1_m(mask, base, bindex, value, vl);
3287 }
3288
3289 //
3290 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8m2_m(
3291 // CHECK-RV64-NEXT: entry:
3292 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
3293 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3294 // CHECK-RV64-NEXT: ret void
3295 //
test_vsoxei8_v_u8m2_m(vbool4_t mask,uint8_t * base,vuint8m2_t bindex,vuint8m2_t value,size_t vl)3296 void test_vsoxei8_v_u8m2_m(vbool4_t mask, uint8_t *base, vuint8m2_t bindex,
3297 vuint8m2_t value, size_t vl) {
3298 return vsoxei8_v_u8m2_m(mask, base, bindex, value, vl);
3299 }
3300
3301 //
3302 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8m4_m(
3303 // CHECK-RV64-NEXT: entry:
3304 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
3305 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3306 // CHECK-RV64-NEXT: ret void
3307 //
test_vsoxei8_v_u8m4_m(vbool2_t mask,uint8_t * base,vuint8m4_t bindex,vuint8m4_t value,size_t vl)3308 void test_vsoxei8_v_u8m4_m(vbool2_t mask, uint8_t *base, vuint8m4_t bindex,
3309 vuint8m4_t value, size_t vl) {
3310 return vsoxei8_v_u8m4_m(mask, base, bindex, value, vl);
3311 }
3312
3313 //
3314 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8m8_m(
3315 // CHECK-RV64-NEXT: entry:
3316 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
3317 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], <vscale x 64 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3318 // CHECK-RV64-NEXT: ret void
3319 //
test_vsoxei8_v_u8m8_m(vbool1_t mask,uint8_t * base,vuint8m8_t bindex,vuint8m8_t value,size_t vl)3320 void test_vsoxei8_v_u8m8_m(vbool1_t mask, uint8_t *base, vuint8m8_t bindex,
3321 vuint8m8_t value, size_t vl) {
3322 return vsoxei8_v_u8m8_m(mask, base, bindex, value, vl);
3323 }
3324
3325 //
3326 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf8_m(
3327 // CHECK-RV64-NEXT: entry:
3328 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
3329 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3330 // CHECK-RV64-NEXT: ret void
3331 //
test_vsoxei16_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint16mf4_t bindex,vuint8mf8_t value,size_t vl)3332 void test_vsoxei16_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint16mf4_t bindex,
3333 vuint8mf8_t value, size_t vl) {
3334 return vsoxei16_v_u8mf8_m(mask, base, bindex, value, vl);
3335 }
3336
3337 //
3338 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf4_m(
3339 // CHECK-RV64-NEXT: entry:
3340 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
3341 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3342 // CHECK-RV64-NEXT: ret void
3343 //
test_vsoxei16_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint16mf2_t bindex,vuint8mf4_t value,size_t vl)3344 void test_vsoxei16_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint16mf2_t bindex,
3345 vuint8mf4_t value, size_t vl) {
3346 return vsoxei16_v_u8mf4_m(mask, base, bindex, value, vl);
3347 }
3348
3349 //
3350 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf2_m(
3351 // CHECK-RV64-NEXT: entry:
3352 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
3353 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3354 // CHECK-RV64-NEXT: ret void
3355 //
test_vsoxei16_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint16m1_t bindex,vuint8mf2_t value,size_t vl)3356 void test_vsoxei16_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint16m1_t bindex,
3357 vuint8mf2_t value, size_t vl) {
3358 return vsoxei16_v_u8mf2_m(mask, base, bindex, value, vl);
3359 }
3360
3361 //
3362 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8m1_m(
3363 // CHECK-RV64-NEXT: entry:
3364 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
3365 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3366 // CHECK-RV64-NEXT: ret void
3367 //
test_vsoxei16_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint16m2_t bindex,vuint8m1_t value,size_t vl)3368 void test_vsoxei16_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint16m2_t bindex,
3369 vuint8m1_t value, size_t vl) {
3370 return vsoxei16_v_u8m1_m(mask, base, bindex, value, vl);
3371 }
3372
3373 //
3374 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8m2_m(
3375 // CHECK-RV64-NEXT: entry:
3376 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
3377 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3378 // CHECK-RV64-NEXT: ret void
3379 //
// Masked vsoxei16 ordered indexed store, u8m2 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei16_v_u8m2_m(vbool4_t mask, uint8_t *base, vuint16m4_t bindex,
                            vuint8m2_t value, size_t vl) {
  return vsoxei16_v_u8m2_m(mask, base, bindex, value, vl);
}
3384
3385 //
3386 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8m4_m(
3387 // CHECK-RV64-NEXT: entry:
3388 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
3389 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3390 // CHECK-RV64-NEXT: ret void
3391 //
// Masked vsoxei16 ordered indexed store, u8m4 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei16_v_u8m4_m(vbool2_t mask, uint8_t *base, vuint16m8_t bindex,
                            vuint8m4_t value, size_t vl) {
  return vsoxei16_v_u8m4_m(mask, base, bindex, value, vl);
}
3396
3397 //
3398 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf8_m(
3399 // CHECK-RV64-NEXT: entry:
3400 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
3401 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3402 // CHECK-RV64-NEXT: ret void
3403 //
// Masked vsoxei32 ordered indexed store, u8mf8 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei32_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint32mf2_t bindex,
                             vuint8mf8_t value, size_t vl) {
  return vsoxei32_v_u8mf8_m(mask, base, bindex, value, vl);
}
3408
3409 //
3410 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf4_m(
3411 // CHECK-RV64-NEXT: entry:
3412 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
3413 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3414 // CHECK-RV64-NEXT: ret void
3415 //
// Masked vsoxei32 ordered indexed store, u8mf4 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei32_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint32m1_t bindex,
                             vuint8mf4_t value, size_t vl) {
  return vsoxei32_v_u8mf4_m(mask, base, bindex, value, vl);
}
3420
3421 //
3422 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf2_m(
3423 // CHECK-RV64-NEXT: entry:
3424 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
3425 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3426 // CHECK-RV64-NEXT: ret void
3427 //
// Masked vsoxei32 ordered indexed store, u8mf2 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei32_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint32m2_t bindex,
                             vuint8mf2_t value, size_t vl) {
  return vsoxei32_v_u8mf2_m(mask, base, bindex, value, vl);
}
3432
3433 //
3434 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8m1_m(
3435 // CHECK-RV64-NEXT: entry:
3436 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
3437 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3438 // CHECK-RV64-NEXT: ret void
3439 //
// Masked vsoxei32 ordered indexed store, u8m1 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei32_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint32m4_t bindex,
                            vuint8m1_t value, size_t vl) {
  return vsoxei32_v_u8m1_m(mask, base, bindex, value, vl);
}
3444
3445 //
3446 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8m2_m(
3447 // CHECK-RV64-NEXT: entry:
3448 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
3449 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3450 // CHECK-RV64-NEXT: ret void
3451 //
// Masked vsoxei32 ordered indexed store, u8m2 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei32_v_u8m2_m(vbool4_t mask, uint8_t *base, vuint32m8_t bindex,
                            vuint8m2_t value, size_t vl) {
  return vsoxei32_v_u8m2_m(mask, base, bindex, value, vl);
}
3456
3457 //
3458 // CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf8_m(
3459 // CHECK-RV64-NEXT: entry:
3460 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
3461 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3462 // CHECK-RV64-NEXT: ret void
3463 //
// Masked vsoxei64 ordered indexed store, u8mf8 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei64_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint64m1_t bindex,
                             vuint8mf8_t value, size_t vl) {
  return vsoxei64_v_u8mf8_m(mask, base, bindex, value, vl);
}
3468
3469 //
3470 // CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf4_m(
3471 // CHECK-RV64-NEXT: entry:
3472 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
3473 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3474 // CHECK-RV64-NEXT: ret void
3475 //
// Masked vsoxei64 ordered indexed store, u8mf4 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei64_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint64m2_t bindex,
                             vuint8mf4_t value, size_t vl) {
  return vsoxei64_v_u8mf4_m(mask, base, bindex, value, vl);
}
3480
3481 //
3482 // CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf2_m(
3483 // CHECK-RV64-NEXT: entry:
3484 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
3485 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3486 // CHECK-RV64-NEXT: ret void
3487 //
// Masked vsoxei64 ordered indexed store, u8mf2 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei64_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint64m4_t bindex,
                             vuint8mf2_t value, size_t vl) {
  return vsoxei64_v_u8mf2_m(mask, base, bindex, value, vl);
}
3492
3493 //
3494 // CHECK-RV64-LABEL: @test_vsoxei64_v_u8m1_m(
3495 // CHECK-RV64-NEXT: entry:
3496 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
3497 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3498 // CHECK-RV64-NEXT: ret void
3499 //
// Masked vsoxei64 ordered indexed store, u8m1 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei64_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint64m8_t bindex,
                            vuint8m1_t value, size_t vl) {
  return vsoxei64_v_u8m1_m(mask, base, bindex, value, vl);
}
3504
3505 //
3506 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16mf4_m(
3507 // CHECK-RV64-NEXT: entry:
3508 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
3509 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3510 // CHECK-RV64-NEXT: ret void
3511 //
// Masked vsoxei8 ordered indexed store, u16mf4 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei8_v_u16mf4_m(vbool64_t mask, uint16_t *base, vuint8mf8_t bindex,
                             vuint16mf4_t value, size_t vl) {
  return vsoxei8_v_u16mf4_m(mask, base, bindex, value, vl);
}
3516
3517 //
3518 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16mf2_m(
3519 // CHECK-RV64-NEXT: entry:
3520 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
3521 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3522 // CHECK-RV64-NEXT: ret void
3523 //
// Masked vsoxei8 ordered indexed store, u16mf2 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei8_v_u16mf2_m(vbool32_t mask, uint16_t *base, vuint8mf4_t bindex,
                             vuint16mf2_t value, size_t vl) {
  return vsoxei8_v_u16mf2_m(mask, base, bindex, value, vl);
}
3528
3529 //
3530 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16m1_m(
3531 // CHECK-RV64-NEXT: entry:
3532 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
3533 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3534 // CHECK-RV64-NEXT: ret void
3535 //
// Masked vsoxei8 ordered indexed store, u16m1 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei8_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint8mf2_t bindex,
                            vuint16m1_t value, size_t vl) {
  return vsoxei8_v_u16m1_m(mask, base, bindex, value, vl);
}
3540
3541 //
3542 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16m2_m(
3543 // CHECK-RV64-NEXT: entry:
3544 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
3545 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3546 // CHECK-RV64-NEXT: ret void
3547 //
// Masked vsoxei8 ordered indexed store, u16m2 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei8_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint8m1_t bindex,
                            vuint16m2_t value, size_t vl) {
  return vsoxei8_v_u16m2_m(mask, base, bindex, value, vl);
}
3552
3553 //
3554 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16m4_m(
3555 // CHECK-RV64-NEXT: entry:
3556 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
3557 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i8.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3558 // CHECK-RV64-NEXT: ret void
3559 //
// Masked vsoxei8 ordered indexed store, u16m4 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei8_v_u16m4_m(vbool4_t mask, uint16_t *base, vuint8m2_t bindex,
                            vuint16m4_t value, size_t vl) {
  return vsoxei8_v_u16m4_m(mask, base, bindex, value, vl);
}
3564
3565 //
3566 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16m8_m(
3567 // CHECK-RV64-NEXT: entry:
3568 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
3569 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i8.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3570 // CHECK-RV64-NEXT: ret void
3571 //
// Masked vsoxei8 ordered indexed store, u16m8 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei8_v_u16m8_m(vbool2_t mask, uint16_t *base, vuint8m4_t bindex,
                            vuint16m8_t value, size_t vl) {
  return vsoxei8_v_u16m8_m(mask, base, bindex, value, vl);
}
3576
3577 //
3578 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16mf4_m(
3579 // CHECK-RV64-NEXT: entry:
3580 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
3581 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3582 // CHECK-RV64-NEXT: ret void
3583 //
// Masked vsoxei16 ordered indexed store, u16mf4 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei16_v_u16mf4_m(vbool64_t mask, uint16_t *base,
                              vuint16mf4_t bindex, vuint16mf4_t value,
                              size_t vl) {
  return vsoxei16_v_u16mf4_m(mask, base, bindex, value, vl);
}
3589
3590 //
3591 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16mf2_m(
3592 // CHECK-RV64-NEXT: entry:
3593 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
3594 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3595 // CHECK-RV64-NEXT: ret void
3596 //
// Masked vsoxei16 ordered indexed store, u16mf2 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei16_v_u16mf2_m(vbool32_t mask, uint16_t *base,
                              vuint16mf2_t bindex, vuint16mf2_t value,
                              size_t vl) {
  return vsoxei16_v_u16mf2_m(mask, base, bindex, value, vl);
}
3602
3603 //
3604 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16m1_m(
3605 // CHECK-RV64-NEXT: entry:
3606 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
3607 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3608 // CHECK-RV64-NEXT: ret void
3609 //
// Masked vsoxei16 ordered indexed store, u16m1 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei16_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint16m1_t bindex,
                             vuint16m1_t value, size_t vl) {
  return vsoxei16_v_u16m1_m(mask, base, bindex, value, vl);
}
3614
3615 //
3616 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16m2_m(
3617 // CHECK-RV64-NEXT: entry:
3618 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
3619 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3620 // CHECK-RV64-NEXT: ret void
3621 //
// Masked vsoxei16 ordered indexed store, u16m2 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei16_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint16m2_t bindex,
                             vuint16m2_t value, size_t vl) {
  return vsoxei16_v_u16m2_m(mask, base, bindex, value, vl);
}
3626
3627 //
3628 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16m4_m(
3629 // CHECK-RV64-NEXT: entry:
3630 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
3631 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3632 // CHECK-RV64-NEXT: ret void
3633 //
// Masked vsoxei16 ordered indexed store, u16m4 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei16_v_u16m4_m(vbool4_t mask, uint16_t *base, vuint16m4_t bindex,
                             vuint16m4_t value, size_t vl) {
  return vsoxei16_v_u16m4_m(mask, base, bindex, value, vl);
}
3638
3639 //
3640 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16m8_m(
3641 // CHECK-RV64-NEXT: entry:
3642 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
3643 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3644 // CHECK-RV64-NEXT: ret void
3645 //
// Masked vsoxei16 ordered indexed store, u16m8 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei16_v_u16m8_m(vbool2_t mask, uint16_t *base, vuint16m8_t bindex,
                             vuint16m8_t value, size_t vl) {
  return vsoxei16_v_u16m8_m(mask, base, bindex, value, vl);
}
3650
3651 //
3652 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16mf4_m(
3653 // CHECK-RV64-NEXT: entry:
3654 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
3655 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3656 // CHECK-RV64-NEXT: ret void
3657 //
// Masked vsoxei32 ordered indexed store, u16mf4 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei32_v_u16mf4_m(vbool64_t mask, uint16_t *base,
                              vuint32mf2_t bindex, vuint16mf4_t value,
                              size_t vl) {
  return vsoxei32_v_u16mf4_m(mask, base, bindex, value, vl);
}
3663
3664 //
3665 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16mf2_m(
3666 // CHECK-RV64-NEXT: entry:
3667 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
3668 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3669 // CHECK-RV64-NEXT: ret void
3670 //
// Masked vsoxei32 ordered indexed store, u16mf2 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei32_v_u16mf2_m(vbool32_t mask, uint16_t *base,
                              vuint32m1_t bindex, vuint16mf2_t value,
                              size_t vl) {
  return vsoxei32_v_u16mf2_m(mask, base, bindex, value, vl);
}
3676
3677 //
3678 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16m1_m(
3679 // CHECK-RV64-NEXT: entry:
3680 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
3681 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3682 // CHECK-RV64-NEXT: ret void
3683 //
// Masked vsoxei32 ordered indexed store, u16m1 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei32_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint32m2_t bindex,
                             vuint16m1_t value, size_t vl) {
  return vsoxei32_v_u16m1_m(mask, base, bindex, value, vl);
}
3688
3689 //
3690 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16m2_m(
3691 // CHECK-RV64-NEXT: entry:
3692 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
3693 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3694 // CHECK-RV64-NEXT: ret void
3695 //
// Masked vsoxei32 ordered indexed store, u16m2 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei32_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint32m4_t bindex,
                             vuint16m2_t value, size_t vl) {
  return vsoxei32_v_u16m2_m(mask, base, bindex, value, vl);
}
3700
3701 //
3702 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16m4_m(
3703 // CHECK-RV64-NEXT: entry:
3704 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
3705 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i32.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3706 // CHECK-RV64-NEXT: ret void
3707 //
// Masked vsoxei32 ordered indexed store, u16m4 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei32_v_u16m4_m(vbool4_t mask, uint16_t *base, vuint32m8_t bindex,
                             vuint16m4_t value, size_t vl) {
  return vsoxei32_v_u16m4_m(mask, base, bindex, value, vl);
}
3712
3713 //
3714 // CHECK-RV64-LABEL: @test_vsoxei64_v_u16mf4_m(
3715 // CHECK-RV64-NEXT: entry:
3716 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
3717 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3718 // CHECK-RV64-NEXT: ret void
3719 //
// Masked vsoxei64 ordered indexed store, u16mf4 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei64_v_u16mf4_m(vbool64_t mask, uint16_t *base,
                              vuint64m1_t bindex, vuint16mf4_t value,
                              size_t vl) {
  return vsoxei64_v_u16mf4_m(mask, base, bindex, value, vl);
}
3725
3726 //
3727 // CHECK-RV64-LABEL: @test_vsoxei64_v_u16mf2_m(
3728 // CHECK-RV64-NEXT: entry:
3729 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
3730 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3731 // CHECK-RV64-NEXT: ret void
3732 //
// Masked vsoxei64 ordered indexed store, u16mf2 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei64_v_u16mf2_m(vbool32_t mask, uint16_t *base,
                              vuint64m2_t bindex, vuint16mf2_t value,
                              size_t vl) {
  return vsoxei64_v_u16mf2_m(mask, base, bindex, value, vl);
}
3738
3739 //
3740 // CHECK-RV64-LABEL: @test_vsoxei64_v_u16m1_m(
3741 // CHECK-RV64-NEXT: entry:
3742 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
3743 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3744 // CHECK-RV64-NEXT: ret void
3745 //
// Masked vsoxei64 ordered indexed store, u16m1 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei64_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint64m4_t bindex,
                             vuint16m1_t value, size_t vl) {
  return vsoxei64_v_u16m1_m(mask, base, bindex, value, vl);
}
3750
3751 //
3752 // CHECK-RV64-LABEL: @test_vsoxei64_v_u16m2_m(
3753 // CHECK-RV64-NEXT: entry:
3754 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
3755 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3756 // CHECK-RV64-NEXT: ret void
3757 //
// Masked vsoxei64 ordered indexed store, u16m2 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei64_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint64m8_t bindex,
                             vuint16m2_t value, size_t vl) {
  return vsoxei64_v_u16m2_m(mask, base, bindex, value, vl);
}
3762
3763 //
3764 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32mf2_m(
3765 // CHECK-RV64-NEXT: entry:
3766 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
3767 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3768 // CHECK-RV64-NEXT: ret void
3769 //
// Masked vsoxei8 ordered indexed store, u32mf2 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei8_v_u32mf2_m(vbool64_t mask, uint32_t *base, vuint8mf8_t bindex,
                             vuint32mf2_t value, size_t vl) {
  return vsoxei8_v_u32mf2_m(mask, base, bindex, value, vl);
}
3774
3775 //
3776 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32m1_m(
3777 // CHECK-RV64-NEXT: entry:
3778 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
3779 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3780 // CHECK-RV64-NEXT: ret void
3781 //
// Masked vsoxei8 ordered indexed store, u32m1 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei8_v_u32m1_m(vbool32_t mask, uint32_t *base, vuint8mf4_t bindex,
                            vuint32m1_t value, size_t vl) {
  return vsoxei8_v_u32m1_m(mask, base, bindex, value, vl);
}
3786
3787 //
3788 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32m2_m(
3789 // CHECK-RV64-NEXT: entry:
3790 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
3791 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3792 // CHECK-RV64-NEXT: ret void
3793 //
// Masked vsoxei8 ordered indexed store, u32m2 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei8_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint8mf2_t bindex,
                            vuint32m2_t value, size_t vl) {
  return vsoxei8_v_u32m2_m(mask, base, bindex, value, vl);
}
3798
3799 //
3800 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32m4_m(
3801 // CHECK-RV64-NEXT: entry:
3802 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
3803 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i8.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3804 // CHECK-RV64-NEXT: ret void
3805 //
// Masked vsoxei8 ordered indexed store, u32m4 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei8_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint8m1_t bindex,
                            vuint32m4_t value, size_t vl) {
  return vsoxei8_v_u32m4_m(mask, base, bindex, value, vl);
}
3810
3811 //
3812 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32m8_m(
3813 // CHECK-RV64-NEXT: entry:
3814 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
3815 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i8.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3816 // CHECK-RV64-NEXT: ret void
3817 //
// Masked vsoxei8 ordered indexed store, u32m8 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei8_v_u32m8_m(vbool4_t mask, uint32_t *base, vuint8m2_t bindex,
                            vuint32m8_t value, size_t vl) {
  return vsoxei8_v_u32m8_m(mask, base, bindex, value, vl);
}
3822
3823 //
3824 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32mf2_m(
3825 // CHECK-RV64-NEXT: entry:
3826 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
3827 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3828 // CHECK-RV64-NEXT: ret void
3829 //
// Masked vsoxei16 ordered indexed store, u32mf2 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei16_v_u32mf2_m(vbool64_t mask, uint32_t *base,
                              vuint16mf4_t bindex, vuint32mf2_t value,
                              size_t vl) {
  return vsoxei16_v_u32mf2_m(mask, base, bindex, value, vl);
}
3835
3836 //
3837 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32m1_m(
3838 // CHECK-RV64-NEXT: entry:
3839 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
3840 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3841 // CHECK-RV64-NEXT: ret void
3842 //
// Masked vsoxei16 ordered indexed store, u32m1 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei16_v_u32m1_m(vbool32_t mask, uint32_t *base,
                             vuint16mf2_t bindex, vuint32m1_t value,
                             size_t vl) {
  return vsoxei16_v_u32m1_m(mask, base, bindex, value, vl);
}
3848
3849 //
3850 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32m2_m(
3851 // CHECK-RV64-NEXT: entry:
3852 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
3853 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3854 // CHECK-RV64-NEXT: ret void
3855 //
// Masked vsoxei16 ordered indexed store, u32m2 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei16_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint16m1_t bindex,
                             vuint32m2_t value, size_t vl) {
  return vsoxei16_v_u32m2_m(mask, base, bindex, value, vl);
}
3860
3861 //
3862 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32m4_m(
3863 // CHECK-RV64-NEXT: entry:
3864 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
3865 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3866 // CHECK-RV64-NEXT: ret void
3867 //
// Masked vsoxei16 ordered indexed store, u32m4 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei16_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint16m2_t bindex,
                             vuint32m4_t value, size_t vl) {
  return vsoxei16_v_u32m4_m(mask, base, bindex, value, vl);
}
3872
3873 //
3874 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32m8_m(
3875 // CHECK-RV64-NEXT: entry:
3876 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
3877 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i16.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3878 // CHECK-RV64-NEXT: ret void
3879 //
// Masked vsoxei16 ordered indexed store, u32m8 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei16_v_u32m8_m(vbool4_t mask, uint32_t *base, vuint16m4_t bindex,
                             vuint32m8_t value, size_t vl) {
  return vsoxei16_v_u32m8_m(mask, base, bindex, value, vl);
}
3884
3885 //
3886 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32mf2_m(
3887 // CHECK-RV64-NEXT: entry:
3888 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
3889 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3890 // CHECK-RV64-NEXT: ret void
3891 //
// Masked vsoxei32 ordered indexed store, u32mf2 data; IR pinned by the autogenerated CHECK lines above.
void test_vsoxei32_v_u32mf2_m(vbool64_t mask, uint32_t *base,
                              vuint32mf2_t bindex, vuint32mf2_t value,
                              size_t vl) {
  return vsoxei32_v_u32mf2_m(mask, base, bindex, value, vl);
}
3897
3898 //
3899 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32m1_m(
3900 // CHECK-RV64-NEXT: entry:
3901 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
3902 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3903 // CHECK-RV64-NEXT: ret void
3904 //
test_vsoxei32_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint32m1_t bindex,vuint32m1_t value,size_t vl)3905 void test_vsoxei32_v_u32m1_m(vbool32_t mask, uint32_t *base, vuint32m1_t bindex,
3906 vuint32m1_t value, size_t vl) {
3907 return vsoxei32_v_u32m1_m(mask, base, bindex, value, vl);
3908 }
3909
3910 //
3911 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32m2_m(
3912 // CHECK-RV64-NEXT: entry:
3913 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
3914 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3915 // CHECK-RV64-NEXT: ret void
3916 //
test_vsoxei32_v_u32m2_m(vbool16_t mask,uint32_t * base,vuint32m2_t bindex,vuint32m2_t value,size_t vl)3917 void test_vsoxei32_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint32m2_t bindex,
3918 vuint32m2_t value, size_t vl) {
3919 return vsoxei32_v_u32m2_m(mask, base, bindex, value, vl);
3920 }
3921
3922 //
3923 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32m4_m(
3924 // CHECK-RV64-NEXT: entry:
3925 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
3926 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3927 // CHECK-RV64-NEXT: ret void
3928 //
test_vsoxei32_v_u32m4_m(vbool8_t mask,uint32_t * base,vuint32m4_t bindex,vuint32m4_t value,size_t vl)3929 void test_vsoxei32_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint32m4_t bindex,
3930 vuint32m4_t value, size_t vl) {
3931 return vsoxei32_v_u32m4_m(mask, base, bindex, value, vl);
3932 }
3933
3934 //
3935 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32m8_m(
3936 // CHECK-RV64-NEXT: entry:
3937 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
3938 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3939 // CHECK-RV64-NEXT: ret void
3940 //
test_vsoxei32_v_u32m8_m(vbool4_t mask,uint32_t * base,vuint32m8_t bindex,vuint32m8_t value,size_t vl)3941 void test_vsoxei32_v_u32m8_m(vbool4_t mask, uint32_t *base, vuint32m8_t bindex,
3942 vuint32m8_t value, size_t vl) {
3943 return vsoxei32_v_u32m8_m(mask, base, bindex, value, vl);
3944 }
3945
// Masked vsoxei64 indexed-store intrinsics for u32 elements (64-bit indices;
// mf2 through m4 — m8 is not expressible since the index LMUL would exceed 8).
// Assertions are autogenerated (update_cc_test_checks.py) — do not hand-edit.
3946 //
3947 // CHECK-RV64-LABEL: @test_vsoxei64_v_u32mf2_m(
3948 // CHECK-RV64-NEXT: entry:
3949 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
3950 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3951 // CHECK-RV64-NEXT: ret void
3952 //
test_vsoxei64_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint64m1_t bindex,vuint32mf2_t value,size_t vl)3953 void test_vsoxei64_v_u32mf2_m(vbool64_t mask, uint32_t *base,
3954 vuint64m1_t bindex, vuint32mf2_t value,
3955 size_t vl) {
3956 return vsoxei64_v_u32mf2_m(mask, base, bindex, value, vl);
3957 }
3958
3959 //
3960 // CHECK-RV64-LABEL: @test_vsoxei64_v_u32m1_m(
3961 // CHECK-RV64-NEXT: entry:
3962 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
3963 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3964 // CHECK-RV64-NEXT: ret void
3965 //
test_vsoxei64_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint64m2_t bindex,vuint32m1_t value,size_t vl)3966 void test_vsoxei64_v_u32m1_m(vbool32_t mask, uint32_t *base, vuint64m2_t bindex,
3967 vuint32m1_t value, size_t vl) {
3968 return vsoxei64_v_u32m1_m(mask, base, bindex, value, vl);
3969 }
3970
3971 //
3972 // CHECK-RV64-LABEL: @test_vsoxei64_v_u32m2_m(
3973 // CHECK-RV64-NEXT: entry:
3974 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
3975 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3976 // CHECK-RV64-NEXT: ret void
3977 //
test_vsoxei64_v_u32m2_m(vbool16_t mask,uint32_t * base,vuint64m4_t bindex,vuint32m2_t value,size_t vl)3978 void test_vsoxei64_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint64m4_t bindex,
3979 vuint32m2_t value, size_t vl) {
3980 return vsoxei64_v_u32m2_m(mask, base, bindex, value, vl);
3981 }
3982
3983 //
3984 // CHECK-RV64-LABEL: @test_vsoxei64_v_u32m4_m(
3985 // CHECK-RV64-NEXT: entry:
3986 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
3987 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3988 // CHECK-RV64-NEXT: ret void
3989 //
test_vsoxei64_v_u32m4_m(vbool8_t mask,uint32_t * base,vuint64m8_t bindex,vuint32m4_t value,size_t vl)3990 void test_vsoxei64_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint64m8_t bindex,
3991 vuint32m4_t value, size_t vl) {
3992 return vsoxei64_v_u32m4_m(mask, base, bindex, value, vl);
3993 }
3994
// Masked vsoxei{8,16,32,64} indexed-store intrinsics for u64 elements
// (index EEW 8 through 64; data LMUL m1 through m8).
// Assertions are autogenerated (update_cc_test_checks.py) — do not hand-edit.
3995 //
3996 // CHECK-RV64-LABEL: @test_vsoxei8_v_u64m1_m(
3997 // CHECK-RV64-NEXT: entry:
3998 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
3999 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4000 // CHECK-RV64-NEXT: ret void
4001 //
test_vsoxei8_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint8mf8_t bindex,vuint64m1_t value,size_t vl)4002 void test_vsoxei8_v_u64m1_m(vbool64_t mask, uint64_t *base, vuint8mf8_t bindex,
4003 vuint64m1_t value, size_t vl) {
4004 return vsoxei8_v_u64m1_m(mask, base, bindex, value, vl);
4005 }
4006
4007 //
4008 // CHECK-RV64-LABEL: @test_vsoxei8_v_u64m2_m(
4009 // CHECK-RV64-NEXT: entry:
4010 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
4011 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4012 // CHECK-RV64-NEXT: ret void
4013 //
test_vsoxei8_v_u64m2_m(vbool32_t mask,uint64_t * base,vuint8mf4_t bindex,vuint64m2_t value,size_t vl)4014 void test_vsoxei8_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint8mf4_t bindex,
4015 vuint64m2_t value, size_t vl) {
4016 return vsoxei8_v_u64m2_m(mask, base, bindex, value, vl);
4017 }
4018
4019 //
4020 // CHECK-RV64-LABEL: @test_vsoxei8_v_u64m4_m(
4021 // CHECK-RV64-NEXT: entry:
4022 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
4023 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4024 // CHECK-RV64-NEXT: ret void
4025 //
test_vsoxei8_v_u64m4_m(vbool16_t mask,uint64_t * base,vuint8mf2_t bindex,vuint64m4_t value,size_t vl)4026 void test_vsoxei8_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint8mf2_t bindex,
4027 vuint64m4_t value, size_t vl) {
4028 return vsoxei8_v_u64m4_m(mask, base, bindex, value, vl);
4029 }
4030
4031 //
4032 // CHECK-RV64-LABEL: @test_vsoxei8_v_u64m8_m(
4033 // CHECK-RV64-NEXT: entry:
4034 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
4035 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i8.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4036 // CHECK-RV64-NEXT: ret void
4037 //
test_vsoxei8_v_u64m8_m(vbool8_t mask,uint64_t * base,vuint8m1_t bindex,vuint64m8_t value,size_t vl)4038 void test_vsoxei8_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint8m1_t bindex,
4039 vuint64m8_t value, size_t vl) {
4040 return vsoxei8_v_u64m8_m(mask, base, bindex, value, vl);
4041 }
4042
4043 //
4044 // CHECK-RV64-LABEL: @test_vsoxei16_v_u64m1_m(
4045 // CHECK-RV64-NEXT: entry:
4046 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
4047 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4048 // CHECK-RV64-NEXT: ret void
4049 //
test_vsoxei16_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint16mf4_t bindex,vuint64m1_t value,size_t vl)4050 void test_vsoxei16_v_u64m1_m(vbool64_t mask, uint64_t *base,
4051 vuint16mf4_t bindex, vuint64m1_t value,
4052 size_t vl) {
4053 return vsoxei16_v_u64m1_m(mask, base, bindex, value, vl);
4054 }
4055
4056 //
4057 // CHECK-RV64-LABEL: @test_vsoxei16_v_u64m2_m(
4058 // CHECK-RV64-NEXT: entry:
4059 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
4060 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4061 // CHECK-RV64-NEXT: ret void
4062 //
test_vsoxei16_v_u64m2_m(vbool32_t mask,uint64_t * base,vuint16mf2_t bindex,vuint64m2_t value,size_t vl)4063 void test_vsoxei16_v_u64m2_m(vbool32_t mask, uint64_t *base,
4064 vuint16mf2_t bindex, vuint64m2_t value,
4065 size_t vl) {
4066 return vsoxei16_v_u64m2_m(mask, base, bindex, value, vl);
4067 }
4068
4069 //
4070 // CHECK-RV64-LABEL: @test_vsoxei16_v_u64m4_m(
4071 // CHECK-RV64-NEXT: entry:
4072 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
4073 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4074 // CHECK-RV64-NEXT: ret void
4075 //
test_vsoxei16_v_u64m4_m(vbool16_t mask,uint64_t * base,vuint16m1_t bindex,vuint64m4_t value,size_t vl)4076 void test_vsoxei16_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint16m1_t bindex,
4077 vuint64m4_t value, size_t vl) {
4078 return vsoxei16_v_u64m4_m(mask, base, bindex, value, vl);
4079 }
4080
4081 //
4082 // CHECK-RV64-LABEL: @test_vsoxei16_v_u64m8_m(
4083 // CHECK-RV64-NEXT: entry:
4084 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
4085 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i16.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4086 // CHECK-RV64-NEXT: ret void
4087 //
test_vsoxei16_v_u64m8_m(vbool8_t mask,uint64_t * base,vuint16m2_t bindex,vuint64m8_t value,size_t vl)4088 void test_vsoxei16_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint16m2_t bindex,
4089 vuint64m8_t value, size_t vl) {
4090 return vsoxei16_v_u64m8_m(mask, base, bindex, value, vl);
4091 }
4092
4093 //
4094 // CHECK-RV64-LABEL: @test_vsoxei32_v_u64m1_m(
4095 // CHECK-RV64-NEXT: entry:
4096 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
4097 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4098 // CHECK-RV64-NEXT: ret void
4099 //
test_vsoxei32_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint32mf2_t bindex,vuint64m1_t value,size_t vl)4100 void test_vsoxei32_v_u64m1_m(vbool64_t mask, uint64_t *base,
4101 vuint32mf2_t bindex, vuint64m1_t value,
4102 size_t vl) {
4103 return vsoxei32_v_u64m1_m(mask, base, bindex, value, vl);
4104 }
4105
4106 //
4107 // CHECK-RV64-LABEL: @test_vsoxei32_v_u64m2_m(
4108 // CHECK-RV64-NEXT: entry:
4109 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
4110 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4111 // CHECK-RV64-NEXT: ret void
4112 //
test_vsoxei32_v_u64m2_m(vbool32_t mask,uint64_t * base,vuint32m1_t bindex,vuint64m2_t value,size_t vl)4113 void test_vsoxei32_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint32m1_t bindex,
4114 vuint64m2_t value, size_t vl) {
4115 return vsoxei32_v_u64m2_m(mask, base, bindex, value, vl);
4116 }
4117
4118 //
4119 // CHECK-RV64-LABEL: @test_vsoxei32_v_u64m4_m(
4120 // CHECK-RV64-NEXT: entry:
4121 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
4122 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4123 // CHECK-RV64-NEXT: ret void
4124 //
test_vsoxei32_v_u64m4_m(vbool16_t mask,uint64_t * base,vuint32m2_t bindex,vuint64m4_t value,size_t vl)4125 void test_vsoxei32_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint32m2_t bindex,
4126 vuint64m4_t value, size_t vl) {
4127 return vsoxei32_v_u64m4_m(mask, base, bindex, value, vl);
4128 }
4129
4130 //
4131 // CHECK-RV64-LABEL: @test_vsoxei32_v_u64m8_m(
4132 // CHECK-RV64-NEXT: entry:
4133 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
4134 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i32.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4135 // CHECK-RV64-NEXT: ret void
4136 //
test_vsoxei32_v_u64m8_m(vbool8_t mask,uint64_t * base,vuint32m4_t bindex,vuint64m8_t value,size_t vl)4137 void test_vsoxei32_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint32m4_t bindex,
4138 vuint64m8_t value, size_t vl) {
4139 return vsoxei32_v_u64m8_m(mask, base, bindex, value, vl);
4140 }
4141
4142 //
4143 // CHECK-RV64-LABEL: @test_vsoxei64_v_u64m1_m(
4144 // CHECK-RV64-NEXT: entry:
4145 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
4146 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4147 // CHECK-RV64-NEXT: ret void
4148 //
test_vsoxei64_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint64m1_t bindex,vuint64m1_t value,size_t vl)4149 void test_vsoxei64_v_u64m1_m(vbool64_t mask, uint64_t *base, vuint64m1_t bindex,
4150 vuint64m1_t value, size_t vl) {
4151 return vsoxei64_v_u64m1_m(mask, base, bindex, value, vl);
4152 }
4153
4154 //
4155 // CHECK-RV64-LABEL: @test_vsoxei64_v_u64m2_m(
4156 // CHECK-RV64-NEXT: entry:
4157 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
4158 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4159 // CHECK-RV64-NEXT: ret void
4160 //
test_vsoxei64_v_u64m2_m(vbool32_t mask,uint64_t * base,vuint64m2_t bindex,vuint64m2_t value,size_t vl)4161 void test_vsoxei64_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint64m2_t bindex,
4162 vuint64m2_t value, size_t vl) {
4163 return vsoxei64_v_u64m2_m(mask, base, bindex, value, vl);
4164 }
4165
4166 //
4167 // CHECK-RV64-LABEL: @test_vsoxei64_v_u64m4_m(
4168 // CHECK-RV64-NEXT: entry:
4169 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
4170 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4171 // CHECK-RV64-NEXT: ret void
4172 //
test_vsoxei64_v_u64m4_m(vbool16_t mask,uint64_t * base,vuint64m4_t bindex,vuint64m4_t value,size_t vl)4173 void test_vsoxei64_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint64m4_t bindex,
4174 vuint64m4_t value, size_t vl) {
4175 return vsoxei64_v_u64m4_m(mask, base, bindex, value, vl);
4176 }
4177
4178 //
4179 // CHECK-RV64-LABEL: @test_vsoxei64_v_u64m8_m(
4180 // CHECK-RV64-NEXT: entry:
4181 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
4182 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4183 // CHECK-RV64-NEXT: ret void
4184 //
test_vsoxei64_v_u64m8_m(vbool8_t mask,uint64_t * base,vuint64m8_t bindex,vuint64m8_t value,size_t vl)4185 void test_vsoxei64_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint64m8_t bindex,
4186 vuint64m8_t value, size_t vl) {
4187 return vsoxei64_v_u64m8_m(mask, base, bindex, value, vl);
4188 }
4189
// Masked vsoxei{8,16,32,64} indexed-store intrinsics for f32 elements.
// Assertions are autogenerated (update_cc_test_checks.py) — do not hand-edit.
4190 //
4191 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32mf2_m(
4192 // CHECK-RV64-NEXT: entry:
4193 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
4194 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4195 // CHECK-RV64-NEXT: ret void
4196 //
test_vsoxei8_v_f32mf2_m(vbool64_t mask,float * base,vuint8mf8_t bindex,vfloat32mf2_t value,size_t vl)4197 void test_vsoxei8_v_f32mf2_m(vbool64_t mask, float *base, vuint8mf8_t bindex,
4198 vfloat32mf2_t value, size_t vl) {
4199 return vsoxei8_v_f32mf2_m(mask, base, bindex, value, vl);
4200 }
4201
4202 //
4203 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32m1_m(
4204 // CHECK-RV64-NEXT: entry:
4205 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
4206 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4207 // CHECK-RV64-NEXT: ret void
4208 //
test_vsoxei8_v_f32m1_m(vbool32_t mask,float * base,vuint8mf4_t bindex,vfloat32m1_t value,size_t vl)4209 void test_vsoxei8_v_f32m1_m(vbool32_t mask, float *base, vuint8mf4_t bindex,
4210 vfloat32m1_t value, size_t vl) {
4211 return vsoxei8_v_f32m1_m(mask, base, bindex, value, vl);
4212 }
4213
4214 //
4215 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32m2_m(
4216 // CHECK-RV64-NEXT: entry:
4217 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
4218 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i8.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4219 // CHECK-RV64-NEXT: ret void
4220 //
test_vsoxei8_v_f32m2_m(vbool16_t mask,float * base,vuint8mf2_t bindex,vfloat32m2_t value,size_t vl)4221 void test_vsoxei8_v_f32m2_m(vbool16_t mask, float *base, vuint8mf2_t bindex,
4222 vfloat32m2_t value, size_t vl) {
4223 return vsoxei8_v_f32m2_m(mask, base, bindex, value, vl);
4224 }
4225
4226 //
4227 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32m4_m(
4228 // CHECK-RV64-NEXT: entry:
4229 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
4230 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i8.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4231 // CHECK-RV64-NEXT: ret void
4232 //
test_vsoxei8_v_f32m4_m(vbool8_t mask,float * base,vuint8m1_t bindex,vfloat32m4_t value,size_t vl)4233 void test_vsoxei8_v_f32m4_m(vbool8_t mask, float *base, vuint8m1_t bindex,
4234 vfloat32m4_t value, size_t vl) {
4235 return vsoxei8_v_f32m4_m(mask, base, bindex, value, vl);
4236 }
4237
4238 //
4239 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32m8_m(
4240 // CHECK-RV64-NEXT: entry:
4241 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
4242 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i8.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4243 // CHECK-RV64-NEXT: ret void
4244 //
test_vsoxei8_v_f32m8_m(vbool4_t mask,float * base,vuint8m2_t bindex,vfloat32m8_t value,size_t vl)4245 void test_vsoxei8_v_f32m8_m(vbool4_t mask, float *base, vuint8m2_t bindex,
4246 vfloat32m8_t value, size_t vl) {
4247 return vsoxei8_v_f32m8_m(mask, base, bindex, value, vl);
4248 }
4249
4250 //
4251 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32mf2_m(
4252 // CHECK-RV64-NEXT: entry:
4253 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
4254 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4255 // CHECK-RV64-NEXT: ret void
4256 //
test_vsoxei16_v_f32mf2_m(vbool64_t mask,float * base,vuint16mf4_t bindex,vfloat32mf2_t value,size_t vl)4257 void test_vsoxei16_v_f32mf2_m(vbool64_t mask, float *base, vuint16mf4_t bindex,
4258 vfloat32mf2_t value, size_t vl) {
4259 return vsoxei16_v_f32mf2_m(mask, base, bindex, value, vl);
4260 }
4261
4262 //
4263 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32m1_m(
4264 // CHECK-RV64-NEXT: entry:
4265 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
4266 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4267 // CHECK-RV64-NEXT: ret void
4268 //
test_vsoxei16_v_f32m1_m(vbool32_t mask,float * base,vuint16mf2_t bindex,vfloat32m1_t value,size_t vl)4269 void test_vsoxei16_v_f32m1_m(vbool32_t mask, float *base, vuint16mf2_t bindex,
4270 vfloat32m1_t value, size_t vl) {
4271 return vsoxei16_v_f32m1_m(mask, base, bindex, value, vl);
4272 }
4273
4274 //
4275 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32m2_m(
4276 // CHECK-RV64-NEXT: entry:
4277 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
4278 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i16.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4279 // CHECK-RV64-NEXT: ret void
4280 //
test_vsoxei16_v_f32m2_m(vbool16_t mask,float * base,vuint16m1_t bindex,vfloat32m2_t value,size_t vl)4281 void test_vsoxei16_v_f32m2_m(vbool16_t mask, float *base, vuint16m1_t bindex,
4282 vfloat32m2_t value, size_t vl) {
4283 return vsoxei16_v_f32m2_m(mask, base, bindex, value, vl);
4284 }
4285
4286 //
4287 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32m4_m(
4288 // CHECK-RV64-NEXT: entry:
4289 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
4290 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i16.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4291 // CHECK-RV64-NEXT: ret void
4292 //
test_vsoxei16_v_f32m4_m(vbool8_t mask,float * base,vuint16m2_t bindex,vfloat32m4_t value,size_t vl)4293 void test_vsoxei16_v_f32m4_m(vbool8_t mask, float *base, vuint16m2_t bindex,
4294 vfloat32m4_t value, size_t vl) {
4295 return vsoxei16_v_f32m4_m(mask, base, bindex, value, vl);
4296 }
4297
4298 //
4299 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32m8_m(
4300 // CHECK-RV64-NEXT: entry:
4301 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
4302 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i16.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4303 // CHECK-RV64-NEXT: ret void
4304 //
test_vsoxei16_v_f32m8_m(vbool4_t mask,float * base,vuint16m4_t bindex,vfloat32m8_t value,size_t vl)4305 void test_vsoxei16_v_f32m8_m(vbool4_t mask, float *base, vuint16m4_t bindex,
4306 vfloat32m8_t value, size_t vl) {
4307 return vsoxei16_v_f32m8_m(mask, base, bindex, value, vl);
4308 }
4309
4310 //
4311 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32mf2_m(
4312 // CHECK-RV64-NEXT: entry:
4313 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
4314 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4315 // CHECK-RV64-NEXT: ret void
4316 //
test_vsoxei32_v_f32mf2_m(vbool64_t mask,float * base,vuint32mf2_t bindex,vfloat32mf2_t value,size_t vl)4317 void test_vsoxei32_v_f32mf2_m(vbool64_t mask, float *base, vuint32mf2_t bindex,
4318 vfloat32mf2_t value, size_t vl) {
4319 return vsoxei32_v_f32mf2_m(mask, base, bindex, value, vl);
4320 }
4321
4322 //
4323 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32m1_m(
4324 // CHECK-RV64-NEXT: entry:
4325 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
4326 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4327 // CHECK-RV64-NEXT: ret void
4328 //
test_vsoxei32_v_f32m1_m(vbool32_t mask,float * base,vuint32m1_t bindex,vfloat32m1_t value,size_t vl)4329 void test_vsoxei32_v_f32m1_m(vbool32_t mask, float *base, vuint32m1_t bindex,
4330 vfloat32m1_t value, size_t vl) {
4331 return vsoxei32_v_f32m1_m(mask, base, bindex, value, vl);
4332 }
4333
4334 //
4335 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32m2_m(
4336 // CHECK-RV64-NEXT: entry:
4337 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
4338 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i32.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4339 // CHECK-RV64-NEXT: ret void
4340 //
test_vsoxei32_v_f32m2_m(vbool16_t mask,float * base,vuint32m2_t bindex,vfloat32m2_t value,size_t vl)4341 void test_vsoxei32_v_f32m2_m(vbool16_t mask, float *base, vuint32m2_t bindex,
4342 vfloat32m2_t value, size_t vl) {
4343 return vsoxei32_v_f32m2_m(mask, base, bindex, value, vl);
4344 }
4345
4346 //
4347 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32m4_m(
4348 // CHECK-RV64-NEXT: entry:
4349 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
4350 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i32.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4351 // CHECK-RV64-NEXT: ret void
4352 //
test_vsoxei32_v_f32m4_m(vbool8_t mask,float * base,vuint32m4_t bindex,vfloat32m4_t value,size_t vl)4353 void test_vsoxei32_v_f32m4_m(vbool8_t mask, float *base, vuint32m4_t bindex,
4354 vfloat32m4_t value, size_t vl) {
4355 return vsoxei32_v_f32m4_m(mask, base, bindex, value, vl);
4356 }
4357
4358 //
4359 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32m8_m(
4360 // CHECK-RV64-NEXT: entry:
4361 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
4362 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i32.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4363 // CHECK-RV64-NEXT: ret void
4364 //
test_vsoxei32_v_f32m8_m(vbool4_t mask,float * base,vuint32m8_t bindex,vfloat32m8_t value,size_t vl)4365 void test_vsoxei32_v_f32m8_m(vbool4_t mask, float *base, vuint32m8_t bindex,
4366 vfloat32m8_t value, size_t vl) {
4367 return vsoxei32_v_f32m8_m(mask, base, bindex, value, vl);
4368 }
4369
4370 //
4371 // CHECK-RV64-LABEL: @test_vsoxei64_v_f32mf2_m(
4372 // CHECK-RV64-NEXT: entry:
4373 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
4374 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4375 // CHECK-RV64-NEXT: ret void
4376 //
test_vsoxei64_v_f32mf2_m(vbool64_t mask,float * base,vuint64m1_t bindex,vfloat32mf2_t value,size_t vl)4377 void test_vsoxei64_v_f32mf2_m(vbool64_t mask, float *base, vuint64m1_t bindex,
4378 vfloat32mf2_t value, size_t vl) {
4379 return vsoxei64_v_f32mf2_m(mask, base, bindex, value, vl);
4380 }
4381
4382 //
4383 // CHECK-RV64-LABEL: @test_vsoxei64_v_f32m1_m(
4384 // CHECK-RV64-NEXT: entry:
4385 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
4386 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4387 // CHECK-RV64-NEXT: ret void
4388 //
test_vsoxei64_v_f32m1_m(vbool32_t mask,float * base,vuint64m2_t bindex,vfloat32m1_t value,size_t vl)4389 void test_vsoxei64_v_f32m1_m(vbool32_t mask, float *base, vuint64m2_t bindex,
4390 vfloat32m1_t value, size_t vl) {
4391 return vsoxei64_v_f32m1_m(mask, base, bindex, value, vl);
4392 }
4393
4394 //
4395 // CHECK-RV64-LABEL: @test_vsoxei64_v_f32m2_m(
4396 // CHECK-RV64-NEXT: entry:
4397 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
4398 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i64.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4399 // CHECK-RV64-NEXT: ret void
4400 //
test_vsoxei64_v_f32m2_m(vbool16_t mask,float * base,vuint64m4_t bindex,vfloat32m2_t value,size_t vl)4401 void test_vsoxei64_v_f32m2_m(vbool16_t mask, float *base, vuint64m4_t bindex,
4402 vfloat32m2_t value, size_t vl) {
4403 return vsoxei64_v_f32m2_m(mask, base, bindex, value, vl);
4404 }
4405
4406 //
4407 // CHECK-RV64-LABEL: @test_vsoxei64_v_f32m4_m(
4408 // CHECK-RV64-NEXT: entry:
4409 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
4410 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i64.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4411 // CHECK-RV64-NEXT: ret void
4412 //
test_vsoxei64_v_f32m4_m(vbool8_t mask,float * base,vuint64m8_t bindex,vfloat32m4_t value,size_t vl)4413 void test_vsoxei64_v_f32m4_m(vbool8_t mask, float *base, vuint64m8_t bindex,
4414 vfloat32m4_t value, size_t vl) {
4415 return vsoxei64_v_f32m4_m(mask, base, bindex, value, vl);
4416 }
4417
//
// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret void
//
// Masked ordered-indexed store: checks that the f64m1 builtin (8-bit indices)
// lowers to @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i8 per the CHECK lines above.
void test_vsoxei8_v_f64m1_m(vbool64_t mask, double *base, vuint8mf8_t bindex,
                            vfloat64m1_t value, size_t vl) {
  return vsoxei8_v_f64m1_m(mask, base, bindex, value, vl);
}
4429
//
// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m2_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i8.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret void
//
// Masked ordered-indexed store: checks that the f64m2 builtin (8-bit indices)
// lowers to @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i8 per the CHECK lines above.
void test_vsoxei8_v_f64m2_m(vbool32_t mask, double *base, vuint8mf4_t bindex,
                            vfloat64m2_t value, size_t vl) {
  return vsoxei8_v_f64m2_m(mask, base, bindex, value, vl);
}
4441
//
// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m4_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i8.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret void
//
// Masked ordered-indexed store: checks that the f64m4 builtin (8-bit indices)
// lowers to @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i8 per the CHECK lines above.
void test_vsoxei8_v_f64m4_m(vbool16_t mask, double *base, vuint8mf2_t bindex,
                            vfloat64m4_t value, size_t vl) {
  return vsoxei8_v_f64m4_m(mask, base, bindex, value, vl);
}
4453
//
// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m8_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i8.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret void
//
// Masked ordered-indexed store: checks that the f64m8 builtin (8-bit indices)
// lowers to @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i8 per the CHECK lines above.
void test_vsoxei8_v_f64m8_m(vbool8_t mask, double *base, vuint8m1_t bindex,
                            vfloat64m8_t value, size_t vl) {
  return vsoxei8_v_f64m8_m(mask, base, bindex, value, vl);
}
4465
//
// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret void
//
// Masked ordered-indexed store: checks that the f64m1 builtin (16-bit indices)
// lowers to @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i16 per the CHECK lines above.
void test_vsoxei16_v_f64m1_m(vbool64_t mask, double *base, vuint16mf4_t bindex,
                             vfloat64m1_t value, size_t vl) {
  return vsoxei16_v_f64m1_m(mask, base, bindex, value, vl);
}
4477
//
// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m2_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i16.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret void
//
// Masked ordered-indexed store: checks that the f64m2 builtin (16-bit indices)
// lowers to @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i16 per the CHECK lines above.
void test_vsoxei16_v_f64m2_m(vbool32_t mask, double *base, vuint16mf2_t bindex,
                             vfloat64m2_t value, size_t vl) {
  return vsoxei16_v_f64m2_m(mask, base, bindex, value, vl);
}
4489
//
// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m4_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i16.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret void
//
// Masked ordered-indexed store: checks that the f64m4 builtin (16-bit indices)
// lowers to @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i16 per the CHECK lines above.
void test_vsoxei16_v_f64m4_m(vbool16_t mask, double *base, vuint16m1_t bindex,
                             vfloat64m4_t value, size_t vl) {
  return vsoxei16_v_f64m4_m(mask, base, bindex, value, vl);
}
4501
//
// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m8_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i16.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret void
//
// Masked ordered-indexed store: checks that the f64m8 builtin (16-bit indices)
// lowers to @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i16 per the CHECK lines above.
void test_vsoxei16_v_f64m8_m(vbool8_t mask, double *base, vuint16m2_t bindex,
                             vfloat64m8_t value, size_t vl) {
  return vsoxei16_v_f64m8_m(mask, base, bindex, value, vl);
}
4513
//
// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret void
//
// Masked ordered-indexed store: checks that the f64m1 builtin (32-bit indices)
// lowers to @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i32 per the CHECK lines above.
void test_vsoxei32_v_f64m1_m(vbool64_t mask, double *base, vuint32mf2_t bindex,
                             vfloat64m1_t value, size_t vl) {
  return vsoxei32_v_f64m1_m(mask, base, bindex, value, vl);
}
4525
//
// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m2_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i32.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret void
//
// Masked ordered-indexed store: checks that the f64m2 builtin (32-bit indices)
// lowers to @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i32 per the CHECK lines above.
void test_vsoxei32_v_f64m2_m(vbool32_t mask, double *base, vuint32m1_t bindex,
                             vfloat64m2_t value, size_t vl) {
  return vsoxei32_v_f64m2_m(mask, base, bindex, value, vl);
}
4537
//
// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m4_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i32.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret void
//
// Masked ordered-indexed store: checks that the f64m4 builtin (32-bit indices)
// lowers to @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i32 per the CHECK lines above.
void test_vsoxei32_v_f64m4_m(vbool16_t mask, double *base, vuint32m2_t bindex,
                             vfloat64m4_t value, size_t vl) {
  return vsoxei32_v_f64m4_m(mask, base, bindex, value, vl);
}
4549
//
// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m8_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i32.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret void
//
// Masked ordered-indexed store: checks that the f64m8 builtin (32-bit indices)
// lowers to @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i32 per the CHECK lines above.
void test_vsoxei32_v_f64m8_m(vbool8_t mask, double *base, vuint32m4_t bindex,
                             vfloat64m8_t value, size_t vl) {
  return vsoxei32_v_f64m8_m(mask, base, bindex, value, vl);
}
4561
//
// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret void
//
// Masked ordered-indexed store: checks that the f64m1 builtin (64-bit indices)
// lowers to @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i64 per the CHECK lines above.
void test_vsoxei64_v_f64m1_m(vbool64_t mask, double *base, vuint64m1_t bindex,
                             vfloat64m1_t value, size_t vl) {
  return vsoxei64_v_f64m1_m(mask, base, bindex, value, vl);
}
4573
//
// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m2_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i64.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret void
//
// Masked ordered-indexed store: checks that the f64m2 builtin (64-bit indices)
// lowers to @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i64 per the CHECK lines above.
void test_vsoxei64_v_f64m2_m(vbool32_t mask, double *base, vuint64m2_t bindex,
                             vfloat64m2_t value, size_t vl) {
  return vsoxei64_v_f64m2_m(mask, base, bindex, value, vl);
}
4585
//
// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m4_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i64.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret void
//
// Masked ordered-indexed store: checks that the f64m4 builtin (64-bit indices)
// lowers to @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i64 per the CHECK lines above.
void test_vsoxei64_v_f64m4_m(vbool16_t mask, double *base, vuint64m4_t bindex,
                             vfloat64m4_t value, size_t vl) {
  return vsoxei64_v_f64m4_m(mask, base, bindex, value, vl);
}
4597
//
// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m8_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i64.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret void
//
// Masked ordered-indexed store: checks that the f64m8 builtin (64-bit indices)
// lowers to @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i64 per the CHECK lines above.
void test_vsoxei64_v_f64m8_m(vbool8_t mask, double *base, vuint64m8_t bindex,
                             vfloat64m8_t value, size_t vl) {
  return vsoxei64_v_f64m8_m(mask, base, bindex, value, vl);
}
4609