// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d \
// RUN:   -target-feature +experimental-v -target-feature +experimental-zfh \
// RUN:   -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>
9 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf8(
10 // CHECK-RV64-NEXT: entry:
11 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
12 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
13 // CHECK-RV64-NEXT: ret void
14 //
test_vsoxei8_v_i8mf8(int8_t * base,vuint8mf8_t bindex,vint8mf8_t value,size_t vl)15 void test_vsoxei8_v_i8mf8(int8_t *base, vuint8mf8_t bindex, vint8mf8_t value,
16 size_t vl) {
17 return vsoxei8_v_i8mf8(base, bindex, value, vl);
18 }
19
20 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf4(
21 // CHECK-RV64-NEXT: entry:
22 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
23 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
24 // CHECK-RV64-NEXT: ret void
25 //
test_vsoxei8_v_i8mf4(int8_t * base,vuint8mf4_t bindex,vint8mf4_t value,size_t vl)26 void test_vsoxei8_v_i8mf4(int8_t *base, vuint8mf4_t bindex, vint8mf4_t value,
27 size_t vl) {
28 return vsoxei8_v_i8mf4(base, bindex, value, vl);
29 }
30
31 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf2(
32 // CHECK-RV64-NEXT: entry:
33 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
34 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
35 // CHECK-RV64-NEXT: ret void
36 //
test_vsoxei8_v_i8mf2(int8_t * base,vuint8mf2_t bindex,vint8mf2_t value,size_t vl)37 void test_vsoxei8_v_i8mf2(int8_t *base, vuint8mf2_t bindex, vint8mf2_t value,
38 size_t vl) {
39 return vsoxei8_v_i8mf2(base, bindex, value, vl);
40 }
41
42 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8m1(
43 // CHECK-RV64-NEXT: entry:
44 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
45 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
46 // CHECK-RV64-NEXT: ret void
47 //
test_vsoxei8_v_i8m1(int8_t * base,vuint8m1_t bindex,vint8m1_t value,size_t vl)48 void test_vsoxei8_v_i8m1(int8_t *base, vuint8m1_t bindex, vint8m1_t value,
49 size_t vl) {
50 return vsoxei8_v_i8m1(base, bindex, value, vl);
51 }
52
53 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8m2(
54 // CHECK-RV64-NEXT: entry:
55 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
56 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
57 // CHECK-RV64-NEXT: ret void
58 //
test_vsoxei8_v_i8m2(int8_t * base,vuint8m2_t bindex,vint8m2_t value,size_t vl)59 void test_vsoxei8_v_i8m2(int8_t *base, vuint8m2_t bindex, vint8m2_t value,
60 size_t vl) {
61 return vsoxei8_v_i8m2(base, bindex, value, vl);
62 }
63
64 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8m4(
65 // CHECK-RV64-NEXT: entry:
66 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
67 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
68 // CHECK-RV64-NEXT: ret void
69 //
test_vsoxei8_v_i8m4(int8_t * base,vuint8m4_t bindex,vint8m4_t value,size_t vl)70 void test_vsoxei8_v_i8m4(int8_t *base, vuint8m4_t bindex, vint8m4_t value,
71 size_t vl) {
72 return vsoxei8_v_i8m4(base, bindex, value, vl);
73 }
74
75 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8m8(
76 // CHECK-RV64-NEXT: entry:
77 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
78 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
79 // CHECK-RV64-NEXT: ret void
80 //
test_vsoxei8_v_i8m8(int8_t * base,vuint8m8_t bindex,vint8m8_t value,size_t vl)81 void test_vsoxei8_v_i8m8(int8_t *base, vuint8m8_t bindex, vint8m8_t value,
82 size_t vl) {
83 return vsoxei8_v_i8m8(base, bindex, value, vl);
84 }
85
86 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf8(
87 // CHECK-RV64-NEXT: entry:
88 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
89 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
90 // CHECK-RV64-NEXT: ret void
91 //
test_vsoxei16_v_i8mf8(int8_t * base,vuint16mf4_t bindex,vint8mf8_t value,size_t vl)92 void test_vsoxei16_v_i8mf8(int8_t *base, vuint16mf4_t bindex, vint8mf8_t value,
93 size_t vl) {
94 return vsoxei16_v_i8mf8(base, bindex, value, vl);
95 }
96
97 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf4(
98 // CHECK-RV64-NEXT: entry:
99 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
100 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
101 // CHECK-RV64-NEXT: ret void
102 //
test_vsoxei16_v_i8mf4(int8_t * base,vuint16mf2_t bindex,vint8mf4_t value,size_t vl)103 void test_vsoxei16_v_i8mf4(int8_t *base, vuint16mf2_t bindex, vint8mf4_t value,
104 size_t vl) {
105 return vsoxei16_v_i8mf4(base, bindex, value, vl);
106 }
107
108 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf2(
109 // CHECK-RV64-NEXT: entry:
110 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
111 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
112 // CHECK-RV64-NEXT: ret void
113 //
test_vsoxei16_v_i8mf2(int8_t * base,vuint16m1_t bindex,vint8mf2_t value,size_t vl)114 void test_vsoxei16_v_i8mf2(int8_t *base, vuint16m1_t bindex, vint8mf2_t value,
115 size_t vl) {
116 return vsoxei16_v_i8mf2(base, bindex, value, vl);
117 }
118
119 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8m1(
120 // CHECK-RV64-NEXT: entry:
121 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
122 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
123 // CHECK-RV64-NEXT: ret void
124 //
test_vsoxei16_v_i8m1(int8_t * base,vuint16m2_t bindex,vint8m1_t value,size_t vl)125 void test_vsoxei16_v_i8m1(int8_t *base, vuint16m2_t bindex, vint8m1_t value,
126 size_t vl) {
127 return vsoxei16_v_i8m1(base, bindex, value, vl);
128 }
129
130 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8m2(
131 // CHECK-RV64-NEXT: entry:
132 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
133 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
134 // CHECK-RV64-NEXT: ret void
135 //
test_vsoxei16_v_i8m2(int8_t * base,vuint16m4_t bindex,vint8m2_t value,size_t vl)136 void test_vsoxei16_v_i8m2(int8_t *base, vuint16m4_t bindex, vint8m2_t value,
137 size_t vl) {
138 return vsoxei16_v_i8m2(base, bindex, value, vl);
139 }
140
141 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8m4(
142 // CHECK-RV64-NEXT: entry:
143 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
144 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
145 // CHECK-RV64-NEXT: ret void
146 //
test_vsoxei16_v_i8m4(int8_t * base,vuint16m8_t bindex,vint8m4_t value,size_t vl)147 void test_vsoxei16_v_i8m4(int8_t *base, vuint16m8_t bindex, vint8m4_t value,
148 size_t vl) {
149 return vsoxei16_v_i8m4(base, bindex, value, vl);
150 }
151
152 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf8(
153 // CHECK-RV64-NEXT: entry:
154 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
155 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
156 // CHECK-RV64-NEXT: ret void
157 //
test_vsoxei32_v_i8mf8(int8_t * base,vuint32mf2_t bindex,vint8mf8_t value,size_t vl)158 void test_vsoxei32_v_i8mf8(int8_t *base, vuint32mf2_t bindex, vint8mf8_t value,
159 size_t vl) {
160 return vsoxei32_v_i8mf8(base, bindex, value, vl);
161 }
162
163 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf4(
164 // CHECK-RV64-NEXT: entry:
165 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
166 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
167 // CHECK-RV64-NEXT: ret void
168 //
test_vsoxei32_v_i8mf4(int8_t * base,vuint32m1_t bindex,vint8mf4_t value,size_t vl)169 void test_vsoxei32_v_i8mf4(int8_t *base, vuint32m1_t bindex, vint8mf4_t value,
170 size_t vl) {
171 return vsoxei32_v_i8mf4(base, bindex, value, vl);
172 }
173
174 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf2(
175 // CHECK-RV64-NEXT: entry:
176 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
177 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
178 // CHECK-RV64-NEXT: ret void
179 //
test_vsoxei32_v_i8mf2(int8_t * base,vuint32m2_t bindex,vint8mf2_t value,size_t vl)180 void test_vsoxei32_v_i8mf2(int8_t *base, vuint32m2_t bindex, vint8mf2_t value,
181 size_t vl) {
182 return vsoxei32_v_i8mf2(base, bindex, value, vl);
183 }
184
185 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8m1(
186 // CHECK-RV64-NEXT: entry:
187 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
188 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
189 // CHECK-RV64-NEXT: ret void
190 //
test_vsoxei32_v_i8m1(int8_t * base,vuint32m4_t bindex,vint8m1_t value,size_t vl)191 void test_vsoxei32_v_i8m1(int8_t *base, vuint32m4_t bindex, vint8m1_t value,
192 size_t vl) {
193 return vsoxei32_v_i8m1(base, bindex, value, vl);
194 }
195
196 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8m2(
197 // CHECK-RV64-NEXT: entry:
198 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
199 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
200 // CHECK-RV64-NEXT: ret void
201 //
test_vsoxei32_v_i8m2(int8_t * base,vuint32m8_t bindex,vint8m2_t value,size_t vl)202 void test_vsoxei32_v_i8m2(int8_t *base, vuint32m8_t bindex, vint8m2_t value,
203 size_t vl) {
204 return vsoxei32_v_i8m2(base, bindex, value, vl);
205 }
206
207 // CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf8(
208 // CHECK-RV64-NEXT: entry:
209 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
210 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
211 // CHECK-RV64-NEXT: ret void
212 //
test_vsoxei64_v_i8mf8(int8_t * base,vuint64m1_t bindex,vint8mf8_t value,size_t vl)213 void test_vsoxei64_v_i8mf8(int8_t *base, vuint64m1_t bindex, vint8mf8_t value,
214 size_t vl) {
215 return vsoxei64_v_i8mf8(base, bindex, value, vl);
216 }
217
218 // CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf4(
219 // CHECK-RV64-NEXT: entry:
220 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
221 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
222 // CHECK-RV64-NEXT: ret void
223 //
test_vsoxei64_v_i8mf4(int8_t * base,vuint64m2_t bindex,vint8mf4_t value,size_t vl)224 void test_vsoxei64_v_i8mf4(int8_t *base, vuint64m2_t bindex, vint8mf4_t value,
225 size_t vl) {
226 return vsoxei64_v_i8mf4(base, bindex, value, vl);
227 }
228
229 // CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf2(
230 // CHECK-RV64-NEXT: entry:
231 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
232 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
233 // CHECK-RV64-NEXT: ret void
234 //
test_vsoxei64_v_i8mf2(int8_t * base,vuint64m4_t bindex,vint8mf2_t value,size_t vl)235 void test_vsoxei64_v_i8mf2(int8_t *base, vuint64m4_t bindex, vint8mf2_t value,
236 size_t vl) {
237 return vsoxei64_v_i8mf2(base, bindex, value, vl);
238 }
239
240 // CHECK-RV64-LABEL: @test_vsoxei64_v_i8m1(
241 // CHECK-RV64-NEXT: entry:
242 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
243 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
244 // CHECK-RV64-NEXT: ret void
245 //
test_vsoxei64_v_i8m1(int8_t * base,vuint64m8_t bindex,vint8m1_t value,size_t vl)246 void test_vsoxei64_v_i8m1(int8_t *base, vuint64m8_t bindex, vint8m1_t value,
247 size_t vl) {
248 return vsoxei64_v_i8m1(base, bindex, value, vl);
249 }
250
251 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16mf4(
252 // CHECK-RV64-NEXT: entry:
253 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
254 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
255 // CHECK-RV64-NEXT: ret void
256 //
test_vsoxei8_v_i16mf4(int16_t * base,vuint8mf8_t bindex,vint16mf4_t value,size_t vl)257 void test_vsoxei8_v_i16mf4(int16_t *base, vuint8mf8_t bindex, vint16mf4_t value,
258 size_t vl) {
259 return vsoxei8_v_i16mf4(base, bindex, value, vl);
260 }
261
262 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16mf2(
263 // CHECK-RV64-NEXT: entry:
264 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
265 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
266 // CHECK-RV64-NEXT: ret void
267 //
test_vsoxei8_v_i16mf2(int16_t * base,vuint8mf4_t bindex,vint16mf2_t value,size_t vl)268 void test_vsoxei8_v_i16mf2(int16_t *base, vuint8mf4_t bindex, vint16mf2_t value,
269 size_t vl) {
270 return vsoxei8_v_i16mf2(base, bindex, value, vl);
271 }
272
273 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16m1(
274 // CHECK-RV64-NEXT: entry:
275 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
276 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
277 // CHECK-RV64-NEXT: ret void
278 //
test_vsoxei8_v_i16m1(int16_t * base,vuint8mf2_t bindex,vint16m1_t value,size_t vl)279 void test_vsoxei8_v_i16m1(int16_t *base, vuint8mf2_t bindex, vint16m1_t value,
280 size_t vl) {
281 return vsoxei8_v_i16m1(base, bindex, value, vl);
282 }
283
284 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16m2(
285 // CHECK-RV64-NEXT: entry:
286 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
287 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
288 // CHECK-RV64-NEXT: ret void
289 //
test_vsoxei8_v_i16m2(int16_t * base,vuint8m1_t bindex,vint16m2_t value,size_t vl)290 void test_vsoxei8_v_i16m2(int16_t *base, vuint8m1_t bindex, vint16m2_t value,
291 size_t vl) {
292 return vsoxei8_v_i16m2(base, bindex, value, vl);
293 }
294
295 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16m4(
296 // CHECK-RV64-NEXT: entry:
297 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
298 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i8.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
299 // CHECK-RV64-NEXT: ret void
300 //
test_vsoxei8_v_i16m4(int16_t * base,vuint8m2_t bindex,vint16m4_t value,size_t vl)301 void test_vsoxei8_v_i16m4(int16_t *base, vuint8m2_t bindex, vint16m4_t value,
302 size_t vl) {
303 return vsoxei8_v_i16m4(base, bindex, value, vl);
304 }
305
306 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16m8(
307 // CHECK-RV64-NEXT: entry:
308 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
309 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i8.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
310 // CHECK-RV64-NEXT: ret void
311 //
test_vsoxei8_v_i16m8(int16_t * base,vuint8m4_t bindex,vint16m8_t value,size_t vl)312 void test_vsoxei8_v_i16m8(int16_t *base, vuint8m4_t bindex, vint16m8_t value,
313 size_t vl) {
314 return vsoxei8_v_i16m8(base, bindex, value, vl);
315 }
316
317 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16mf4(
318 // CHECK-RV64-NEXT: entry:
319 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
320 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
321 // CHECK-RV64-NEXT: ret void
322 //
test_vsoxei16_v_i16mf4(int16_t * base,vuint16mf4_t bindex,vint16mf4_t value,size_t vl)323 void test_vsoxei16_v_i16mf4(int16_t *base, vuint16mf4_t bindex,
324 vint16mf4_t value, size_t vl) {
325 return vsoxei16_v_i16mf4(base, bindex, value, vl);
326 }
327
328 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16mf2(
329 // CHECK-RV64-NEXT: entry:
330 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
331 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
332 // CHECK-RV64-NEXT: ret void
333 //
test_vsoxei16_v_i16mf2(int16_t * base,vuint16mf2_t bindex,vint16mf2_t value,size_t vl)334 void test_vsoxei16_v_i16mf2(int16_t *base, vuint16mf2_t bindex,
335 vint16mf2_t value, size_t vl) {
336 return vsoxei16_v_i16mf2(base, bindex, value, vl);
337 }
338
339 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16m1(
340 // CHECK-RV64-NEXT: entry:
341 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
342 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
343 // CHECK-RV64-NEXT: ret void
344 //
test_vsoxei16_v_i16m1(int16_t * base,vuint16m1_t bindex,vint16m1_t value,size_t vl)345 void test_vsoxei16_v_i16m1(int16_t *base, vuint16m1_t bindex, vint16m1_t value,
346 size_t vl) {
347 return vsoxei16_v_i16m1(base, bindex, value, vl);
348 }
349
350 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16m2(
351 // CHECK-RV64-NEXT: entry:
352 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
353 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
354 // CHECK-RV64-NEXT: ret void
355 //
test_vsoxei16_v_i16m2(int16_t * base,vuint16m2_t bindex,vint16m2_t value,size_t vl)356 void test_vsoxei16_v_i16m2(int16_t *base, vuint16m2_t bindex, vint16m2_t value,
357 size_t vl) {
358 return vsoxei16_v_i16m2(base, bindex, value, vl);
359 }
360
361 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16m4(
362 // CHECK-RV64-NEXT: entry:
363 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
364 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
365 // CHECK-RV64-NEXT: ret void
366 //
test_vsoxei16_v_i16m4(int16_t * base,vuint16m4_t bindex,vint16m4_t value,size_t vl)367 void test_vsoxei16_v_i16m4(int16_t *base, vuint16m4_t bindex, vint16m4_t value,
368 size_t vl) {
369 return vsoxei16_v_i16m4(base, bindex, value, vl);
370 }
371
372 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16m8(
373 // CHECK-RV64-NEXT: entry:
374 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
375 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
376 // CHECK-RV64-NEXT: ret void
377 //
test_vsoxei16_v_i16m8(int16_t * base,vuint16m8_t bindex,vint16m8_t value,size_t vl)378 void test_vsoxei16_v_i16m8(int16_t *base, vuint16m8_t bindex, vint16m8_t value,
379 size_t vl) {
380 return vsoxei16_v_i16m8(base, bindex, value, vl);
381 }
382
383 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16mf4(
384 // CHECK-RV64-NEXT: entry:
385 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
386 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
387 // CHECK-RV64-NEXT: ret void
388 //
test_vsoxei32_v_i16mf4(int16_t * base,vuint32mf2_t bindex,vint16mf4_t value,size_t vl)389 void test_vsoxei32_v_i16mf4(int16_t *base, vuint32mf2_t bindex,
390 vint16mf4_t value, size_t vl) {
391 return vsoxei32_v_i16mf4(base, bindex, value, vl);
392 }
393
394 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16mf2(
395 // CHECK-RV64-NEXT: entry:
396 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
397 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
398 // CHECK-RV64-NEXT: ret void
399 //
test_vsoxei32_v_i16mf2(int16_t * base,vuint32m1_t bindex,vint16mf2_t value,size_t vl)400 void test_vsoxei32_v_i16mf2(int16_t *base, vuint32m1_t bindex,
401 vint16mf2_t value, size_t vl) {
402 return vsoxei32_v_i16mf2(base, bindex, value, vl);
403 }
404
405 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16m1(
406 // CHECK-RV64-NEXT: entry:
407 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
408 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
409 // CHECK-RV64-NEXT: ret void
410 //
test_vsoxei32_v_i16m1(int16_t * base,vuint32m2_t bindex,vint16m1_t value,size_t vl)411 void test_vsoxei32_v_i16m1(int16_t *base, vuint32m2_t bindex, vint16m1_t value,
412 size_t vl) {
413 return vsoxei32_v_i16m1(base, bindex, value, vl);
414 }
415
416 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16m2(
417 // CHECK-RV64-NEXT: entry:
418 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
419 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
420 // CHECK-RV64-NEXT: ret void
421 //
test_vsoxei32_v_i16m2(int16_t * base,vuint32m4_t bindex,vint16m2_t value,size_t vl)422 void test_vsoxei32_v_i16m2(int16_t *base, vuint32m4_t bindex, vint16m2_t value,
423 size_t vl) {
424 return vsoxei32_v_i16m2(base, bindex, value, vl);
425 }
426
427 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16m4(
428 // CHECK-RV64-NEXT: entry:
429 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
430 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i32.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
431 // CHECK-RV64-NEXT: ret void
432 //
test_vsoxei32_v_i16m4(int16_t * base,vuint32m8_t bindex,vint16m4_t value,size_t vl)433 void test_vsoxei32_v_i16m4(int16_t *base, vuint32m8_t bindex, vint16m4_t value,
434 size_t vl) {
435 return vsoxei32_v_i16m4(base, bindex, value, vl);
436 }
437
438 // CHECK-RV64-LABEL: @test_vsoxei64_v_i16mf4(
439 // CHECK-RV64-NEXT: entry:
440 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
441 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
442 // CHECK-RV64-NEXT: ret void
443 //
test_vsoxei64_v_i16mf4(int16_t * base,vuint64m1_t bindex,vint16mf4_t value,size_t vl)444 void test_vsoxei64_v_i16mf4(int16_t *base, vuint64m1_t bindex,
445 vint16mf4_t value, size_t vl) {
446 return vsoxei64_v_i16mf4(base, bindex, value, vl);
447 }
448
449 // CHECK-RV64-LABEL: @test_vsoxei64_v_i16mf2(
450 // CHECK-RV64-NEXT: entry:
451 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
452 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
453 // CHECK-RV64-NEXT: ret void
454 //
test_vsoxei64_v_i16mf2(int16_t * base,vuint64m2_t bindex,vint16mf2_t value,size_t vl)455 void test_vsoxei64_v_i16mf2(int16_t *base, vuint64m2_t bindex,
456 vint16mf2_t value, size_t vl) {
457 return vsoxei64_v_i16mf2(base, bindex, value, vl);
458 }
459
460 // CHECK-RV64-LABEL: @test_vsoxei64_v_i16m1(
461 // CHECK-RV64-NEXT: entry:
462 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
463 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
464 // CHECK-RV64-NEXT: ret void
465 //
test_vsoxei64_v_i16m1(int16_t * base,vuint64m4_t bindex,vint16m1_t value,size_t vl)466 void test_vsoxei64_v_i16m1(int16_t *base, vuint64m4_t bindex, vint16m1_t value,
467 size_t vl) {
468 return vsoxei64_v_i16m1(base, bindex, value, vl);
469 }
470
471 // CHECK-RV64-LABEL: @test_vsoxei64_v_i16m2(
472 // CHECK-RV64-NEXT: entry:
473 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
474 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
475 // CHECK-RV64-NEXT: ret void
476 //
test_vsoxei64_v_i16m2(int16_t * base,vuint64m8_t bindex,vint16m2_t value,size_t vl)477 void test_vsoxei64_v_i16m2(int16_t *base, vuint64m8_t bindex, vint16m2_t value,
478 size_t vl) {
479 return vsoxei64_v_i16m2(base, bindex, value, vl);
480 }
481
482 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32mf2(
483 // CHECK-RV64-NEXT: entry:
484 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
485 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
486 // CHECK-RV64-NEXT: ret void
487 //
test_vsoxei8_v_i32mf2(int32_t * base,vuint8mf8_t bindex,vint32mf2_t value,size_t vl)488 void test_vsoxei8_v_i32mf2(int32_t *base, vuint8mf8_t bindex, vint32mf2_t value,
489 size_t vl) {
490 return vsoxei8_v_i32mf2(base, bindex, value, vl);
491 }
492
493 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32m1(
494 // CHECK-RV64-NEXT: entry:
495 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
496 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
497 // CHECK-RV64-NEXT: ret void
498 //
test_vsoxei8_v_i32m1(int32_t * base,vuint8mf4_t bindex,vint32m1_t value,size_t vl)499 void test_vsoxei8_v_i32m1(int32_t *base, vuint8mf4_t bindex, vint32m1_t value,
500 size_t vl) {
501 return vsoxei8_v_i32m1(base, bindex, value, vl);
502 }
503
504 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32m2(
505 // CHECK-RV64-NEXT: entry:
506 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
507 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
508 // CHECK-RV64-NEXT: ret void
509 //
test_vsoxei8_v_i32m2(int32_t * base,vuint8mf2_t bindex,vint32m2_t value,size_t vl)510 void test_vsoxei8_v_i32m2(int32_t *base, vuint8mf2_t bindex, vint32m2_t value,
511 size_t vl) {
512 return vsoxei8_v_i32m2(base, bindex, value, vl);
513 }
514
515 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32m4(
516 // CHECK-RV64-NEXT: entry:
517 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
518 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i8.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
519 // CHECK-RV64-NEXT: ret void
520 //
test_vsoxei8_v_i32m4(int32_t * base,vuint8m1_t bindex,vint32m4_t value,size_t vl)521 void test_vsoxei8_v_i32m4(int32_t *base, vuint8m1_t bindex, vint32m4_t value,
522 size_t vl) {
523 return vsoxei8_v_i32m4(base, bindex, value, vl);
524 }
525
526 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32m8(
527 // CHECK-RV64-NEXT: entry:
528 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
529 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i8.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
530 // CHECK-RV64-NEXT: ret void
531 //
test_vsoxei8_v_i32m8(int32_t * base,vuint8m2_t bindex,vint32m8_t value,size_t vl)532 void test_vsoxei8_v_i32m8(int32_t *base, vuint8m2_t bindex, vint32m8_t value,
533 size_t vl) {
534 return vsoxei8_v_i32m8(base, bindex, value, vl);
535 }
536
537 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32mf2(
538 // CHECK-RV64-NEXT: entry:
539 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
540 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
541 // CHECK-RV64-NEXT: ret void
542 //
test_vsoxei16_v_i32mf2(int32_t * base,vuint16mf4_t bindex,vint32mf2_t value,size_t vl)543 void test_vsoxei16_v_i32mf2(int32_t *base, vuint16mf4_t bindex,
544 vint32mf2_t value, size_t vl) {
545 return vsoxei16_v_i32mf2(base, bindex, value, vl);
546 }
547
548 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32m1(
549 // CHECK-RV64-NEXT: entry:
550 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
551 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
552 // CHECK-RV64-NEXT: ret void
553 //
// Codegen wrapper: forwards to the vsoxei16_v_i32m1 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_i32m1(int32_t *base, vuint16mf2_t bindex, vint32m1_t value,
                           size_t vl) {
  return vsoxei16_v_i32m1(base, bindex, value, vl);
}
558
559 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32m2(
560 // CHECK-RV64-NEXT: entry:
561 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
562 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
563 // CHECK-RV64-NEXT: ret void
564 //
// Codegen wrapper: forwards to the vsoxei16_v_i32m2 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_i32m2(int32_t *base, vuint16m1_t bindex, vint32m2_t value,
                           size_t vl) {
  return vsoxei16_v_i32m2(base, bindex, value, vl);
}
569
570 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32m4(
571 // CHECK-RV64-NEXT: entry:
572 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
573 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
574 // CHECK-RV64-NEXT: ret void
575 //
// Codegen wrapper: forwards to the vsoxei16_v_i32m4 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_i32m4(int32_t *base, vuint16m2_t bindex, vint32m4_t value,
                           size_t vl) {
  return vsoxei16_v_i32m4(base, bindex, value, vl);
}
580
581 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32m8(
582 // CHECK-RV64-NEXT: entry:
583 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
584 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i16.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
585 // CHECK-RV64-NEXT: ret void
586 //
// Codegen wrapper: forwards to the vsoxei16_v_i32m8 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_i32m8(int32_t *base, vuint16m4_t bindex, vint32m8_t value,
                           size_t vl) {
  return vsoxei16_v_i32m8(base, bindex, value, vl);
}
591
592 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32mf2(
593 // CHECK-RV64-NEXT: entry:
594 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
595 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
596 // CHECK-RV64-NEXT: ret void
597 //
// Codegen wrapper: forwards to the vsoxei32_v_i32mf2 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_i32mf2(int32_t *base, vuint32mf2_t bindex,
                            vint32mf2_t value, size_t vl) {
  return vsoxei32_v_i32mf2(base, bindex, value, vl);
}
602
603 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32m1(
604 // CHECK-RV64-NEXT: entry:
605 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
606 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
607 // CHECK-RV64-NEXT: ret void
608 //
// Codegen wrapper: forwards to the vsoxei32_v_i32m1 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_i32m1(int32_t *base, vuint32m1_t bindex, vint32m1_t value,
                           size_t vl) {
  return vsoxei32_v_i32m1(base, bindex, value, vl);
}
613
614 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32m2(
615 // CHECK-RV64-NEXT: entry:
616 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
617 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
618 // CHECK-RV64-NEXT: ret void
619 //
// Codegen wrapper: forwards to the vsoxei32_v_i32m2 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_i32m2(int32_t *base, vuint32m2_t bindex, vint32m2_t value,
                           size_t vl) {
  return vsoxei32_v_i32m2(base, bindex, value, vl);
}
624
625 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32m4(
626 // CHECK-RV64-NEXT: entry:
627 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
628 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
629 // CHECK-RV64-NEXT: ret void
630 //
// Codegen wrapper: forwards to the vsoxei32_v_i32m4 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_i32m4(int32_t *base, vuint32m4_t bindex, vint32m4_t value,
                           size_t vl) {
  return vsoxei32_v_i32m4(base, bindex, value, vl);
}
635
636 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32m8(
637 // CHECK-RV64-NEXT: entry:
638 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
639 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
640 // CHECK-RV64-NEXT: ret void
641 //
// Codegen wrapper: forwards to the vsoxei32_v_i32m8 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_i32m8(int32_t *base, vuint32m8_t bindex, vint32m8_t value,
                           size_t vl) {
  return vsoxei32_v_i32m8(base, bindex, value, vl);
}
646
647 // CHECK-RV64-LABEL: @test_vsoxei64_v_i32mf2(
648 // CHECK-RV64-NEXT: entry:
649 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
650 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
651 // CHECK-RV64-NEXT: ret void
652 //
// Codegen wrapper: forwards to the vsoxei64_v_i32mf2 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_i32mf2(int32_t *base, vuint64m1_t bindex,
                            vint32mf2_t value, size_t vl) {
  return vsoxei64_v_i32mf2(base, bindex, value, vl);
}
657
658 // CHECK-RV64-LABEL: @test_vsoxei64_v_i32m1(
659 // CHECK-RV64-NEXT: entry:
660 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
661 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
662 // CHECK-RV64-NEXT: ret void
663 //
// Codegen wrapper: forwards to the vsoxei64_v_i32m1 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_i32m1(int32_t *base, vuint64m2_t bindex, vint32m1_t value,
                           size_t vl) {
  return vsoxei64_v_i32m1(base, bindex, value, vl);
}
668
669 // CHECK-RV64-LABEL: @test_vsoxei64_v_i32m2(
670 // CHECK-RV64-NEXT: entry:
671 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
672 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
673 // CHECK-RV64-NEXT: ret void
674 //
// Codegen wrapper: forwards to the vsoxei64_v_i32m2 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_i32m2(int32_t *base, vuint64m4_t bindex, vint32m2_t value,
                           size_t vl) {
  return vsoxei64_v_i32m2(base, bindex, value, vl);
}
679
680 // CHECK-RV64-LABEL: @test_vsoxei64_v_i32m4(
681 // CHECK-RV64-NEXT: entry:
682 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
683 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
684 // CHECK-RV64-NEXT: ret void
685 //
// Codegen wrapper: forwards to the vsoxei64_v_i32m4 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_i32m4(int32_t *base, vuint64m8_t bindex, vint32m4_t value,
                           size_t vl) {
  return vsoxei64_v_i32m4(base, bindex, value, vl);
}
690
691 // CHECK-RV64-LABEL: @test_vsoxei8_v_i64m1(
692 // CHECK-RV64-NEXT: entry:
693 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
694 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
695 // CHECK-RV64-NEXT: ret void
696 //
// Codegen wrapper: forwards to the vsoxei8_v_i64m1 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_i64m1(int64_t *base, vuint8mf8_t bindex, vint64m1_t value,
                          size_t vl) {
  return vsoxei8_v_i64m1(base, bindex, value, vl);
}
701
702 // CHECK-RV64-LABEL: @test_vsoxei8_v_i64m2(
703 // CHECK-RV64-NEXT: entry:
704 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
705 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
706 // CHECK-RV64-NEXT: ret void
707 //
// Codegen wrapper: forwards to the vsoxei8_v_i64m2 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_i64m2(int64_t *base, vuint8mf4_t bindex, vint64m2_t value,
                          size_t vl) {
  return vsoxei8_v_i64m2(base, bindex, value, vl);
}
712
713 // CHECK-RV64-LABEL: @test_vsoxei8_v_i64m4(
714 // CHECK-RV64-NEXT: entry:
715 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
716 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
717 // CHECK-RV64-NEXT: ret void
718 //
// Codegen wrapper: forwards to the vsoxei8_v_i64m4 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_i64m4(int64_t *base, vuint8mf2_t bindex, vint64m4_t value,
                          size_t vl) {
  return vsoxei8_v_i64m4(base, bindex, value, vl);
}
723
724 // CHECK-RV64-LABEL: @test_vsoxei8_v_i64m8(
725 // CHECK-RV64-NEXT: entry:
726 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
727 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i8.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
728 // CHECK-RV64-NEXT: ret void
729 //
// Codegen wrapper: forwards to the vsoxei8_v_i64m8 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_i64m8(int64_t *base, vuint8m1_t bindex, vint64m8_t value,
                          size_t vl) {
  return vsoxei8_v_i64m8(base, bindex, value, vl);
}
734
735 // CHECK-RV64-LABEL: @test_vsoxei16_v_i64m1(
736 // CHECK-RV64-NEXT: entry:
737 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
738 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
739 // CHECK-RV64-NEXT: ret void
740 //
// Codegen wrapper: forwards to the vsoxei16_v_i64m1 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_i64m1(int64_t *base, vuint16mf4_t bindex, vint64m1_t value,
                           size_t vl) {
  return vsoxei16_v_i64m1(base, bindex, value, vl);
}
745
746 // CHECK-RV64-LABEL: @test_vsoxei16_v_i64m2(
747 // CHECK-RV64-NEXT: entry:
748 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
749 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
750 // CHECK-RV64-NEXT: ret void
751 //
// Codegen wrapper: forwards to the vsoxei16_v_i64m2 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_i64m2(int64_t *base, vuint16mf2_t bindex, vint64m2_t value,
                           size_t vl) {
  return vsoxei16_v_i64m2(base, bindex, value, vl);
}
756
757 // CHECK-RV64-LABEL: @test_vsoxei16_v_i64m4(
758 // CHECK-RV64-NEXT: entry:
759 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
760 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
761 // CHECK-RV64-NEXT: ret void
762 //
// Codegen wrapper: forwards to the vsoxei16_v_i64m4 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_i64m4(int64_t *base, vuint16m1_t bindex, vint64m4_t value,
                           size_t vl) {
  return vsoxei16_v_i64m4(base, bindex, value, vl);
}
767
768 // CHECK-RV64-LABEL: @test_vsoxei16_v_i64m8(
769 // CHECK-RV64-NEXT: entry:
770 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
771 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i16.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
772 // CHECK-RV64-NEXT: ret void
773 //
// Codegen wrapper: forwards to the vsoxei16_v_i64m8 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_i64m8(int64_t *base, vuint16m2_t bindex, vint64m8_t value,
                           size_t vl) {
  return vsoxei16_v_i64m8(base, bindex, value, vl);
}
778
779 // CHECK-RV64-LABEL: @test_vsoxei32_v_i64m1(
780 // CHECK-RV64-NEXT: entry:
781 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
782 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
783 // CHECK-RV64-NEXT: ret void
784 //
// Codegen wrapper: forwards to the vsoxei32_v_i64m1 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_i64m1(int64_t *base, vuint32mf2_t bindex, vint64m1_t value,
                           size_t vl) {
  return vsoxei32_v_i64m1(base, bindex, value, vl);
}
789
790 // CHECK-RV64-LABEL: @test_vsoxei32_v_i64m2(
791 // CHECK-RV64-NEXT: entry:
792 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
793 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
794 // CHECK-RV64-NEXT: ret void
795 //
// Codegen wrapper: forwards to the vsoxei32_v_i64m2 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_i64m2(int64_t *base, vuint32m1_t bindex, vint64m2_t value,
                           size_t vl) {
  return vsoxei32_v_i64m2(base, bindex, value, vl);
}
800
801 // CHECK-RV64-LABEL: @test_vsoxei32_v_i64m4(
802 // CHECK-RV64-NEXT: entry:
803 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
804 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
805 // CHECK-RV64-NEXT: ret void
806 //
// Codegen wrapper: forwards to the vsoxei32_v_i64m4 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_i64m4(int64_t *base, vuint32m2_t bindex, vint64m4_t value,
                           size_t vl) {
  return vsoxei32_v_i64m4(base, bindex, value, vl);
}
811
812 // CHECK-RV64-LABEL: @test_vsoxei32_v_i64m8(
813 // CHECK-RV64-NEXT: entry:
814 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
815 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i32.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
816 // CHECK-RV64-NEXT: ret void
817 //
// Codegen wrapper: forwards to the vsoxei32_v_i64m8 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_i64m8(int64_t *base, vuint32m4_t bindex, vint64m8_t value,
                           size_t vl) {
  return vsoxei32_v_i64m8(base, bindex, value, vl);
}
822
823 // CHECK-RV64-LABEL: @test_vsoxei64_v_i64m1(
824 // CHECK-RV64-NEXT: entry:
825 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
826 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
827 // CHECK-RV64-NEXT: ret void
828 //
// Codegen wrapper: forwards to the vsoxei64_v_i64m1 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_i64m1(int64_t *base, vuint64m1_t bindex, vint64m1_t value,
                           size_t vl) {
  return vsoxei64_v_i64m1(base, bindex, value, vl);
}
833
834 // CHECK-RV64-LABEL: @test_vsoxei64_v_i64m2(
835 // CHECK-RV64-NEXT: entry:
836 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
837 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
838 // CHECK-RV64-NEXT: ret void
839 //
// Codegen wrapper: forwards to the vsoxei64_v_i64m2 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_i64m2(int64_t *base, vuint64m2_t bindex, vint64m2_t value,
                           size_t vl) {
  return vsoxei64_v_i64m2(base, bindex, value, vl);
}
844
845 // CHECK-RV64-LABEL: @test_vsoxei64_v_i64m4(
846 // CHECK-RV64-NEXT: entry:
847 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
848 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
849 // CHECK-RV64-NEXT: ret void
850 //
// Codegen wrapper: forwards to the vsoxei64_v_i64m4 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_i64m4(int64_t *base, vuint64m4_t bindex, vint64m4_t value,
                           size_t vl) {
  return vsoxei64_v_i64m4(base, bindex, value, vl);
}
855
856 // CHECK-RV64-LABEL: @test_vsoxei64_v_i64m8(
857 // CHECK-RV64-NEXT: entry:
858 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
859 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
860 // CHECK-RV64-NEXT: ret void
861 //
// Codegen wrapper: forwards to the vsoxei64_v_i64m8 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_i64m8(int64_t *base, vuint64m8_t bindex, vint64m8_t value,
                           size_t vl) {
  return vsoxei64_v_i64m8(base, bindex, value, vl);
}
866
867 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf8(
868 // CHECK-RV64-NEXT: entry:
869 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
870 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
871 // CHECK-RV64-NEXT: ret void
872 //
// Codegen wrapper: forwards to the vsoxei8_v_u8mf8 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u8mf8(uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t value,
                          size_t vl) {
  return vsoxei8_v_u8mf8(base, bindex, value, vl);
}
877
878 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf4(
879 // CHECK-RV64-NEXT: entry:
880 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
881 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
882 // CHECK-RV64-NEXT: ret void
883 //
// Codegen wrapper: forwards to the vsoxei8_v_u8mf4 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u8mf4(uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t value,
                          size_t vl) {
  return vsoxei8_v_u8mf4(base, bindex, value, vl);
}
888
889 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf2(
890 // CHECK-RV64-NEXT: entry:
891 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
892 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
893 // CHECK-RV64-NEXT: ret void
894 //
// Codegen wrapper: forwards to the vsoxei8_v_u8mf2 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u8mf2(uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t value,
                          size_t vl) {
  return vsoxei8_v_u8mf2(base, bindex, value, vl);
}
899
900 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8m1(
901 // CHECK-RV64-NEXT: entry:
902 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
903 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
904 // CHECK-RV64-NEXT: ret void
905 //
// Codegen wrapper: forwards to the vsoxei8_v_u8m1 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u8m1(uint8_t *base, vuint8m1_t bindex, vuint8m1_t value,
                         size_t vl) {
  return vsoxei8_v_u8m1(base, bindex, value, vl);
}
910
911 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8m2(
912 // CHECK-RV64-NEXT: entry:
913 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
914 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
915 // CHECK-RV64-NEXT: ret void
916 //
// Codegen wrapper: forwards to the vsoxei8_v_u8m2 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u8m2(uint8_t *base, vuint8m2_t bindex, vuint8m2_t value,
                         size_t vl) {
  return vsoxei8_v_u8m2(base, bindex, value, vl);
}
921
922 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8m4(
923 // CHECK-RV64-NEXT: entry:
924 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
925 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
926 // CHECK-RV64-NEXT: ret void
927 //
// Codegen wrapper: forwards to the vsoxei8_v_u8m4 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u8m4(uint8_t *base, vuint8m4_t bindex, vuint8m4_t value,
                         size_t vl) {
  return vsoxei8_v_u8m4(base, bindex, value, vl);
}
932
933 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8m8(
934 // CHECK-RV64-NEXT: entry:
935 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
936 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
937 // CHECK-RV64-NEXT: ret void
938 //
// Codegen wrapper: forwards to the vsoxei8_v_u8m8 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u8m8(uint8_t *base, vuint8m8_t bindex, vuint8m8_t value,
                         size_t vl) {
  return vsoxei8_v_u8m8(base, bindex, value, vl);
}
943
944 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf8(
945 // CHECK-RV64-NEXT: entry:
946 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
947 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
948 // CHECK-RV64-NEXT: ret void
949 //
// Codegen wrapper: forwards to the vsoxei16_v_u8mf8 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u8mf8(uint8_t *base, vuint16mf4_t bindex,
                           vuint8mf8_t value, size_t vl) {
  return vsoxei16_v_u8mf8(base, bindex, value, vl);
}
954
955 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf4(
956 // CHECK-RV64-NEXT: entry:
957 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
958 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
959 // CHECK-RV64-NEXT: ret void
960 //
// Codegen wrapper: forwards to the vsoxei16_v_u8mf4 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u8mf4(uint8_t *base, vuint16mf2_t bindex,
                           vuint8mf4_t value, size_t vl) {
  return vsoxei16_v_u8mf4(base, bindex, value, vl);
}
965
966 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf2(
967 // CHECK-RV64-NEXT: entry:
968 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
969 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
970 // CHECK-RV64-NEXT: ret void
971 //
// Codegen wrapper: forwards to the vsoxei16_v_u8mf2 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u8mf2(uint8_t *base, vuint16m1_t bindex, vuint8mf2_t value,
                           size_t vl) {
  return vsoxei16_v_u8mf2(base, bindex, value, vl);
}
976
977 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8m1(
978 // CHECK-RV64-NEXT: entry:
979 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
980 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
981 // CHECK-RV64-NEXT: ret void
982 //
// Codegen wrapper: forwards to the vsoxei16_v_u8m1 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u8m1(uint8_t *base, vuint16m2_t bindex, vuint8m1_t value,
                          size_t vl) {
  return vsoxei16_v_u8m1(base, bindex, value, vl);
}
987
988 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8m2(
989 // CHECK-RV64-NEXT: entry:
990 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
991 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
992 // CHECK-RV64-NEXT: ret void
993 //
// Codegen wrapper: forwards to the vsoxei16_v_u8m2 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u8m2(uint8_t *base, vuint16m4_t bindex, vuint8m2_t value,
                          size_t vl) {
  return vsoxei16_v_u8m2(base, bindex, value, vl);
}
998
999 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8m4(
1000 // CHECK-RV64-NEXT: entry:
1001 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
1002 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1003 // CHECK-RV64-NEXT: ret void
1004 //
// Codegen wrapper: forwards to the vsoxei16_v_u8m4 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u8m4(uint8_t *base, vuint16m8_t bindex, vuint8m4_t value,
                          size_t vl) {
  return vsoxei16_v_u8m4(base, bindex, value, vl);
}
1009
1010 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf8(
1011 // CHECK-RV64-NEXT: entry:
1012 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
1013 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1014 // CHECK-RV64-NEXT: ret void
1015 //
// Codegen wrapper: forwards to the vsoxei32_v_u8mf8 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_u8mf8(uint8_t *base, vuint32mf2_t bindex,
                           vuint8mf8_t value, size_t vl) {
  return vsoxei32_v_u8mf8(base, bindex, value, vl);
}
1020
1021 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf4(
1022 // CHECK-RV64-NEXT: entry:
1023 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
1024 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1025 // CHECK-RV64-NEXT: ret void
1026 //
// Codegen wrapper: forwards to the vsoxei32_v_u8mf4 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_u8mf4(uint8_t *base, vuint32m1_t bindex, vuint8mf4_t value,
                           size_t vl) {
  return vsoxei32_v_u8mf4(base, bindex, value, vl);
}
1031
1032 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf2(
1033 // CHECK-RV64-NEXT: entry:
1034 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
1035 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1036 // CHECK-RV64-NEXT: ret void
1037 //
// Codegen wrapper: forwards to the vsoxei32_v_u8mf2 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_u8mf2(uint8_t *base, vuint32m2_t bindex, vuint8mf2_t value,
                           size_t vl) {
  return vsoxei32_v_u8mf2(base, bindex, value, vl);
}
1042
1043 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8m1(
1044 // CHECK-RV64-NEXT: entry:
1045 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
1046 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1047 // CHECK-RV64-NEXT: ret void
1048 //
// Codegen wrapper: forwards to the vsoxei32_v_u8m1 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_u8m1(uint8_t *base, vuint32m4_t bindex, vuint8m1_t value,
                          size_t vl) {
  return vsoxei32_v_u8m1(base, bindex, value, vl);
}
1053
1054 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8m2(
1055 // CHECK-RV64-NEXT: entry:
1056 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
1057 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1058 // CHECK-RV64-NEXT: ret void
1059 //
// Codegen wrapper: forwards to the vsoxei32_v_u8m2 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_u8m2(uint8_t *base, vuint32m8_t bindex, vuint8m2_t value,
                          size_t vl) {
  return vsoxei32_v_u8m2(base, bindex, value, vl);
}
1064
1065 // CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf8(
1066 // CHECK-RV64-NEXT: entry:
1067 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
1068 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1069 // CHECK-RV64-NEXT: ret void
1070 //
// Codegen wrapper: forwards to the vsoxei64_v_u8mf8 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_u8mf8(uint8_t *base, vuint64m1_t bindex, vuint8mf8_t value,
                           size_t vl) {
  return vsoxei64_v_u8mf8(base, bindex, value, vl);
}
1075
1076 // CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf4(
1077 // CHECK-RV64-NEXT: entry:
1078 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
1079 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1080 // CHECK-RV64-NEXT: ret void
1081 //
// Codegen wrapper: forwards to the vsoxei64_v_u8mf4 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_u8mf4(uint8_t *base, vuint64m2_t bindex, vuint8mf4_t value,
                           size_t vl) {
  return vsoxei64_v_u8mf4(base, bindex, value, vl);
}
1086
1087 // CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf2(
1088 // CHECK-RV64-NEXT: entry:
1089 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
1090 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1091 // CHECK-RV64-NEXT: ret void
1092 //
// Codegen wrapper: forwards to the vsoxei64_v_u8mf2 intrinsic; the emitted IR
// is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_u8mf2(uint8_t *base, vuint64m4_t bindex, vuint8mf2_t value,
                           size_t vl) {
  return vsoxei64_v_u8mf2(base, bindex, value, vl);
}
1097
1098 // CHECK-RV64-LABEL: @test_vsoxei64_v_u8m1(
1099 // CHECK-RV64-NEXT: entry:
1100 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
1101 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1102 // CHECK-RV64-NEXT: ret void
1103 //
// Checks lowering to @llvm.riscv.vsoxei.nxv8i8.nxv8i64.i64 (u8m1 data, ei64 indices).
void test_vsoxei64_v_u8m1(uint8_t *base, vuint64m8_t bindex, vuint8m1_t value,
                          size_t vl) {
  return vsoxei64_v_u8m1(base, bindex, value, vl);
}
1108
1109 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16mf4(
1110 // CHECK-RV64-NEXT: entry:
1111 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
1112 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1113 // CHECK-RV64-NEXT: ret void
1114 //
// Checks lowering to @llvm.riscv.vsoxei.nxv1i16.nxv1i8.i64 (u16mf4 data, ei8 indices).
void test_vsoxei8_v_u16mf4(uint16_t *base, vuint8mf8_t bindex,
                           vuint16mf4_t value, size_t vl) {
  return vsoxei8_v_u16mf4(base, bindex, value, vl);
}
1119
1120 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16mf2(
1121 // CHECK-RV64-NEXT: entry:
1122 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
1123 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1124 // CHECK-RV64-NEXT: ret void
1125 //
// Checks lowering to @llvm.riscv.vsoxei.nxv2i16.nxv2i8.i64 (u16mf2 data, ei8 indices).
void test_vsoxei8_v_u16mf2(uint16_t *base, vuint8mf4_t bindex,
                           vuint16mf2_t value, size_t vl) {
  return vsoxei8_v_u16mf2(base, bindex, value, vl);
}
1130
1131 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16m1(
1132 // CHECK-RV64-NEXT: entry:
1133 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
1134 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1135 // CHECK-RV64-NEXT: ret void
1136 //
// Checks lowering to @llvm.riscv.vsoxei.nxv4i16.nxv4i8.i64 (u16m1 data, ei8 indices).
void test_vsoxei8_v_u16m1(uint16_t *base, vuint8mf2_t bindex, vuint16m1_t value,
                          size_t vl) {
  return vsoxei8_v_u16m1(base, bindex, value, vl);
}
1141
1142 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16m2(
1143 // CHECK-RV64-NEXT: entry:
1144 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
1145 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1146 // CHECK-RV64-NEXT: ret void
1147 //
// Checks lowering to @llvm.riscv.vsoxei.nxv8i16.nxv8i8.i64 (u16m2 data, ei8 indices).
void test_vsoxei8_v_u16m2(uint16_t *base, vuint8m1_t bindex, vuint16m2_t value,
                          size_t vl) {
  return vsoxei8_v_u16m2(base, bindex, value, vl);
}
1152
1153 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16m4(
1154 // CHECK-RV64-NEXT: entry:
1155 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
1156 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i8.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1157 // CHECK-RV64-NEXT: ret void
1158 //
// Checks lowering to @llvm.riscv.vsoxei.nxv16i16.nxv16i8.i64 (u16m4 data, ei8 indices).
void test_vsoxei8_v_u16m4(uint16_t *base, vuint8m2_t bindex, vuint16m4_t value,
                          size_t vl) {
  return vsoxei8_v_u16m4(base, bindex, value, vl);
}
1163
1164 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16m8(
1165 // CHECK-RV64-NEXT: entry:
1166 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
1167 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i8.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1168 // CHECK-RV64-NEXT: ret void
1169 //
// Checks lowering to @llvm.riscv.vsoxei.nxv32i16.nxv32i8.i64 (u16m8 data, ei8 indices).
void test_vsoxei8_v_u16m8(uint16_t *base, vuint8m4_t bindex, vuint16m8_t value,
                          size_t vl) {
  return vsoxei8_v_u16m8(base, bindex, value, vl);
}
1174
1175 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16mf4(
1176 // CHECK-RV64-NEXT: entry:
1177 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
1178 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1179 // CHECK-RV64-NEXT: ret void
1180 //
// Checks lowering to @llvm.riscv.vsoxei.nxv1i16.nxv1i16.i64 (u16mf4 data, ei16 indices).
void test_vsoxei16_v_u16mf4(uint16_t *base, vuint16mf4_t bindex,
                            vuint16mf4_t value, size_t vl) {
  return vsoxei16_v_u16mf4(base, bindex, value, vl);
}
1185
1186 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16mf2(
1187 // CHECK-RV64-NEXT: entry:
1188 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
1189 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1190 // CHECK-RV64-NEXT: ret void
1191 //
// Checks lowering to @llvm.riscv.vsoxei.nxv2i16.nxv2i16.i64 (u16mf2 data, ei16 indices).
void test_vsoxei16_v_u16mf2(uint16_t *base, vuint16mf2_t bindex,
                            vuint16mf2_t value, size_t vl) {
  return vsoxei16_v_u16mf2(base, bindex, value, vl);
}
1196
1197 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16m1(
1198 // CHECK-RV64-NEXT: entry:
1199 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
1200 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1201 // CHECK-RV64-NEXT: ret void
1202 //
// Checks lowering to @llvm.riscv.vsoxei.nxv4i16.nxv4i16.i64 (u16m1 data, ei16 indices).
void test_vsoxei16_v_u16m1(uint16_t *base, vuint16m1_t bindex,
                           vuint16m1_t value, size_t vl) {
  return vsoxei16_v_u16m1(base, bindex, value, vl);
}
1207
1208 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16m2(
1209 // CHECK-RV64-NEXT: entry:
1210 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
1211 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1212 // CHECK-RV64-NEXT: ret void
1213 //
// Checks lowering to @llvm.riscv.vsoxei.nxv8i16.nxv8i16.i64 (u16m2 data, ei16 indices).
void test_vsoxei16_v_u16m2(uint16_t *base, vuint16m2_t bindex,
                           vuint16m2_t value, size_t vl) {
  return vsoxei16_v_u16m2(base, bindex, value, vl);
}
1218
1219 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16m4(
1220 // CHECK-RV64-NEXT: entry:
1221 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
1222 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1223 // CHECK-RV64-NEXT: ret void
1224 //
// Checks lowering to @llvm.riscv.vsoxei.nxv16i16.nxv16i16.i64 (u16m4 data, ei16 indices).
void test_vsoxei16_v_u16m4(uint16_t *base, vuint16m4_t bindex,
                           vuint16m4_t value, size_t vl) {
  return vsoxei16_v_u16m4(base, bindex, value, vl);
}
1229
1230 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16m8(
1231 // CHECK-RV64-NEXT: entry:
1232 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
1233 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1234 // CHECK-RV64-NEXT: ret void
1235 //
// Checks lowering to @llvm.riscv.vsoxei.nxv32i16.nxv32i16.i64 (u16m8 data, ei16 indices).
void test_vsoxei16_v_u16m8(uint16_t *base, vuint16m8_t bindex,
                           vuint16m8_t value, size_t vl) {
  return vsoxei16_v_u16m8(base, bindex, value, vl);
}
1240
1241 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16mf4(
1242 // CHECK-RV64-NEXT: entry:
1243 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
1244 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1245 // CHECK-RV64-NEXT: ret void
1246 //
// Checks lowering to @llvm.riscv.vsoxei.nxv1i16.nxv1i32.i64 (u16mf4 data, ei32 indices).
void test_vsoxei32_v_u16mf4(uint16_t *base, vuint32mf2_t bindex,
                            vuint16mf4_t value, size_t vl) {
  return vsoxei32_v_u16mf4(base, bindex, value, vl);
}
1251
1252 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16mf2(
1253 // CHECK-RV64-NEXT: entry:
1254 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
1255 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1256 // CHECK-RV64-NEXT: ret void
1257 //
// Checks lowering to @llvm.riscv.vsoxei.nxv2i16.nxv2i32.i64 (u16mf2 data, ei32 indices).
void test_vsoxei32_v_u16mf2(uint16_t *base, vuint32m1_t bindex,
                            vuint16mf2_t value, size_t vl) {
  return vsoxei32_v_u16mf2(base, bindex, value, vl);
}
1262
1263 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16m1(
1264 // CHECK-RV64-NEXT: entry:
1265 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
1266 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1267 // CHECK-RV64-NEXT: ret void
1268 //
// Checks lowering to @llvm.riscv.vsoxei.nxv4i16.nxv4i32.i64 (u16m1 data, ei32 indices).
void test_vsoxei32_v_u16m1(uint16_t *base, vuint32m2_t bindex,
                           vuint16m1_t value, size_t vl) {
  return vsoxei32_v_u16m1(base, bindex, value, vl);
}
1273
1274 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16m2(
1275 // CHECK-RV64-NEXT: entry:
1276 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
1277 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1278 // CHECK-RV64-NEXT: ret void
1279 //
// Checks lowering to @llvm.riscv.vsoxei.nxv8i16.nxv8i32.i64 (u16m2 data, ei32 indices).
void test_vsoxei32_v_u16m2(uint16_t *base, vuint32m4_t bindex,
                           vuint16m2_t value, size_t vl) {
  return vsoxei32_v_u16m2(base, bindex, value, vl);
}
1284
1285 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16m4(
1286 // CHECK-RV64-NEXT: entry:
1287 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
1288 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i32.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1289 // CHECK-RV64-NEXT: ret void
1290 //
// Checks lowering to @llvm.riscv.vsoxei.nxv16i16.nxv16i32.i64 (u16m4 data, ei32 indices).
void test_vsoxei32_v_u16m4(uint16_t *base, vuint32m8_t bindex,
                           vuint16m4_t value, size_t vl) {
  return vsoxei32_v_u16m4(base, bindex, value, vl);
}
1295
1296 // CHECK-RV64-LABEL: @test_vsoxei64_v_u16mf4(
1297 // CHECK-RV64-NEXT: entry:
1298 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
1299 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1300 // CHECK-RV64-NEXT: ret void
1301 //
// Checks lowering to @llvm.riscv.vsoxei.nxv1i16.nxv1i64.i64 (u16mf4 data, ei64 indices).
void test_vsoxei64_v_u16mf4(uint16_t *base, vuint64m1_t bindex,
                            vuint16mf4_t value, size_t vl) {
  return vsoxei64_v_u16mf4(base, bindex, value, vl);
}
1306
1307 // CHECK-RV64-LABEL: @test_vsoxei64_v_u16mf2(
1308 // CHECK-RV64-NEXT: entry:
1309 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
1310 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1311 // CHECK-RV64-NEXT: ret void
1312 //
// Checks lowering to @llvm.riscv.vsoxei.nxv2i16.nxv2i64.i64 (u16mf2 data, ei64 indices).
void test_vsoxei64_v_u16mf2(uint16_t *base, vuint64m2_t bindex,
                            vuint16mf2_t value, size_t vl) {
  return vsoxei64_v_u16mf2(base, bindex, value, vl);
}
1317
1318 // CHECK-RV64-LABEL: @test_vsoxei64_v_u16m1(
1319 // CHECK-RV64-NEXT: entry:
1320 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
1321 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1322 // CHECK-RV64-NEXT: ret void
1323 //
// Checks lowering to @llvm.riscv.vsoxei.nxv4i16.nxv4i64.i64 (u16m1 data, ei64 indices).
void test_vsoxei64_v_u16m1(uint16_t *base, vuint64m4_t bindex,
                           vuint16m1_t value, size_t vl) {
  return vsoxei64_v_u16m1(base, bindex, value, vl);
}
1328
1329 // CHECK-RV64-LABEL: @test_vsoxei64_v_u16m2(
1330 // CHECK-RV64-NEXT: entry:
1331 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
1332 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1333 // CHECK-RV64-NEXT: ret void
1334 //
// Checks lowering to @llvm.riscv.vsoxei.nxv8i16.nxv8i64.i64 (u16m2 data, ei64 indices).
void test_vsoxei64_v_u16m2(uint16_t *base, vuint64m8_t bindex,
                           vuint16m2_t value, size_t vl) {
  return vsoxei64_v_u16m2(base, bindex, value, vl);
}
1339
1340 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32mf2(
1341 // CHECK-RV64-NEXT: entry:
1342 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
1343 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1344 // CHECK-RV64-NEXT: ret void
1345 //
// Checks lowering to @llvm.riscv.vsoxei.nxv1i32.nxv1i8.i64 (u32mf2 data, ei8 indices).
void test_vsoxei8_v_u32mf2(uint32_t *base, vuint8mf8_t bindex,
                           vuint32mf2_t value, size_t vl) {
  return vsoxei8_v_u32mf2(base, bindex, value, vl);
}
1350
1351 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32m1(
1352 // CHECK-RV64-NEXT: entry:
1353 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
1354 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1355 // CHECK-RV64-NEXT: ret void
1356 //
// Checks lowering to @llvm.riscv.vsoxei.nxv2i32.nxv2i8.i64 (u32m1 data, ei8 indices).
void test_vsoxei8_v_u32m1(uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value,
                          size_t vl) {
  return vsoxei8_v_u32m1(base, bindex, value, vl);
}
1361
1362 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32m2(
1363 // CHECK-RV64-NEXT: entry:
1364 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
1365 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1366 // CHECK-RV64-NEXT: ret void
1367 //
// Checks lowering to @llvm.riscv.vsoxei.nxv4i32.nxv4i8.i64 (u32m2 data, ei8 indices).
void test_vsoxei8_v_u32m2(uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value,
                          size_t vl) {
  return vsoxei8_v_u32m2(base, bindex, value, vl);
}
1372
1373 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32m4(
1374 // CHECK-RV64-NEXT: entry:
1375 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
1376 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i8.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1377 // CHECK-RV64-NEXT: ret void
1378 //
// Checks lowering to @llvm.riscv.vsoxei.nxv8i32.nxv8i8.i64 (u32m4 data, ei8 indices).
void test_vsoxei8_v_u32m4(uint32_t *base, vuint8m1_t bindex, vuint32m4_t value,
                          size_t vl) {
  return vsoxei8_v_u32m4(base, bindex, value, vl);
}
1383
1384 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32m8(
1385 // CHECK-RV64-NEXT: entry:
1386 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
1387 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i8.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1388 // CHECK-RV64-NEXT: ret void
1389 //
// Checks lowering to @llvm.riscv.vsoxei.nxv16i32.nxv16i8.i64 (u32m8 data, ei8 indices).
void test_vsoxei8_v_u32m8(uint32_t *base, vuint8m2_t bindex, vuint32m8_t value,
                          size_t vl) {
  return vsoxei8_v_u32m8(base, bindex, value, vl);
}
1394
1395 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32mf2(
1396 // CHECK-RV64-NEXT: entry:
1397 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
1398 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1399 // CHECK-RV64-NEXT: ret void
1400 //
// Checks lowering to @llvm.riscv.vsoxei.nxv1i32.nxv1i16.i64 (u32mf2 data, ei16 indices).
void test_vsoxei16_v_u32mf2(uint32_t *base, vuint16mf4_t bindex,
                            vuint32mf2_t value, size_t vl) {
  return vsoxei16_v_u32mf2(base, bindex, value, vl);
}
1405
1406 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32m1(
1407 // CHECK-RV64-NEXT: entry:
1408 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
1409 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1410 // CHECK-RV64-NEXT: ret void
1411 //
// Checks lowering to @llvm.riscv.vsoxei.nxv2i32.nxv2i16.i64 (u32m1 data, ei16 indices).
void test_vsoxei16_v_u32m1(uint32_t *base, vuint16mf2_t bindex,
                           vuint32m1_t value, size_t vl) {
  return vsoxei16_v_u32m1(base, bindex, value, vl);
}
1416
1417 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32m2(
1418 // CHECK-RV64-NEXT: entry:
1419 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
1420 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1421 // CHECK-RV64-NEXT: ret void
1422 //
// Checks lowering to @llvm.riscv.vsoxei.nxv4i32.nxv4i16.i64 (u32m2 data, ei16 indices).
void test_vsoxei16_v_u32m2(uint32_t *base, vuint16m1_t bindex,
                           vuint32m2_t value, size_t vl) {
  return vsoxei16_v_u32m2(base, bindex, value, vl);
}
1427
1428 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32m4(
1429 // CHECK-RV64-NEXT: entry:
1430 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
1431 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1432 // CHECK-RV64-NEXT: ret void
1433 //
// Checks lowering to @llvm.riscv.vsoxei.nxv8i32.nxv8i16.i64 (u32m4 data, ei16 indices).
void test_vsoxei16_v_u32m4(uint32_t *base, vuint16m2_t bindex,
                           vuint32m4_t value, size_t vl) {
  return vsoxei16_v_u32m4(base, bindex, value, vl);
}
1438
1439 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32m8(
1440 // CHECK-RV64-NEXT: entry:
1441 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
1442 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i16.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1443 // CHECK-RV64-NEXT: ret void
1444 //
// Checks lowering to @llvm.riscv.vsoxei.nxv16i32.nxv16i16.i64 (u32m8 data, ei16 indices).
void test_vsoxei16_v_u32m8(uint32_t *base, vuint16m4_t bindex,
                           vuint32m8_t value, size_t vl) {
  return vsoxei16_v_u32m8(base, bindex, value, vl);
}
1449
1450 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32mf2(
1451 // CHECK-RV64-NEXT: entry:
1452 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
1453 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1454 // CHECK-RV64-NEXT: ret void
1455 //
// Checks lowering to @llvm.riscv.vsoxei.nxv1i32.nxv1i32.i64 (u32mf2 data, ei32 indices).
void test_vsoxei32_v_u32mf2(uint32_t *base, vuint32mf2_t bindex,
                            vuint32mf2_t value, size_t vl) {
  return vsoxei32_v_u32mf2(base, bindex, value, vl);
}
1460
1461 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32m1(
1462 // CHECK-RV64-NEXT: entry:
1463 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
1464 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1465 // CHECK-RV64-NEXT: ret void
1466 //
// Checks lowering to @llvm.riscv.vsoxei.nxv2i32.nxv2i32.i64 (u32m1 data, ei32 indices).
void test_vsoxei32_v_u32m1(uint32_t *base, vuint32m1_t bindex,
                           vuint32m1_t value, size_t vl) {
  return vsoxei32_v_u32m1(base, bindex, value, vl);
}
1471
1472 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32m2(
1473 // CHECK-RV64-NEXT: entry:
1474 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
1475 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1476 // CHECK-RV64-NEXT: ret void
1477 //
// Checks lowering to @llvm.riscv.vsoxei.nxv4i32.nxv4i32.i64 (u32m2 data, ei32 indices).
void test_vsoxei32_v_u32m2(uint32_t *base, vuint32m2_t bindex,
                           vuint32m2_t value, size_t vl) {
  return vsoxei32_v_u32m2(base, bindex, value, vl);
}
1482
1483 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32m4(
1484 // CHECK-RV64-NEXT: entry:
1485 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
1486 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1487 // CHECK-RV64-NEXT: ret void
1488 //
// Checks lowering to @llvm.riscv.vsoxei.nxv8i32.nxv8i32.i64 (u32m4 data, ei32 indices).
void test_vsoxei32_v_u32m4(uint32_t *base, vuint32m4_t bindex,
                           vuint32m4_t value, size_t vl) {
  return vsoxei32_v_u32m4(base, bindex, value, vl);
}
1493
1494 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32m8(
1495 // CHECK-RV64-NEXT: entry:
1496 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
1497 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1498 // CHECK-RV64-NEXT: ret void
1499 //
// Checks lowering to @llvm.riscv.vsoxei.nxv16i32.nxv16i32.i64 (u32m8 data, ei32 indices).
void test_vsoxei32_v_u32m8(uint32_t *base, vuint32m8_t bindex,
                           vuint32m8_t value, size_t vl) {
  return vsoxei32_v_u32m8(base, bindex, value, vl);
}
1504
1505 // CHECK-RV64-LABEL: @test_vsoxei64_v_u32mf2(
1506 // CHECK-RV64-NEXT: entry:
1507 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
1508 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1509 // CHECK-RV64-NEXT: ret void
1510 //
// Checks lowering to @llvm.riscv.vsoxei.nxv1i32.nxv1i64.i64 (u32mf2 data, ei64 indices).
void test_vsoxei64_v_u32mf2(uint32_t *base, vuint64m1_t bindex,
                            vuint32mf2_t value, size_t vl) {
  return vsoxei64_v_u32mf2(base, bindex, value, vl);
}
1515
1516 // CHECK-RV64-LABEL: @test_vsoxei64_v_u32m1(
1517 // CHECK-RV64-NEXT: entry:
1518 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
1519 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1520 // CHECK-RV64-NEXT: ret void
1521 //
// Checks lowering to @llvm.riscv.vsoxei.nxv2i32.nxv2i64.i64 (u32m1 data, ei64 indices).
void test_vsoxei64_v_u32m1(uint32_t *base, vuint64m2_t bindex,
                           vuint32m1_t value, size_t vl) {
  return vsoxei64_v_u32m1(base, bindex, value, vl);
}
1526
1527 // CHECK-RV64-LABEL: @test_vsoxei64_v_u32m2(
1528 // CHECK-RV64-NEXT: entry:
1529 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
1530 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1531 // CHECK-RV64-NEXT: ret void
1532 //
// Checks lowering to @llvm.riscv.vsoxei.nxv4i32.nxv4i64.i64 (u32m2 data, ei64 indices).
void test_vsoxei64_v_u32m2(uint32_t *base, vuint64m4_t bindex,
                           vuint32m2_t value, size_t vl) {
  return vsoxei64_v_u32m2(base, bindex, value, vl);
}
1537
1538 // CHECK-RV64-LABEL: @test_vsoxei64_v_u32m4(
1539 // CHECK-RV64-NEXT: entry:
1540 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
1541 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1542 // CHECK-RV64-NEXT: ret void
1543 //
// Checks lowering to @llvm.riscv.vsoxei.nxv8i32.nxv8i64.i64 (u32m4 data, ei64 indices).
void test_vsoxei64_v_u32m4(uint32_t *base, vuint64m8_t bindex,
                           vuint32m4_t value, size_t vl) {
  return vsoxei64_v_u32m4(base, bindex, value, vl);
}
1548
1549 // CHECK-RV64-LABEL: @test_vsoxei8_v_u64m1(
1550 // CHECK-RV64-NEXT: entry:
1551 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
1552 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1553 // CHECK-RV64-NEXT: ret void
1554 //
// Checks lowering to @llvm.riscv.vsoxei.nxv1i64.nxv1i8.i64 (u64m1 data, ei8 indices).
void test_vsoxei8_v_u64m1(uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value,
                          size_t vl) {
  return vsoxei8_v_u64m1(base, bindex, value, vl);
}
1559
1560 // CHECK-RV64-LABEL: @test_vsoxei8_v_u64m2(
1561 // CHECK-RV64-NEXT: entry:
1562 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
1563 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1564 // CHECK-RV64-NEXT: ret void
1565 //
// Checks lowering to @llvm.riscv.vsoxei.nxv2i64.nxv2i8.i64 (u64m2 data, ei8 indices).
void test_vsoxei8_v_u64m2(uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value,
                          size_t vl) {
  return vsoxei8_v_u64m2(base, bindex, value, vl);
}
1570
1571 // CHECK-RV64-LABEL: @test_vsoxei8_v_u64m4(
1572 // CHECK-RV64-NEXT: entry:
1573 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
1574 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1575 // CHECK-RV64-NEXT: ret void
1576 //
// Checks lowering to @llvm.riscv.vsoxei.nxv4i64.nxv4i8.i64 (u64m4 data, ei8 indices).
void test_vsoxei8_v_u64m4(uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value,
                          size_t vl) {
  return vsoxei8_v_u64m4(base, bindex, value, vl);
}
1581
1582 // CHECK-RV64-LABEL: @test_vsoxei8_v_u64m8(
1583 // CHECK-RV64-NEXT: entry:
1584 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
1585 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i8.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1586 // CHECK-RV64-NEXT: ret void
1587 //
// Checks lowering to @llvm.riscv.vsoxei.nxv8i64.nxv8i8.i64 (u64m8 data, ei8 indices).
void test_vsoxei8_v_u64m8(uint64_t *base, vuint8m1_t bindex, vuint64m8_t value,
                          size_t vl) {
  return vsoxei8_v_u64m8(base, bindex, value, vl);
}
1592
1593 // CHECK-RV64-LABEL: @test_vsoxei16_v_u64m1(
1594 // CHECK-RV64-NEXT: entry:
1595 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
1596 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1597 // CHECK-RV64-NEXT: ret void
1598 //
// Checks lowering to @llvm.riscv.vsoxei.nxv1i64.nxv1i16.i64 (u64m1 data, ei16 indices).
void test_vsoxei16_v_u64m1(uint64_t *base, vuint16mf4_t bindex,
                           vuint64m1_t value, size_t vl) {
  return vsoxei16_v_u64m1(base, bindex, value, vl);
}
1603
1604 // CHECK-RV64-LABEL: @test_vsoxei16_v_u64m2(
1605 // CHECK-RV64-NEXT: entry:
1606 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
1607 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1608 // CHECK-RV64-NEXT: ret void
1609 //
// Checks lowering to @llvm.riscv.vsoxei.nxv2i64.nxv2i16.i64 (u64m2 data, ei16 indices).
void test_vsoxei16_v_u64m2(uint64_t *base, vuint16mf2_t bindex,
                           vuint64m2_t value, size_t vl) {
  return vsoxei16_v_u64m2(base, bindex, value, vl);
}
1614
1615 // CHECK-RV64-LABEL: @test_vsoxei16_v_u64m4(
1616 // CHECK-RV64-NEXT: entry:
1617 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
1618 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1619 // CHECK-RV64-NEXT: ret void
1620 //
// Checks lowering to @llvm.riscv.vsoxei.nxv4i64.nxv4i16.i64 (u64m4 data, ei16 indices).
void test_vsoxei16_v_u64m4(uint64_t *base, vuint16m1_t bindex,
                           vuint64m4_t value, size_t vl) {
  return vsoxei16_v_u64m4(base, bindex, value, vl);
}
1625
1626 // CHECK-RV64-LABEL: @test_vsoxei16_v_u64m8(
1627 // CHECK-RV64-NEXT: entry:
1628 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
1629 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i16.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1630 // CHECK-RV64-NEXT: ret void
1631 //
// Checks lowering to @llvm.riscv.vsoxei.nxv8i64.nxv8i16.i64 (u64m8 data, ei16 indices).
void test_vsoxei16_v_u64m8(uint64_t *base, vuint16m2_t bindex,
                           vuint64m8_t value, size_t vl) {
  return vsoxei16_v_u64m8(base, bindex, value, vl);
}
1636
1637 // CHECK-RV64-LABEL: @test_vsoxei32_v_u64m1(
1638 // CHECK-RV64-NEXT: entry:
1639 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
1640 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1641 // CHECK-RV64-NEXT: ret void
1642 //
// Ordered indexed store: u64m1 data, 32-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv1i64.nxv1i32).
void test_vsoxei32_v_u64m1(uint64_t *base, vuint32mf2_t bindex,
                           vuint64m1_t value, size_t vl) {
  return vsoxei32_v_u64m1(base, bindex, value, vl);
}
1647
1648 // CHECK-RV64-LABEL: @test_vsoxei32_v_u64m2(
1649 // CHECK-RV64-NEXT: entry:
1650 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
1651 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1652 // CHECK-RV64-NEXT: ret void
1653 //
// Ordered indexed store: u64m2 data, 32-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv2i64.nxv2i32).
void test_vsoxei32_v_u64m2(uint64_t *base, vuint32m1_t bindex,
                           vuint64m2_t value, size_t vl) {
  return vsoxei32_v_u64m2(base, bindex, value, vl);
}
1658
1659 // CHECK-RV64-LABEL: @test_vsoxei32_v_u64m4(
1660 // CHECK-RV64-NEXT: entry:
1661 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
1662 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1663 // CHECK-RV64-NEXT: ret void
1664 //
// Ordered indexed store: u64m4 data, 32-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv4i64.nxv4i32).
void test_vsoxei32_v_u64m4(uint64_t *base, vuint32m2_t bindex,
                           vuint64m4_t value, size_t vl) {
  return vsoxei32_v_u64m4(base, bindex, value, vl);
}
1669
1670 // CHECK-RV64-LABEL: @test_vsoxei32_v_u64m8(
1671 // CHECK-RV64-NEXT: entry:
1672 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
1673 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i32.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1674 // CHECK-RV64-NEXT: ret void
1675 //
// Ordered indexed store: u64m8 data, 32-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv8i64.nxv8i32).
void test_vsoxei32_v_u64m8(uint64_t *base, vuint32m4_t bindex,
                           vuint64m8_t value, size_t vl) {
  return vsoxei32_v_u64m8(base, bindex, value, vl);
}
1680
1681 // CHECK-RV64-LABEL: @test_vsoxei64_v_u64m1(
1682 // CHECK-RV64-NEXT: entry:
1683 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
1684 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1685 // CHECK-RV64-NEXT: ret void
1686 //
// Ordered indexed store: u64m1 data, 64-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv1i64.nxv1i64).
void test_vsoxei64_v_u64m1(uint64_t *base, vuint64m1_t bindex,
                           vuint64m1_t value, size_t vl) {
  return vsoxei64_v_u64m1(base, bindex, value, vl);
}
1691
1692 // CHECK-RV64-LABEL: @test_vsoxei64_v_u64m2(
1693 // CHECK-RV64-NEXT: entry:
1694 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
1695 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1696 // CHECK-RV64-NEXT: ret void
1697 //
// Ordered indexed store: u64m2 data, 64-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv2i64.nxv2i64).
void test_vsoxei64_v_u64m2(uint64_t *base, vuint64m2_t bindex,
                           vuint64m2_t value, size_t vl) {
  return vsoxei64_v_u64m2(base, bindex, value, vl);
}
1702
1703 // CHECK-RV64-LABEL: @test_vsoxei64_v_u64m4(
1704 // CHECK-RV64-NEXT: entry:
1705 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
1706 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1707 // CHECK-RV64-NEXT: ret void
1708 //
// Ordered indexed store: u64m4 data, 64-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv4i64.nxv4i64).
void test_vsoxei64_v_u64m4(uint64_t *base, vuint64m4_t bindex,
                           vuint64m4_t value, size_t vl) {
  return vsoxei64_v_u64m4(base, bindex, value, vl);
}
1713
1714 // CHECK-RV64-LABEL: @test_vsoxei64_v_u64m8(
1715 // CHECK-RV64-NEXT: entry:
1716 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
1717 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1718 // CHECK-RV64-NEXT: ret void
1719 //
// Ordered indexed store: u64m8 data, 64-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv8i64.nxv8i64).
void test_vsoxei64_v_u64m8(uint64_t *base, vuint64m8_t bindex,
                           vuint64m8_t value, size_t vl) {
  return vsoxei64_v_u64m8(base, bindex, value, vl);
}
1724
1725 // CHECK-RV64-LABEL: @test_vsoxei8_v_f16mf4(
1726 // CHECK-RV64-NEXT: entry:
1727 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 1 x half>*
1728 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f16.nxv1i8.i64(<vscale x 1 x half> [[VALUE:%.*]], <vscale x 1 x half>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1729 // CHECK-RV64-NEXT: ret void
1730 //
// Ordered indexed store: f16mf4 data, 8-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv1f16.nxv1i8).
void test_vsoxei8_v_f16mf4(_Float16 *base, vuint8mf8_t bindex, vfloat16mf4_t value, size_t vl) {
  return vsoxei8_v_f16mf4(base, bindex, value, vl);
}
1734
1735 // CHECK-RV64-LABEL: @test_vsoxei8_v_f16mf2(
1736 // CHECK-RV64-NEXT: entry:
1737 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 2 x half>*
1738 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f16.nxv2i8.i64(<vscale x 2 x half> [[VALUE:%.*]], <vscale x 2 x half>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1739 // CHECK-RV64-NEXT: ret void
1740 //
// Ordered indexed store: f16mf2 data, 8-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv2f16.nxv2i8).
void test_vsoxei8_v_f16mf2(_Float16 *base, vuint8mf4_t bindex, vfloat16mf2_t value, size_t vl) {
  return vsoxei8_v_f16mf2(base, bindex, value, vl);
}
1744
1745 // CHECK-RV64-LABEL: @test_vsoxei8_v_f16m1(
1746 // CHECK-RV64-NEXT: entry:
1747 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 4 x half>*
1748 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f16.nxv4i8.i64(<vscale x 4 x half> [[VALUE:%.*]], <vscale x 4 x half>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1749 // CHECK-RV64-NEXT: ret void
1750 //
// Ordered indexed store: f16m1 data, 8-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv4f16.nxv4i8).
void test_vsoxei8_v_f16m1(_Float16 *base, vuint8mf2_t bindex, vfloat16m1_t value, size_t vl) {
  return vsoxei8_v_f16m1(base, bindex, value, vl);
}
1754
1755 // CHECK-RV64-LABEL: @test_vsoxei8_v_f16m2(
1756 // CHECK-RV64-NEXT: entry:
1757 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 8 x half>*
1758 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f16.nxv8i8.i64(<vscale x 8 x half> [[VALUE:%.*]], <vscale x 8 x half>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1759 // CHECK-RV64-NEXT: ret void
1760 //
// Ordered indexed store: f16m2 data, 8-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv8f16.nxv8i8).
void test_vsoxei8_v_f16m2(_Float16 *base, vuint8m1_t bindex, vfloat16m2_t value, size_t vl) {
  return vsoxei8_v_f16m2(base, bindex, value, vl);
}
1764
1765 // CHECK-RV64-LABEL: @test_vsoxei8_v_f16m4(
1766 // CHECK-RV64-NEXT: entry:
1767 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 16 x half>*
1768 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16f16.nxv16i8.i64(<vscale x 16 x half> [[VALUE:%.*]], <vscale x 16 x half>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1769 // CHECK-RV64-NEXT: ret void
1770 //
// Ordered indexed store: f16m4 data, 8-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv16f16.nxv16i8).
void test_vsoxei8_v_f16m4(_Float16 *base, vuint8m2_t bindex, vfloat16m4_t value, size_t vl) {
  return vsoxei8_v_f16m4(base, bindex, value, vl);
}
1774
1775 // CHECK-RV64-LABEL: @test_vsoxei8_v_f16m8(
1776 // CHECK-RV64-NEXT: entry:
1777 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 32 x half>*
1778 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32f16.nxv32i8.i64(<vscale x 32 x half> [[VALUE:%.*]], <vscale x 32 x half>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1779 // CHECK-RV64-NEXT: ret void
1780 //
// Ordered indexed store: f16m8 data, 8-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv32f16.nxv32i8).
void test_vsoxei8_v_f16m8(_Float16 *base, vuint8m4_t bindex, vfloat16m8_t value, size_t vl) {
  return vsoxei8_v_f16m8(base, bindex, value, vl);
}
1784
1785 // CHECK-RV64-LABEL: @test_vsoxei16_v_f16mf4(
1786 // CHECK-RV64-NEXT: entry:
1787 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 1 x half>*
1788 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f16.nxv1i16.i64(<vscale x 1 x half> [[VALUE:%.*]], <vscale x 1 x half>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1789 // CHECK-RV64-NEXT: ret void
1790 //
// Ordered indexed store: f16mf4 data, 16-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv1f16.nxv1i16).
void test_vsoxei16_v_f16mf4(_Float16 *base, vuint16mf4_t bindex, vfloat16mf4_t value, size_t vl) {
  return vsoxei16_v_f16mf4(base, bindex, value, vl);
}
1794
1795 // CHECK-RV64-LABEL: @test_vsoxei16_v_f16mf2(
1796 // CHECK-RV64-NEXT: entry:
1797 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 2 x half>*
1798 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f16.nxv2i16.i64(<vscale x 2 x half> [[VALUE:%.*]], <vscale x 2 x half>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1799 // CHECK-RV64-NEXT: ret void
1800 //
// Ordered indexed store: f16mf2 data, 16-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv2f16.nxv2i16).
void test_vsoxei16_v_f16mf2(_Float16 *base, vuint16mf2_t bindex, vfloat16mf2_t value, size_t vl) {
  return vsoxei16_v_f16mf2(base, bindex, value, vl);
}
1804
1805 // CHECK-RV64-LABEL: @test_vsoxei16_v_f16m1(
1806 // CHECK-RV64-NEXT: entry:
1807 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 4 x half>*
1808 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f16.nxv4i16.i64(<vscale x 4 x half> [[VALUE:%.*]], <vscale x 4 x half>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1809 // CHECK-RV64-NEXT: ret void
1810 //
// Ordered indexed store: f16m1 data, 16-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv4f16.nxv4i16).
void test_vsoxei16_v_f16m1(_Float16 *base, vuint16m1_t bindex, vfloat16m1_t value, size_t vl) {
  return vsoxei16_v_f16m1(base, bindex, value, vl);
}
1814
1815 // CHECK-RV64-LABEL: @test_vsoxei16_v_f16m2(
1816 // CHECK-RV64-NEXT: entry:
1817 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 8 x half>*
1818 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f16.nxv8i16.i64(<vscale x 8 x half> [[VALUE:%.*]], <vscale x 8 x half>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1819 // CHECK-RV64-NEXT: ret void
1820 //
// Ordered indexed store: f16m2 data, 16-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv8f16.nxv8i16).
void test_vsoxei16_v_f16m2(_Float16 *base, vuint16m2_t bindex, vfloat16m2_t value, size_t vl) {
  return vsoxei16_v_f16m2(base, bindex, value, vl);
}
1824
1825 // CHECK-RV64-LABEL: @test_vsoxei16_v_f16m4(
1826 // CHECK-RV64-NEXT: entry:
1827 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 16 x half>*
1828 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16f16.nxv16i16.i64(<vscale x 16 x half> [[VALUE:%.*]], <vscale x 16 x half>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1829 // CHECK-RV64-NEXT: ret void
1830 //
// Ordered indexed store: f16m4 data, 16-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv16f16.nxv16i16).
void test_vsoxei16_v_f16m4(_Float16 *base, vuint16m4_t bindex, vfloat16m4_t value, size_t vl) {
  return vsoxei16_v_f16m4(base, bindex, value, vl);
}
1834
1835 // CHECK-RV64-LABEL: @test_vsoxei16_v_f16m8(
1836 // CHECK-RV64-NEXT: entry:
1837 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 32 x half>*
1838 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32f16.nxv32i16.i64(<vscale x 32 x half> [[VALUE:%.*]], <vscale x 32 x half>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1839 // CHECK-RV64-NEXT: ret void
1840 //
// Ordered indexed store: f16m8 data, 16-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv32f16.nxv32i16).
void test_vsoxei16_v_f16m8(_Float16 *base, vuint16m8_t bindex, vfloat16m8_t value, size_t vl) {
  return vsoxei16_v_f16m8(base, bindex, value, vl);
}
1844
1845 // CHECK-RV64-LABEL: @test_vsoxei32_v_f16mf4(
1846 // CHECK-RV64-NEXT: entry:
1847 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 1 x half>*
1848 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f16.nxv1i32.i64(<vscale x 1 x half> [[VALUE:%.*]], <vscale x 1 x half>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1849 // CHECK-RV64-NEXT: ret void
1850 //
// Ordered indexed store: f16mf4 data, 32-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv1f16.nxv1i32).
void test_vsoxei32_v_f16mf4(_Float16 *base, vuint32mf2_t bindex, vfloat16mf4_t value, size_t vl) {
  return vsoxei32_v_f16mf4(base, bindex, value, vl);
}
1854
1855 // CHECK-RV64-LABEL: @test_vsoxei32_v_f16mf2(
1856 // CHECK-RV64-NEXT: entry:
1857 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 2 x half>*
1858 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f16.nxv2i32.i64(<vscale x 2 x half> [[VALUE:%.*]], <vscale x 2 x half>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1859 // CHECK-RV64-NEXT: ret void
1860 //
// Ordered indexed store: f16mf2 data, 32-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv2f16.nxv2i32).
void test_vsoxei32_v_f16mf2(_Float16 *base, vuint32m1_t bindex, vfloat16mf2_t value, size_t vl) {
  return vsoxei32_v_f16mf2(base, bindex, value, vl);
}
1864
1865 // CHECK-RV64-LABEL: @test_vsoxei32_v_f16m1(
1866 // CHECK-RV64-NEXT: entry:
1867 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 4 x half>*
1868 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f16.nxv4i32.i64(<vscale x 4 x half> [[VALUE:%.*]], <vscale x 4 x half>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1869 // CHECK-RV64-NEXT: ret void
1870 //
// Ordered indexed store: f16m1 data, 32-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv4f16.nxv4i32).
void test_vsoxei32_v_f16m1(_Float16 *base, vuint32m2_t bindex, vfloat16m1_t value, size_t vl) {
  return vsoxei32_v_f16m1(base, bindex, value, vl);
}
1874
1875 // CHECK-RV64-LABEL: @test_vsoxei32_v_f16m2(
1876 // CHECK-RV64-NEXT: entry:
1877 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 8 x half>*
1878 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f16.nxv8i32.i64(<vscale x 8 x half> [[VALUE:%.*]], <vscale x 8 x half>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1879 // CHECK-RV64-NEXT: ret void
1880 //
// Ordered indexed store: f16m2 data, 32-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv8f16.nxv8i32).
void test_vsoxei32_v_f16m2(_Float16 *base, vuint32m4_t bindex, vfloat16m2_t value, size_t vl) {
  return vsoxei32_v_f16m2(base, bindex, value, vl);
}
1884
1885 // CHECK-RV64-LABEL: @test_vsoxei32_v_f16m4(
1886 // CHECK-RV64-NEXT: entry:
1887 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 16 x half>*
1888 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16f16.nxv16i32.i64(<vscale x 16 x half> [[VALUE:%.*]], <vscale x 16 x half>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1889 // CHECK-RV64-NEXT: ret void
1890 //
// Ordered indexed store: f16m4 data, 32-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv16f16.nxv16i32).
void test_vsoxei32_v_f16m4(_Float16 *base, vuint32m8_t bindex, vfloat16m4_t value, size_t vl) {
  return vsoxei32_v_f16m4(base, bindex, value, vl);
}
1894
1895 // CHECK-RV64-LABEL: @test_vsoxei64_v_f16mf4(
1896 // CHECK-RV64-NEXT: entry:
1897 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 1 x half>*
1898 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f16.nxv1i64.i64(<vscale x 1 x half> [[VALUE:%.*]], <vscale x 1 x half>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1899 // CHECK-RV64-NEXT: ret void
1900 //
// Ordered indexed store: f16mf4 data, 64-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv1f16.nxv1i64).
void test_vsoxei64_v_f16mf4(_Float16 *base, vuint64m1_t bindex, vfloat16mf4_t value, size_t vl) {
  return vsoxei64_v_f16mf4(base, bindex, value, vl);
}
1904
1905 // CHECK-RV64-LABEL: @test_vsoxei64_v_f16mf2(
1906 // CHECK-RV64-NEXT: entry:
1907 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 2 x half>*
1908 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f16.nxv2i64.i64(<vscale x 2 x half> [[VALUE:%.*]], <vscale x 2 x half>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1909 // CHECK-RV64-NEXT: ret void
1910 //
// Ordered indexed store: f16mf2 data, 64-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv2f16.nxv2i64).
void test_vsoxei64_v_f16mf2(_Float16 *base, vuint64m2_t bindex, vfloat16mf2_t value, size_t vl) {
  return vsoxei64_v_f16mf2(base, bindex, value, vl);
}
1914
1915 // CHECK-RV64-LABEL: @test_vsoxei64_v_f16m1(
1916 // CHECK-RV64-NEXT: entry:
1917 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 4 x half>*
1918 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f16.nxv4i64.i64(<vscale x 4 x half> [[VALUE:%.*]], <vscale x 4 x half>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1919 // CHECK-RV64-NEXT: ret void
1920 //
// Ordered indexed store: f16m1 data, 64-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv4f16.nxv4i64).
void test_vsoxei64_v_f16m1(_Float16 *base, vuint64m4_t bindex, vfloat16m1_t value, size_t vl) {
  return vsoxei64_v_f16m1(base, bindex, value, vl);
}
1924
1925 // CHECK-RV64-LABEL: @test_vsoxei64_v_f16m2(
1926 // CHECK-RV64-NEXT: entry:
1927 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 8 x half>*
1928 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f16.nxv8i64.i64(<vscale x 8 x half> [[VALUE:%.*]], <vscale x 8 x half>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1929 // CHECK-RV64-NEXT: ret void
1930 //
// Ordered indexed store: f16m2 data, 64-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv8f16.nxv8i64).
void test_vsoxei64_v_f16m2(_Float16 *base, vuint64m8_t bindex, vfloat16m2_t value, size_t vl) {
  return vsoxei64_v_f16m2(base, bindex, value, vl);
}
1934
1935 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32mf2(
1936 // CHECK-RV64-NEXT: entry:
1937 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
1938 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1939 // CHECK-RV64-NEXT: ret void
1940 //
// Ordered indexed store: f32mf2 data, 8-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv1f32.nxv1i8).
void test_vsoxei8_v_f32mf2(float *base, vuint8mf8_t bindex, vfloat32mf2_t value,
                           size_t vl) {
  return vsoxei8_v_f32mf2(base, bindex, value, vl);
}
1945
1946 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32m1(
1947 // CHECK-RV64-NEXT: entry:
1948 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
1949 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1950 // CHECK-RV64-NEXT: ret void
1951 //
// Ordered indexed store: f32m1 data, 8-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv2f32.nxv2i8).
void test_vsoxei8_v_f32m1(float *base, vuint8mf4_t bindex, vfloat32m1_t value,
                          size_t vl) {
  return vsoxei8_v_f32m1(base, bindex, value, vl);
}
1956
1957 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32m2(
1958 // CHECK-RV64-NEXT: entry:
1959 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
1960 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i8.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1961 // CHECK-RV64-NEXT: ret void
1962 //
// Ordered indexed store: f32m2 data, 8-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv4f32.nxv4i8).
void test_vsoxei8_v_f32m2(float *base, vuint8mf2_t bindex, vfloat32m2_t value,
                          size_t vl) {
  return vsoxei8_v_f32m2(base, bindex, value, vl);
}
1967
1968 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32m4(
1969 // CHECK-RV64-NEXT: entry:
1970 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
1971 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i8.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1972 // CHECK-RV64-NEXT: ret void
1973 //
// Ordered indexed store: f32m4 data, 8-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv8f32.nxv8i8).
void test_vsoxei8_v_f32m4(float *base, vuint8m1_t bindex, vfloat32m4_t value,
                          size_t vl) {
  return vsoxei8_v_f32m4(base, bindex, value, vl);
}
1978
1979 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32m8(
1980 // CHECK-RV64-NEXT: entry:
1981 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
1982 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i8.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1983 // CHECK-RV64-NEXT: ret void
1984 //
// Ordered indexed store: f32m8 data, 8-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv16f32.nxv16i8).
void test_vsoxei8_v_f32m8(float *base, vuint8m2_t bindex, vfloat32m8_t value,
                          size_t vl) {
  return vsoxei8_v_f32m8(base, bindex, value, vl);
}
1989
1990 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32mf2(
1991 // CHECK-RV64-NEXT: entry:
1992 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
1993 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1994 // CHECK-RV64-NEXT: ret void
1995 //
// Ordered indexed store: f32mf2 data, 16-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv1f32.nxv1i16).
void test_vsoxei16_v_f32mf2(float *base, vuint16mf4_t bindex,
                            vfloat32mf2_t value, size_t vl) {
  return vsoxei16_v_f32mf2(base, bindex, value, vl);
}
2000
2001 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32m1(
2002 // CHECK-RV64-NEXT: entry:
2003 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
2004 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2005 // CHECK-RV64-NEXT: ret void
2006 //
// Ordered indexed store: f32m1 data, 16-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv2f32.nxv2i16).
void test_vsoxei16_v_f32m1(float *base, vuint16mf2_t bindex, vfloat32m1_t value,
                           size_t vl) {
  return vsoxei16_v_f32m1(base, bindex, value, vl);
}
2011
2012 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32m2(
2013 // CHECK-RV64-NEXT: entry:
2014 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
2015 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i16.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2016 // CHECK-RV64-NEXT: ret void
2017 //
// Ordered indexed store: f32m2 data, 16-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv4f32.nxv4i16).
void test_vsoxei16_v_f32m2(float *base, vuint16m1_t bindex, vfloat32m2_t value,
                           size_t vl) {
  return vsoxei16_v_f32m2(base, bindex, value, vl);
}
2022
2023 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32m4(
2024 // CHECK-RV64-NEXT: entry:
2025 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
2026 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i16.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2027 // CHECK-RV64-NEXT: ret void
2028 //
// Ordered indexed store: f32m4 data, 16-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv8f32.nxv8i16).
void test_vsoxei16_v_f32m4(float *base, vuint16m2_t bindex, vfloat32m4_t value,
                           size_t vl) {
  return vsoxei16_v_f32m4(base, bindex, value, vl);
}
2033
2034 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32m8(
2035 // CHECK-RV64-NEXT: entry:
2036 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
2037 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i16.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2038 // CHECK-RV64-NEXT: ret void
2039 //
// Ordered indexed store: f32m8 data, 16-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv16f32.nxv16i16).
void test_vsoxei16_v_f32m8(float *base, vuint16m4_t bindex, vfloat32m8_t value,
                           size_t vl) {
  return vsoxei16_v_f32m8(base, bindex, value, vl);
}
2044
2045 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32mf2(
2046 // CHECK-RV64-NEXT: entry:
2047 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
2048 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2049 // CHECK-RV64-NEXT: ret void
2050 //
// Ordered indexed store: f32mf2 data, 32-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv1f32.nxv1i32).
void test_vsoxei32_v_f32mf2(float *base, vuint32mf2_t bindex,
                            vfloat32mf2_t value, size_t vl) {
  return vsoxei32_v_f32mf2(base, bindex, value, vl);
}
2055
2056 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32m1(
2057 // CHECK-RV64-NEXT: entry:
2058 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
2059 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2060 // CHECK-RV64-NEXT: ret void
2061 //
// Ordered indexed store: f32m1 data, 32-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv2f32.nxv2i32).
void test_vsoxei32_v_f32m1(float *base, vuint32m1_t bindex, vfloat32m1_t value,
                           size_t vl) {
  return vsoxei32_v_f32m1(base, bindex, value, vl);
}
2066
2067 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32m2(
2068 // CHECK-RV64-NEXT: entry:
2069 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
2070 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i32.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2071 // CHECK-RV64-NEXT: ret void
2072 //
// Ordered indexed store: f32m2 data, 32-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv4f32.nxv4i32).
void test_vsoxei32_v_f32m2(float *base, vuint32m2_t bindex, vfloat32m2_t value,
                           size_t vl) {
  return vsoxei32_v_f32m2(base, bindex, value, vl);
}
2077
2078 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32m4(
2079 // CHECK-RV64-NEXT: entry:
2080 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
2081 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i32.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2082 // CHECK-RV64-NEXT: ret void
2083 //
// Ordered indexed store: f32m4 data, 32-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv8f32.nxv8i32).
void test_vsoxei32_v_f32m4(float *base, vuint32m4_t bindex, vfloat32m4_t value,
                           size_t vl) {
  return vsoxei32_v_f32m4(base, bindex, value, vl);
}
2088
2089 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32m8(
2090 // CHECK-RV64-NEXT: entry:
2091 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
2092 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i32.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2093 // CHECK-RV64-NEXT: ret void
2094 //
// Ordered indexed store: f32m8 data, 32-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv16f32.nxv16i32).
void test_vsoxei32_v_f32m8(float *base, vuint32m8_t bindex, vfloat32m8_t value,
                           size_t vl) {
  return vsoxei32_v_f32m8(base, bindex, value, vl);
}
2099
2100 // CHECK-RV64-LABEL: @test_vsoxei64_v_f32mf2(
2101 // CHECK-RV64-NEXT: entry:
2102 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
2103 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2104 // CHECK-RV64-NEXT: ret void
2105 //
// Ordered indexed store: f32mf2 data, 64-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv1f32.nxv1i64).
void test_vsoxei64_v_f32mf2(float *base, vuint64m1_t bindex,
                            vfloat32mf2_t value, size_t vl) {
  return vsoxei64_v_f32mf2(base, bindex, value, vl);
}
2110
2111 // CHECK-RV64-LABEL: @test_vsoxei64_v_f32m1(
2112 // CHECK-RV64-NEXT: entry:
2113 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
2114 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2115 // CHECK-RV64-NEXT: ret void
2116 //
// Ordered indexed store: f32m1 data, 64-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv2f32.nxv2i64).
void test_vsoxei64_v_f32m1(float *base, vuint64m2_t bindex, vfloat32m1_t value,
                           size_t vl) {
  return vsoxei64_v_f32m1(base, bindex, value, vl);
}
2121
2122 // CHECK-RV64-LABEL: @test_vsoxei64_v_f32m2(
2123 // CHECK-RV64-NEXT: entry:
2124 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
2125 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i64.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2126 // CHECK-RV64-NEXT: ret void
2127 //
// Ordered indexed store: f32m2 data, 64-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv4f32.nxv4i64).
void test_vsoxei64_v_f32m2(float *base, vuint64m4_t bindex, vfloat32m2_t value,
                           size_t vl) {
  return vsoxei64_v_f32m2(base, bindex, value, vl);
}
2132
2133 // CHECK-RV64-LABEL: @test_vsoxei64_v_f32m4(
2134 // CHECK-RV64-NEXT: entry:
2135 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
2136 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i64.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2137 // CHECK-RV64-NEXT: ret void
2138 //
// Ordered indexed store: f32m4 data, 64-bit indices. Lowering is pinned by
// the autogenerated CHECK-RV64 lines above (llvm.riscv.vsoxei.nxv8f32.nxv8i64).
void test_vsoxei64_v_f32m4(float *base, vuint64m8_t bindex, vfloat32m4_t value,
                           size_t vl) {
  return vsoxei64_v_f32m4(base, bindex, value, vl);
}
2143
2144 // CHECK-RV64-LABEL: @test_vsoxei8_v_f64m1(
2145 // CHECK-RV64-NEXT: entry:
2146 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
2147 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2148 // CHECK-RV64-NEXT: ret void
2149 //
// Unmasked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.nxv1f64.nxv1i8.
void test_vsoxei8_v_f64m1(double *base, vuint8mf8_t bindex, vfloat64m1_t value,
                          size_t vl) {
  return vsoxei8_v_f64m1(base, bindex, value, vl);
}
2154
2155 // CHECK-RV64-LABEL: @test_vsoxei8_v_f64m2(
2156 // CHECK-RV64-NEXT: entry:
2157 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
2158 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i8.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2159 // CHECK-RV64-NEXT: ret void
2160 //
// Unmasked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.nxv2f64.nxv2i8.
void test_vsoxei8_v_f64m2(double *base, vuint8mf4_t bindex, vfloat64m2_t value,
                          size_t vl) {
  return vsoxei8_v_f64m2(base, bindex, value, vl);
}
2165
2166 // CHECK-RV64-LABEL: @test_vsoxei8_v_f64m4(
2167 // CHECK-RV64-NEXT: entry:
2168 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
2169 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i8.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2170 // CHECK-RV64-NEXT: ret void
2171 //
// Unmasked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.nxv4f64.nxv4i8.
void test_vsoxei8_v_f64m4(double *base, vuint8mf2_t bindex, vfloat64m4_t value,
                          size_t vl) {
  return vsoxei8_v_f64m4(base, bindex, value, vl);
}
2176
2177 // CHECK-RV64-LABEL: @test_vsoxei8_v_f64m8(
2178 // CHECK-RV64-NEXT: entry:
2179 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
2180 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i8.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2181 // CHECK-RV64-NEXT: ret void
2182 //
// Unmasked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.nxv8f64.nxv8i8.
void test_vsoxei8_v_f64m8(double *base, vuint8m1_t bindex, vfloat64m8_t value,
                          size_t vl) {
  return vsoxei8_v_f64m8(base, bindex, value, vl);
}
2187
2188 // CHECK-RV64-LABEL: @test_vsoxei16_v_f64m1(
2189 // CHECK-RV64-NEXT: entry:
2190 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
2191 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2192 // CHECK-RV64-NEXT: ret void
2193 //
// Unmasked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.nxv1f64.nxv1i16.
void test_vsoxei16_v_f64m1(double *base, vuint16mf4_t bindex,
                           vfloat64m1_t value, size_t vl) {
  return vsoxei16_v_f64m1(base, bindex, value, vl);
}
2198
2199 // CHECK-RV64-LABEL: @test_vsoxei16_v_f64m2(
2200 // CHECK-RV64-NEXT: entry:
2201 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
2202 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i16.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2203 // CHECK-RV64-NEXT: ret void
2204 //
// Unmasked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.nxv2f64.nxv2i16.
void test_vsoxei16_v_f64m2(double *base, vuint16mf2_t bindex,
                           vfloat64m2_t value, size_t vl) {
  return vsoxei16_v_f64m2(base, bindex, value, vl);
}
2209
2210 // CHECK-RV64-LABEL: @test_vsoxei16_v_f64m4(
2211 // CHECK-RV64-NEXT: entry:
2212 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
2213 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i16.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2214 // CHECK-RV64-NEXT: ret void
2215 //
// Unmasked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.nxv4f64.nxv4i16.
void test_vsoxei16_v_f64m4(double *base, vuint16m1_t bindex, vfloat64m4_t value,
                           size_t vl) {
  return vsoxei16_v_f64m4(base, bindex, value, vl);
}
2220
2221 // CHECK-RV64-LABEL: @test_vsoxei16_v_f64m8(
2222 // CHECK-RV64-NEXT: entry:
2223 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
2224 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i16.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2225 // CHECK-RV64-NEXT: ret void
2226 //
// Unmasked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.nxv8f64.nxv8i16.
void test_vsoxei16_v_f64m8(double *base, vuint16m2_t bindex, vfloat64m8_t value,
                           size_t vl) {
  return vsoxei16_v_f64m8(base, bindex, value, vl);
}
2231
2232 // CHECK-RV64-LABEL: @test_vsoxei32_v_f64m1(
2233 // CHECK-RV64-NEXT: entry:
2234 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
2235 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2236 // CHECK-RV64-NEXT: ret void
2237 //
// Unmasked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.nxv1f64.nxv1i32.
void test_vsoxei32_v_f64m1(double *base, vuint32mf2_t bindex,
                           vfloat64m1_t value, size_t vl) {
  return vsoxei32_v_f64m1(base, bindex, value, vl);
}
2242
2243 // CHECK-RV64-LABEL: @test_vsoxei32_v_f64m2(
2244 // CHECK-RV64-NEXT: entry:
2245 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
2246 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i32.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2247 // CHECK-RV64-NEXT: ret void
2248 //
// Unmasked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.nxv2f64.nxv2i32.
void test_vsoxei32_v_f64m2(double *base, vuint32m1_t bindex, vfloat64m2_t value,
                           size_t vl) {
  return vsoxei32_v_f64m2(base, bindex, value, vl);
}
2253
2254 // CHECK-RV64-LABEL: @test_vsoxei32_v_f64m4(
2255 // CHECK-RV64-NEXT: entry:
2256 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
2257 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i32.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2258 // CHECK-RV64-NEXT: ret void
2259 //
// Unmasked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.nxv4f64.nxv4i32.
void test_vsoxei32_v_f64m4(double *base, vuint32m2_t bindex, vfloat64m4_t value,
                           size_t vl) {
  return vsoxei32_v_f64m4(base, bindex, value, vl);
}
2264
2265 // CHECK-RV64-LABEL: @test_vsoxei32_v_f64m8(
2266 // CHECK-RV64-NEXT: entry:
2267 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
2268 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i32.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2269 // CHECK-RV64-NEXT: ret void
2270 //
// Unmasked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.nxv8f64.nxv8i32.
void test_vsoxei32_v_f64m8(double *base, vuint32m4_t bindex, vfloat64m8_t value,
                           size_t vl) {
  return vsoxei32_v_f64m8(base, bindex, value, vl);
}
2275
2276 // CHECK-RV64-LABEL: @test_vsoxei64_v_f64m1(
2277 // CHECK-RV64-NEXT: entry:
2278 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
2279 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2280 // CHECK-RV64-NEXT: ret void
2281 //
// Unmasked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.nxv1f64.nxv1i64.
void test_vsoxei64_v_f64m1(double *base, vuint64m1_t bindex, vfloat64m1_t value,
                           size_t vl) {
  return vsoxei64_v_f64m1(base, bindex, value, vl);
}
2286
2287 // CHECK-RV64-LABEL: @test_vsoxei64_v_f64m2(
2288 // CHECK-RV64-NEXT: entry:
2289 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
2290 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i64.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2291 // CHECK-RV64-NEXT: ret void
2292 //
// Unmasked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.nxv2f64.nxv2i64.
void test_vsoxei64_v_f64m2(double *base, vuint64m2_t bindex, vfloat64m2_t value,
                           size_t vl) {
  return vsoxei64_v_f64m2(base, bindex, value, vl);
}
2297
2298 // CHECK-RV64-LABEL: @test_vsoxei64_v_f64m4(
2299 // CHECK-RV64-NEXT: entry:
2300 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
2301 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i64.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2302 // CHECK-RV64-NEXT: ret void
2303 //
// Unmasked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.nxv4f64.nxv4i64.
void test_vsoxei64_v_f64m4(double *base, vuint64m4_t bindex, vfloat64m4_t value,
                           size_t vl) {
  return vsoxei64_v_f64m4(base, bindex, value, vl);
}
2308
2309 // CHECK-RV64-LABEL: @test_vsoxei64_v_f64m8(
2310 // CHECK-RV64-NEXT: entry:
2311 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
2312 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i64.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2313 // CHECK-RV64-NEXT: ret void
2314 //
// Unmasked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.nxv8f64.nxv8i64.
void test_vsoxei64_v_f64m8(double *base, vuint64m8_t bindex, vfloat64m8_t value,
                           size_t vl) {
  return vsoxei64_v_f64m8(base, bindex, value, vl);
}
2319
2320 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf8_m(
2321 // CHECK-RV64-NEXT: entry:
2322 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
2323 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2324 // CHECK-RV64-NEXT: ret void
2325 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i8.
void test_vsoxei8_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint8mf8_t bindex,
                            vint8mf8_t value, size_t vl) {
  return vsoxei8_v_i8mf8_m(mask, base, bindex, value, vl);
}
2330
2331 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf4_m(
2332 // CHECK-RV64-NEXT: entry:
2333 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
2334 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2335 // CHECK-RV64-NEXT: ret void
2336 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i8.
void test_vsoxei8_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint8mf4_t bindex,
                            vint8mf4_t value, size_t vl) {
  return vsoxei8_v_i8mf4_m(mask, base, bindex, value, vl);
}
2341
2342 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf2_m(
2343 // CHECK-RV64-NEXT: entry:
2344 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
2345 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2346 // CHECK-RV64-NEXT: ret void
2347 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i8.
void test_vsoxei8_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint8mf2_t bindex,
                            vint8mf2_t value, size_t vl) {
  return vsoxei8_v_i8mf2_m(mask, base, bindex, value, vl);
}
2352
2353 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8m1_m(
2354 // CHECK-RV64-NEXT: entry:
2355 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
2356 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2357 // CHECK-RV64-NEXT: ret void
2358 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i8.
void test_vsoxei8_v_i8m1_m(vbool8_t mask, int8_t *base, vuint8m1_t bindex,
                           vint8m1_t value, size_t vl) {
  return vsoxei8_v_i8m1_m(mask, base, bindex, value, vl);
}
2363
2364 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8m2_m(
2365 // CHECK-RV64-NEXT: entry:
2366 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
2367 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2368 // CHECK-RV64-NEXT: ret void
2369 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i8.
void test_vsoxei8_v_i8m2_m(vbool4_t mask, int8_t *base, vuint8m2_t bindex,
                           vint8m2_t value, size_t vl) {
  return vsoxei8_v_i8m2_m(mask, base, bindex, value, vl);
}
2374
2375 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8m4_m(
2376 // CHECK-RV64-NEXT: entry:
2377 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
2378 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2379 // CHECK-RV64-NEXT: ret void
2380 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i8.
void test_vsoxei8_v_i8m4_m(vbool2_t mask, int8_t *base, vuint8m4_t bindex,
                           vint8m4_t value, size_t vl) {
  return vsoxei8_v_i8m4_m(mask, base, bindex, value, vl);
}
2385
2386 // CHECK-RV64-LABEL: @test_vsoxei8_v_i8m8_m(
2387 // CHECK-RV64-NEXT: entry:
2388 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
2389 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], <vscale x 64 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2390 // CHECK-RV64-NEXT: ret void
2391 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv64i8.nxv64i8.
void test_vsoxei8_v_i8m8_m(vbool1_t mask, int8_t *base, vuint8m8_t bindex,
                           vint8m8_t value, size_t vl) {
  return vsoxei8_v_i8m8_m(mask, base, bindex, value, vl);
}
2396
2397 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf8_m(
2398 // CHECK-RV64-NEXT: entry:
2399 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
2400 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2401 // CHECK-RV64-NEXT: ret void
2402 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i16.
void test_vsoxei16_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint16mf4_t bindex,
                             vint8mf8_t value, size_t vl) {
  return vsoxei16_v_i8mf8_m(mask, base, bindex, value, vl);
}
2407
2408 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf4_m(
2409 // CHECK-RV64-NEXT: entry:
2410 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
2411 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2412 // CHECK-RV64-NEXT: ret void
2413 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i16.
void test_vsoxei16_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint16mf2_t bindex,
                             vint8mf4_t value, size_t vl) {
  return vsoxei16_v_i8mf4_m(mask, base, bindex, value, vl);
}
2418
2419 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf2_m(
2420 // CHECK-RV64-NEXT: entry:
2421 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
2422 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2423 // CHECK-RV64-NEXT: ret void
2424 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i16.
void test_vsoxei16_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint16m1_t bindex,
                             vint8mf2_t value, size_t vl) {
  return vsoxei16_v_i8mf2_m(mask, base, bindex, value, vl);
}
2429
2430 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8m1_m(
2431 // CHECK-RV64-NEXT: entry:
2432 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
2433 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2434 // CHECK-RV64-NEXT: ret void
2435 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i16.
void test_vsoxei16_v_i8m1_m(vbool8_t mask, int8_t *base, vuint16m2_t bindex,
                            vint8m1_t value, size_t vl) {
  return vsoxei16_v_i8m1_m(mask, base, bindex, value, vl);
}
2440
2441 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8m2_m(
2442 // CHECK-RV64-NEXT: entry:
2443 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
2444 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2445 // CHECK-RV64-NEXT: ret void
2446 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i16.
void test_vsoxei16_v_i8m2_m(vbool4_t mask, int8_t *base, vuint16m4_t bindex,
                            vint8m2_t value, size_t vl) {
  return vsoxei16_v_i8m2_m(mask, base, bindex, value, vl);
}
2451
2452 // CHECK-RV64-LABEL: @test_vsoxei16_v_i8m4_m(
2453 // CHECK-RV64-NEXT: entry:
2454 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
2455 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2456 // CHECK-RV64-NEXT: ret void
2457 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i16.
void test_vsoxei16_v_i8m4_m(vbool2_t mask, int8_t *base, vuint16m8_t bindex,
                            vint8m4_t value, size_t vl) {
  return vsoxei16_v_i8m4_m(mask, base, bindex, value, vl);
}
2462
2463 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf8_m(
2464 // CHECK-RV64-NEXT: entry:
2465 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
2466 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2467 // CHECK-RV64-NEXT: ret void
2468 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i32.
void test_vsoxei32_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint32mf2_t bindex,
                             vint8mf8_t value, size_t vl) {
  return vsoxei32_v_i8mf8_m(mask, base, bindex, value, vl);
}
2473
2474 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf4_m(
2475 // CHECK-RV64-NEXT: entry:
2476 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
2477 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2478 // CHECK-RV64-NEXT: ret void
2479 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i32.
void test_vsoxei32_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint32m1_t bindex,
                             vint8mf4_t value, size_t vl) {
  return vsoxei32_v_i8mf4_m(mask, base, bindex, value, vl);
}
2484
2485 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf2_m(
2486 // CHECK-RV64-NEXT: entry:
2487 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
2488 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2489 // CHECK-RV64-NEXT: ret void
2490 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i32.
void test_vsoxei32_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint32m2_t bindex,
                             vint8mf2_t value, size_t vl) {
  return vsoxei32_v_i8mf2_m(mask, base, bindex, value, vl);
}
2495
2496 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8m1_m(
2497 // CHECK-RV64-NEXT: entry:
2498 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
2499 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2500 // CHECK-RV64-NEXT: ret void
2501 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i32.
void test_vsoxei32_v_i8m1_m(vbool8_t mask, int8_t *base, vuint32m4_t bindex,
                            vint8m1_t value, size_t vl) {
  return vsoxei32_v_i8m1_m(mask, base, bindex, value, vl);
}
2506
2507 // CHECK-RV64-LABEL: @test_vsoxei32_v_i8m2_m(
2508 // CHECK-RV64-NEXT: entry:
2509 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
2510 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2511 // CHECK-RV64-NEXT: ret void
2512 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i32.
void test_vsoxei32_v_i8m2_m(vbool4_t mask, int8_t *base, vuint32m8_t bindex,
                            vint8m2_t value, size_t vl) {
  return vsoxei32_v_i8m2_m(mask, base, bindex, value, vl);
}
2517
2518 // CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf8_m(
2519 // CHECK-RV64-NEXT: entry:
2520 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
2521 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2522 // CHECK-RV64-NEXT: ret void
2523 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i64.
void test_vsoxei64_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint64m1_t bindex,
                             vint8mf8_t value, size_t vl) {
  return vsoxei64_v_i8mf8_m(mask, base, bindex, value, vl);
}
2528
2529 // CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf4_m(
2530 // CHECK-RV64-NEXT: entry:
2531 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
2532 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2533 // CHECK-RV64-NEXT: ret void
2534 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i64.
void test_vsoxei64_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint64m2_t bindex,
                             vint8mf4_t value, size_t vl) {
  return vsoxei64_v_i8mf4_m(mask, base, bindex, value, vl);
}
2539
2540 // CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf2_m(
2541 // CHECK-RV64-NEXT: entry:
2542 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
2543 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2544 // CHECK-RV64-NEXT: ret void
2545 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i64.
void test_vsoxei64_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint64m4_t bindex,
                             vint8mf2_t value, size_t vl) {
  return vsoxei64_v_i8mf2_m(mask, base, bindex, value, vl);
}
2550
2551 // CHECK-RV64-LABEL: @test_vsoxei64_v_i8m1_m(
2552 // CHECK-RV64-NEXT: entry:
2553 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
2554 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2555 // CHECK-RV64-NEXT: ret void
2556 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i64.
void test_vsoxei64_v_i8m1_m(vbool8_t mask, int8_t *base, vuint64m8_t bindex,
                            vint8m1_t value, size_t vl) {
  return vsoxei64_v_i8m1_m(mask, base, bindex, value, vl);
}
2561
2562 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16mf4_m(
2563 // CHECK-RV64-NEXT: entry:
2564 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
2565 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2566 // CHECK-RV64-NEXT: ret void
2567 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i8.
void test_vsoxei8_v_i16mf4_m(vbool64_t mask, int16_t *base, vuint8mf8_t bindex,
                             vint16mf4_t value, size_t vl) {
  return vsoxei8_v_i16mf4_m(mask, base, bindex, value, vl);
}
2572
2573 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16mf2_m(
2574 // CHECK-RV64-NEXT: entry:
2575 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
2576 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2577 // CHECK-RV64-NEXT: ret void
2578 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i8.
void test_vsoxei8_v_i16mf2_m(vbool32_t mask, int16_t *base, vuint8mf4_t bindex,
                             vint16mf2_t value, size_t vl) {
  return vsoxei8_v_i16mf2_m(mask, base, bindex, value, vl);
}
2583
2584 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16m1_m(
2585 // CHECK-RV64-NEXT: entry:
2586 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
2587 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2588 // CHECK-RV64-NEXT: ret void
2589 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i8.
void test_vsoxei8_v_i16m1_m(vbool16_t mask, int16_t *base, vuint8mf2_t bindex,
                            vint16m1_t value, size_t vl) {
  return vsoxei8_v_i16m1_m(mask, base, bindex, value, vl);
}
2594
2595 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16m2_m(
2596 // CHECK-RV64-NEXT: entry:
2597 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
2598 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2599 // CHECK-RV64-NEXT: ret void
2600 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i8.
void test_vsoxei8_v_i16m2_m(vbool8_t mask, int16_t *base, vuint8m1_t bindex,
                            vint16m2_t value, size_t vl) {
  return vsoxei8_v_i16m2_m(mask, base, bindex, value, vl);
}
2605
2606 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16m4_m(
2607 // CHECK-RV64-NEXT: entry:
2608 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
2609 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i8.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2610 // CHECK-RV64-NEXT: ret void
2611 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i8.
void test_vsoxei8_v_i16m4_m(vbool4_t mask, int16_t *base, vuint8m2_t bindex,
                            vint16m4_t value, size_t vl) {
  return vsoxei8_v_i16m4_m(mask, base, bindex, value, vl);
}
2616
2617 // CHECK-RV64-LABEL: @test_vsoxei8_v_i16m8_m(
2618 // CHECK-RV64-NEXT: entry:
2619 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
2620 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i8.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2621 // CHECK-RV64-NEXT: ret void
2622 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i8.
void test_vsoxei8_v_i16m8_m(vbool2_t mask, int16_t *base, vuint8m4_t bindex,
                            vint16m8_t value, size_t vl) {
  return vsoxei8_v_i16m8_m(mask, base, bindex, value, vl);
}
2627
2628 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16mf4_m(
2629 // CHECK-RV64-NEXT: entry:
2630 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
2631 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2632 // CHECK-RV64-NEXT: ret void
2633 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i16.
void test_vsoxei16_v_i16mf4_m(vbool64_t mask, int16_t *base,
                              vuint16mf4_t bindex, vint16mf4_t value,
                              size_t vl) {
  return vsoxei16_v_i16mf4_m(mask, base, bindex, value, vl);
}
2639
2640 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16mf2_m(
2641 // CHECK-RV64-NEXT: entry:
2642 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
2643 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2644 // CHECK-RV64-NEXT: ret void
2645 //
// Masked ordered indexed store; CHECK above expects @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i16.
void test_vsoxei16_v_i16mf2_m(vbool32_t mask, int16_t *base,
                              vuint16mf2_t bindex, vint16mf2_t value,
                              size_t vl) {
  return vsoxei16_v_i16mf2_m(mask, base, bindex, value, vl);
}
2651
2652 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16m1_m(
2653 // CHECK-RV64-NEXT: entry:
2654 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
2655 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2656 // CHECK-RV64-NEXT: ret void
2657 //
// Masked vsoxei16: i16m1 data, u16m1 indices -> llvm.riscv.vsoxei.mask.nxv4i16.nxv4i16 (see CHECK above).
void test_vsoxei16_v_i16m1_m(vbool16_t mask, int16_t *base, vuint16m1_t bindex,
                             vint16m1_t value, size_t vl) {
  return vsoxei16_v_i16m1_m(mask, base, bindex, value, vl);
}
2662
2663 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16m2_m(
2664 // CHECK-RV64-NEXT: entry:
2665 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
2666 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2667 // CHECK-RV64-NEXT: ret void
2668 //
// Masked vsoxei16: i16m2 data, u16m2 indices -> llvm.riscv.vsoxei.mask.nxv8i16.nxv8i16 (see CHECK above).
void test_vsoxei16_v_i16m2_m(vbool8_t mask, int16_t *base, vuint16m2_t bindex,
                             vint16m2_t value, size_t vl) {
  return vsoxei16_v_i16m2_m(mask, base, bindex, value, vl);
}
2673
2674 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16m4_m(
2675 // CHECK-RV64-NEXT: entry:
2676 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
2677 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2678 // CHECK-RV64-NEXT: ret void
2679 //
// Masked vsoxei16: i16m4 data, u16m4 indices -> llvm.riscv.vsoxei.mask.nxv16i16.nxv16i16 (see CHECK above).
void test_vsoxei16_v_i16m4_m(vbool4_t mask, int16_t *base, vuint16m4_t bindex,
                             vint16m4_t value, size_t vl) {
  return vsoxei16_v_i16m4_m(mask, base, bindex, value, vl);
}
2684
2685 // CHECK-RV64-LABEL: @test_vsoxei16_v_i16m8_m(
2686 // CHECK-RV64-NEXT: entry:
2687 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
2688 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2689 // CHECK-RV64-NEXT: ret void
2690 //
// Masked vsoxei16: i16m8 data, u16m8 indices -> llvm.riscv.vsoxei.mask.nxv32i16.nxv32i16 (see CHECK above).
void test_vsoxei16_v_i16m8_m(vbool2_t mask, int16_t *base, vuint16m8_t bindex,
                             vint16m8_t value, size_t vl) {
  return vsoxei16_v_i16m8_m(mask, base, bindex, value, vl);
}
2695
2696 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16mf4_m(
2697 // CHECK-RV64-NEXT: entry:
2698 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
2699 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2700 // CHECK-RV64-NEXT: ret void
2701 //
// Masked vsoxei32: i16mf4 data, u32mf2 indices -> llvm.riscv.vsoxei.mask.nxv1i16.nxv1i32 (see CHECK above).
void test_vsoxei32_v_i16mf4_m(vbool64_t mask, int16_t *base,
                              vuint32mf2_t bindex, vint16mf4_t value,
                              size_t vl) {
  return vsoxei32_v_i16mf4_m(mask, base, bindex, value, vl);
}
2707
2708 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16mf2_m(
2709 // CHECK-RV64-NEXT: entry:
2710 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
2711 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2712 // CHECK-RV64-NEXT: ret void
2713 //
// Masked vsoxei32: i16mf2 data, u32m1 indices -> llvm.riscv.vsoxei.mask.nxv2i16.nxv2i32 (see CHECK above).
void test_vsoxei32_v_i16mf2_m(vbool32_t mask, int16_t *base, vuint32m1_t bindex,
                              vint16mf2_t value, size_t vl) {
  return vsoxei32_v_i16mf2_m(mask, base, bindex, value, vl);
}
2718
2719 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16m1_m(
2720 // CHECK-RV64-NEXT: entry:
2721 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
2722 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2723 // CHECK-RV64-NEXT: ret void
2724 //
// Masked vsoxei32: i16m1 data, u32m2 indices -> llvm.riscv.vsoxei.mask.nxv4i16.nxv4i32 (see CHECK above).
void test_vsoxei32_v_i16m1_m(vbool16_t mask, int16_t *base, vuint32m2_t bindex,
                             vint16m1_t value, size_t vl) {
  return vsoxei32_v_i16m1_m(mask, base, bindex, value, vl);
}
2729
2730 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16m2_m(
2731 // CHECK-RV64-NEXT: entry:
2732 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
2733 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2734 // CHECK-RV64-NEXT: ret void
2735 //
// Masked vsoxei32: i16m2 data, u32m4 indices -> llvm.riscv.vsoxei.mask.nxv8i16.nxv8i32 (see CHECK above).
void test_vsoxei32_v_i16m2_m(vbool8_t mask, int16_t *base, vuint32m4_t bindex,
                             vint16m2_t value, size_t vl) {
  return vsoxei32_v_i16m2_m(mask, base, bindex, value, vl);
}
2740
2741 // CHECK-RV64-LABEL: @test_vsoxei32_v_i16m4_m(
2742 // CHECK-RV64-NEXT: entry:
2743 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
2744 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i32.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2745 // CHECK-RV64-NEXT: ret void
2746 //
// Masked vsoxei32: i16m4 data, u32m8 indices -> llvm.riscv.vsoxei.mask.nxv16i16.nxv16i32 (see CHECK above).
void test_vsoxei32_v_i16m4_m(vbool4_t mask, int16_t *base, vuint32m8_t bindex,
                             vint16m4_t value, size_t vl) {
  return vsoxei32_v_i16m4_m(mask, base, bindex, value, vl);
}
2751
2752 // CHECK-RV64-LABEL: @test_vsoxei64_v_i16mf4_m(
2753 // CHECK-RV64-NEXT: entry:
2754 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
2755 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2756 // CHECK-RV64-NEXT: ret void
2757 //
// Masked vsoxei64: i16mf4 data, u64m1 indices -> llvm.riscv.vsoxei.mask.nxv1i16.nxv1i64 (see CHECK above).
void test_vsoxei64_v_i16mf4_m(vbool64_t mask, int16_t *base, vuint64m1_t bindex,
                              vint16mf4_t value, size_t vl) {
  return vsoxei64_v_i16mf4_m(mask, base, bindex, value, vl);
}
2762
2763 // CHECK-RV64-LABEL: @test_vsoxei64_v_i16mf2_m(
2764 // CHECK-RV64-NEXT: entry:
2765 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
2766 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2767 // CHECK-RV64-NEXT: ret void
2768 //
// Masked vsoxei64: i16mf2 data, u64m2 indices -> llvm.riscv.vsoxei.mask.nxv2i16.nxv2i64 (see CHECK above).
void test_vsoxei64_v_i16mf2_m(vbool32_t mask, int16_t *base, vuint64m2_t bindex,
                              vint16mf2_t value, size_t vl) {
  return vsoxei64_v_i16mf2_m(mask, base, bindex, value, vl);
}
2773
2774 // CHECK-RV64-LABEL: @test_vsoxei64_v_i16m1_m(
2775 // CHECK-RV64-NEXT: entry:
2776 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
2777 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2778 // CHECK-RV64-NEXT: ret void
2779 //
// Masked vsoxei64: i16m1 data, u64m4 indices -> llvm.riscv.vsoxei.mask.nxv4i16.nxv4i64 (see CHECK above).
void test_vsoxei64_v_i16m1_m(vbool16_t mask, int16_t *base, vuint64m4_t bindex,
                             vint16m1_t value, size_t vl) {
  return vsoxei64_v_i16m1_m(mask, base, bindex, value, vl);
}
2784
2785 // CHECK-RV64-LABEL: @test_vsoxei64_v_i16m2_m(
2786 // CHECK-RV64-NEXT: entry:
2787 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
2788 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2789 // CHECK-RV64-NEXT: ret void
2790 //
// Masked vsoxei64: i16m2 data, u64m8 indices -> llvm.riscv.vsoxei.mask.nxv8i16.nxv8i64 (see CHECK above).
void test_vsoxei64_v_i16m2_m(vbool8_t mask, int16_t *base, vuint64m8_t bindex,
                             vint16m2_t value, size_t vl) {
  return vsoxei64_v_i16m2_m(mask, base, bindex, value, vl);
}
2795
2796 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32mf2_m(
2797 // CHECK-RV64-NEXT: entry:
2798 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
2799 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2800 // CHECK-RV64-NEXT: ret void
2801 //
// Masked vsoxei8: i32mf2 data, u8mf8 indices -> llvm.riscv.vsoxei.mask.nxv1i32.nxv1i8 (see CHECK above).
void test_vsoxei8_v_i32mf2_m(vbool64_t mask, int32_t *base, vuint8mf8_t bindex,
                             vint32mf2_t value, size_t vl) {
  return vsoxei8_v_i32mf2_m(mask, base, bindex, value, vl);
}
2806
2807 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32m1_m(
2808 // CHECK-RV64-NEXT: entry:
2809 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
2810 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2811 // CHECK-RV64-NEXT: ret void
2812 //
// Masked vsoxei8: i32m1 data, u8mf4 indices -> llvm.riscv.vsoxei.mask.nxv2i32.nxv2i8 (see CHECK above).
void test_vsoxei8_v_i32m1_m(vbool32_t mask, int32_t *base, vuint8mf4_t bindex,
                            vint32m1_t value, size_t vl) {
  return vsoxei8_v_i32m1_m(mask, base, bindex, value, vl);
}
2817
2818 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32m2_m(
2819 // CHECK-RV64-NEXT: entry:
2820 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
2821 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2822 // CHECK-RV64-NEXT: ret void
2823 //
// Masked vsoxei8: i32m2 data, u8mf2 indices -> llvm.riscv.vsoxei.mask.nxv4i32.nxv4i8 (see CHECK above).
void test_vsoxei8_v_i32m2_m(vbool16_t mask, int32_t *base, vuint8mf2_t bindex,
                            vint32m2_t value, size_t vl) {
  return vsoxei8_v_i32m2_m(mask, base, bindex, value, vl);
}
2828
2829 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32m4_m(
2830 // CHECK-RV64-NEXT: entry:
2831 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
2832 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i8.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2833 // CHECK-RV64-NEXT: ret void
2834 //
// Masked vsoxei8: i32m4 data, u8m1 indices -> llvm.riscv.vsoxei.mask.nxv8i32.nxv8i8 (see CHECK above).
void test_vsoxei8_v_i32m4_m(vbool8_t mask, int32_t *base, vuint8m1_t bindex,
                            vint32m4_t value, size_t vl) {
  return vsoxei8_v_i32m4_m(mask, base, bindex, value, vl);
}
2839
2840 // CHECK-RV64-LABEL: @test_vsoxei8_v_i32m8_m(
2841 // CHECK-RV64-NEXT: entry:
2842 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
2843 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i8.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2844 // CHECK-RV64-NEXT: ret void
2845 //
// Masked vsoxei8: i32m8 data, u8m2 indices -> llvm.riscv.vsoxei.mask.nxv16i32.nxv16i8 (see CHECK above).
void test_vsoxei8_v_i32m8_m(vbool4_t mask, int32_t *base, vuint8m2_t bindex,
                            vint32m8_t value, size_t vl) {
  return vsoxei8_v_i32m8_m(mask, base, bindex, value, vl);
}
2850
2851 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32mf2_m(
2852 // CHECK-RV64-NEXT: entry:
2853 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
2854 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2855 // CHECK-RV64-NEXT: ret void
2856 //
// Masked vsoxei16: i32mf2 data, u16mf4 indices -> llvm.riscv.vsoxei.mask.nxv1i32.nxv1i16 (see CHECK above).
void test_vsoxei16_v_i32mf2_m(vbool64_t mask, int32_t *base,
                              vuint16mf4_t bindex, vint32mf2_t value,
                              size_t vl) {
  return vsoxei16_v_i32mf2_m(mask, base, bindex, value, vl);
}
2862
2863 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32m1_m(
2864 // CHECK-RV64-NEXT: entry:
2865 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
2866 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2867 // CHECK-RV64-NEXT: ret void
2868 //
// Masked vsoxei16: i32m1 data, u16mf2 indices -> llvm.riscv.vsoxei.mask.nxv2i32.nxv2i16 (see CHECK above).
void test_vsoxei16_v_i32m1_m(vbool32_t mask, int32_t *base, vuint16mf2_t bindex,
                             vint32m1_t value, size_t vl) {
  return vsoxei16_v_i32m1_m(mask, base, bindex, value, vl);
}
2873
2874 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32m2_m(
2875 // CHECK-RV64-NEXT: entry:
2876 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
2877 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2878 // CHECK-RV64-NEXT: ret void
2879 //
// Masked vsoxei16: i32m2 data, u16m1 indices -> llvm.riscv.vsoxei.mask.nxv4i32.nxv4i16 (see CHECK above).
void test_vsoxei16_v_i32m2_m(vbool16_t mask, int32_t *base, vuint16m1_t bindex,
                             vint32m2_t value, size_t vl) {
  return vsoxei16_v_i32m2_m(mask, base, bindex, value, vl);
}
2884
2885 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32m4_m(
2886 // CHECK-RV64-NEXT: entry:
2887 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
2888 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2889 // CHECK-RV64-NEXT: ret void
2890 //
// Masked vsoxei16: i32m4 data, u16m2 indices -> llvm.riscv.vsoxei.mask.nxv8i32.nxv8i16 (see CHECK above).
void test_vsoxei16_v_i32m4_m(vbool8_t mask, int32_t *base, vuint16m2_t bindex,
                             vint32m4_t value, size_t vl) {
  return vsoxei16_v_i32m4_m(mask, base, bindex, value, vl);
}
2895
2896 // CHECK-RV64-LABEL: @test_vsoxei16_v_i32m8_m(
2897 // CHECK-RV64-NEXT: entry:
2898 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
2899 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i16.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2900 // CHECK-RV64-NEXT: ret void
2901 //
// Masked vsoxei16: i32m8 data, u16m4 indices -> llvm.riscv.vsoxei.mask.nxv16i32.nxv16i16 (see CHECK above).
void test_vsoxei16_v_i32m8_m(vbool4_t mask, int32_t *base, vuint16m4_t bindex,
                             vint32m8_t value, size_t vl) {
  return vsoxei16_v_i32m8_m(mask, base, bindex, value, vl);
}
2906
2907 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32mf2_m(
2908 // CHECK-RV64-NEXT: entry:
2909 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
2910 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2911 // CHECK-RV64-NEXT: ret void
2912 //
// Masked vsoxei32: i32mf2 data, u32mf2 indices -> llvm.riscv.vsoxei.mask.nxv1i32.nxv1i32 (see CHECK above).
void test_vsoxei32_v_i32mf2_m(vbool64_t mask, int32_t *base,
                              vuint32mf2_t bindex, vint32mf2_t value,
                              size_t vl) {
  return vsoxei32_v_i32mf2_m(mask, base, bindex, value, vl);
}
2918
2919 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32m1_m(
2920 // CHECK-RV64-NEXT: entry:
2921 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
2922 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2923 // CHECK-RV64-NEXT: ret void
2924 //
// Masked vsoxei32: i32m1 data, u32m1 indices -> llvm.riscv.vsoxei.mask.nxv2i32.nxv2i32 (see CHECK above).
void test_vsoxei32_v_i32m1_m(vbool32_t mask, int32_t *base, vuint32m1_t bindex,
                             vint32m1_t value, size_t vl) {
  return vsoxei32_v_i32m1_m(mask, base, bindex, value, vl);
}
2929
2930 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32m2_m(
2931 // CHECK-RV64-NEXT: entry:
2932 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
2933 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2934 // CHECK-RV64-NEXT: ret void
2935 //
// Masked vsoxei32: i32m2 data, u32m2 indices -> llvm.riscv.vsoxei.mask.nxv4i32.nxv4i32 (see CHECK above).
void test_vsoxei32_v_i32m2_m(vbool16_t mask, int32_t *base, vuint32m2_t bindex,
                             vint32m2_t value, size_t vl) {
  return vsoxei32_v_i32m2_m(mask, base, bindex, value, vl);
}
2940
2941 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32m4_m(
2942 // CHECK-RV64-NEXT: entry:
2943 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
2944 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2945 // CHECK-RV64-NEXT: ret void
2946 //
// Masked vsoxei32: i32m4 data, u32m4 indices -> llvm.riscv.vsoxei.mask.nxv8i32.nxv8i32 (see CHECK above).
void test_vsoxei32_v_i32m4_m(vbool8_t mask, int32_t *base, vuint32m4_t bindex,
                             vint32m4_t value, size_t vl) {
  return vsoxei32_v_i32m4_m(mask, base, bindex, value, vl);
}
2951
2952 // CHECK-RV64-LABEL: @test_vsoxei32_v_i32m8_m(
2953 // CHECK-RV64-NEXT: entry:
2954 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
2955 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2956 // CHECK-RV64-NEXT: ret void
2957 //
// Masked vsoxei32: i32m8 data, u32m8 indices -> llvm.riscv.vsoxei.mask.nxv16i32.nxv16i32 (see CHECK above).
void test_vsoxei32_v_i32m8_m(vbool4_t mask, int32_t *base, vuint32m8_t bindex,
                             vint32m8_t value, size_t vl) {
  return vsoxei32_v_i32m8_m(mask, base, bindex, value, vl);
}
2962
2963 // CHECK-RV64-LABEL: @test_vsoxei64_v_i32mf2_m(
2964 // CHECK-RV64-NEXT: entry:
2965 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
2966 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2967 // CHECK-RV64-NEXT: ret void
2968 //
// Masked vsoxei64: i32mf2 data, u64m1 indices -> llvm.riscv.vsoxei.mask.nxv1i32.nxv1i64 (see CHECK above).
void test_vsoxei64_v_i32mf2_m(vbool64_t mask, int32_t *base, vuint64m1_t bindex,
                              vint32mf2_t value, size_t vl) {
  return vsoxei64_v_i32mf2_m(mask, base, bindex, value, vl);
}
2973
2974 // CHECK-RV64-LABEL: @test_vsoxei64_v_i32m1_m(
2975 // CHECK-RV64-NEXT: entry:
2976 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
2977 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2978 // CHECK-RV64-NEXT: ret void
2979 //
// Masked vsoxei64: i32m1 data, u64m2 indices -> llvm.riscv.vsoxei.mask.nxv2i32.nxv2i64 (see CHECK above).
void test_vsoxei64_v_i32m1_m(vbool32_t mask, int32_t *base, vuint64m2_t bindex,
                             vint32m1_t value, size_t vl) {
  return vsoxei64_v_i32m1_m(mask, base, bindex, value, vl);
}
2984
2985 // CHECK-RV64-LABEL: @test_vsoxei64_v_i32m2_m(
2986 // CHECK-RV64-NEXT: entry:
2987 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
2988 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
2989 // CHECK-RV64-NEXT: ret void
2990 //
// Masked vsoxei64: i32m2 data, u64m4 indices -> llvm.riscv.vsoxei.mask.nxv4i32.nxv4i64 (see CHECK above).
void test_vsoxei64_v_i32m2_m(vbool16_t mask, int32_t *base, vuint64m4_t bindex,
                             vint32m2_t value, size_t vl) {
  return vsoxei64_v_i32m2_m(mask, base, bindex, value, vl);
}
2995
2996 // CHECK-RV64-LABEL: @test_vsoxei64_v_i32m4_m(
2997 // CHECK-RV64-NEXT: entry:
2998 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
2999 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3000 // CHECK-RV64-NEXT: ret void
3001 //
// Masked vsoxei64: i32m4 data, u64m8 indices -> llvm.riscv.vsoxei.mask.nxv8i32.nxv8i64 (see CHECK above).
void test_vsoxei64_v_i32m4_m(vbool8_t mask, int32_t *base, vuint64m8_t bindex,
                             vint32m4_t value, size_t vl) {
  return vsoxei64_v_i32m4_m(mask, base, bindex, value, vl);
}
3006
3007 // CHECK-RV64-LABEL: @test_vsoxei8_v_i64m1_m(
3008 // CHECK-RV64-NEXT: entry:
3009 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
3010 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3011 // CHECK-RV64-NEXT: ret void
3012 //
// Masked vsoxei8: i64m1 data, u8mf8 indices -> llvm.riscv.vsoxei.mask.nxv1i64.nxv1i8 (see CHECK above).
void test_vsoxei8_v_i64m1_m(vbool64_t mask, int64_t *base, vuint8mf8_t bindex,
                            vint64m1_t value, size_t vl) {
  return vsoxei8_v_i64m1_m(mask, base, bindex, value, vl);
}
3017
3018 // CHECK-RV64-LABEL: @test_vsoxei8_v_i64m2_m(
3019 // CHECK-RV64-NEXT: entry:
3020 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
3021 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3022 // CHECK-RV64-NEXT: ret void
3023 //
// Masked vsoxei8: i64m2 data, u8mf4 indices -> llvm.riscv.vsoxei.mask.nxv2i64.nxv2i8 (see CHECK above).
void test_vsoxei8_v_i64m2_m(vbool32_t mask, int64_t *base, vuint8mf4_t bindex,
                            vint64m2_t value, size_t vl) {
  return vsoxei8_v_i64m2_m(mask, base, bindex, value, vl);
}
3028
3029 // CHECK-RV64-LABEL: @test_vsoxei8_v_i64m4_m(
3030 // CHECK-RV64-NEXT: entry:
3031 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
3032 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3033 // CHECK-RV64-NEXT: ret void
3034 //
// Masked vsoxei8: i64m4 data, u8mf2 indices -> llvm.riscv.vsoxei.mask.nxv4i64.nxv4i8 (see CHECK above).
void test_vsoxei8_v_i64m4_m(vbool16_t mask, int64_t *base, vuint8mf2_t bindex,
                            vint64m4_t value, size_t vl) {
  return vsoxei8_v_i64m4_m(mask, base, bindex, value, vl);
}
3039
3040 // CHECK-RV64-LABEL: @test_vsoxei8_v_i64m8_m(
3041 // CHECK-RV64-NEXT: entry:
3042 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
3043 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i8.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3044 // CHECK-RV64-NEXT: ret void
3045 //
// Masked vsoxei8: i64m8 data, u8m1 indices -> llvm.riscv.vsoxei.mask.nxv8i64.nxv8i8 (see CHECK above).
void test_vsoxei8_v_i64m8_m(vbool8_t mask, int64_t *base, vuint8m1_t bindex,
                            vint64m8_t value, size_t vl) {
  return vsoxei8_v_i64m8_m(mask, base, bindex, value, vl);
}
3050
3051 // CHECK-RV64-LABEL: @test_vsoxei16_v_i64m1_m(
3052 // CHECK-RV64-NEXT: entry:
3053 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
3054 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3055 // CHECK-RV64-NEXT: ret void
3056 //
// Masked vsoxei16: i64m1 data, u16mf4 indices -> llvm.riscv.vsoxei.mask.nxv1i64.nxv1i16 (see CHECK above).
void test_vsoxei16_v_i64m1_m(vbool64_t mask, int64_t *base, vuint16mf4_t bindex,
                             vint64m1_t value, size_t vl) {
  return vsoxei16_v_i64m1_m(mask, base, bindex, value, vl);
}
3061
3062 // CHECK-RV64-LABEL: @test_vsoxei16_v_i64m2_m(
3063 // CHECK-RV64-NEXT: entry:
3064 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
3065 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3066 // CHECK-RV64-NEXT: ret void
3067 //
// Masked vsoxei16: i64m2 data, u16mf2 indices -> llvm.riscv.vsoxei.mask.nxv2i64.nxv2i16 (see CHECK above).
void test_vsoxei16_v_i64m2_m(vbool32_t mask, int64_t *base, vuint16mf2_t bindex,
                             vint64m2_t value, size_t vl) {
  return vsoxei16_v_i64m2_m(mask, base, bindex, value, vl);
}
3072
3073 // CHECK-RV64-LABEL: @test_vsoxei16_v_i64m4_m(
3074 // CHECK-RV64-NEXT: entry:
3075 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
3076 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3077 // CHECK-RV64-NEXT: ret void
3078 //
// Masked vsoxei16: i64m4 data, u16m1 indices -> llvm.riscv.vsoxei.mask.nxv4i64.nxv4i16 (see CHECK above).
void test_vsoxei16_v_i64m4_m(vbool16_t mask, int64_t *base, vuint16m1_t bindex,
                             vint64m4_t value, size_t vl) {
  return vsoxei16_v_i64m4_m(mask, base, bindex, value, vl);
}
3083
3084 // CHECK-RV64-LABEL: @test_vsoxei16_v_i64m8_m(
3085 // CHECK-RV64-NEXT: entry:
3086 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
3087 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i16.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3088 // CHECK-RV64-NEXT: ret void
3089 //
// Masked vsoxei16: i64m8 data, u16m2 indices -> llvm.riscv.vsoxei.mask.nxv8i64.nxv8i16 (see CHECK above).
void test_vsoxei16_v_i64m8_m(vbool8_t mask, int64_t *base, vuint16m2_t bindex,
                             vint64m8_t value, size_t vl) {
  return vsoxei16_v_i64m8_m(mask, base, bindex, value, vl);
}
3094
3095 // CHECK-RV64-LABEL: @test_vsoxei32_v_i64m1_m(
3096 // CHECK-RV64-NEXT: entry:
3097 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
3098 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3099 // CHECK-RV64-NEXT: ret void
3100 //
// Masked vsoxei32: i64m1 data, u32mf2 indices -> llvm.riscv.vsoxei.mask.nxv1i64.nxv1i32 (see CHECK above).
void test_vsoxei32_v_i64m1_m(vbool64_t mask, int64_t *base, vuint32mf2_t bindex,
                             vint64m1_t value, size_t vl) {
  return vsoxei32_v_i64m1_m(mask, base, bindex, value, vl);
}
3105
3106 // CHECK-RV64-LABEL: @test_vsoxei32_v_i64m2_m(
3107 // CHECK-RV64-NEXT: entry:
3108 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
3109 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3110 // CHECK-RV64-NEXT: ret void
3111 //
// Masked vsoxei32: i64m2 data, u32m1 indices -> llvm.riscv.vsoxei.mask.nxv2i64.nxv2i32 (see CHECK above).
void test_vsoxei32_v_i64m2_m(vbool32_t mask, int64_t *base, vuint32m1_t bindex,
                             vint64m2_t value, size_t vl) {
  return vsoxei32_v_i64m2_m(mask, base, bindex, value, vl);
}
3116
3117 // CHECK-RV64-LABEL: @test_vsoxei32_v_i64m4_m(
3118 // CHECK-RV64-NEXT: entry:
3119 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
3120 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3121 // CHECK-RV64-NEXT: ret void
3122 //
// Masked vsoxei32: i64m4 data, u32m2 indices -> llvm.riscv.vsoxei.mask.nxv4i64.nxv4i32 (see CHECK above).
void test_vsoxei32_v_i64m4_m(vbool16_t mask, int64_t *base, vuint32m2_t bindex,
                             vint64m4_t value, size_t vl) {
  return vsoxei32_v_i64m4_m(mask, base, bindex, value, vl);
}
3127
3128 // CHECK-RV64-LABEL: @test_vsoxei32_v_i64m8_m(
3129 // CHECK-RV64-NEXT: entry:
3130 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
3131 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i32.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3132 // CHECK-RV64-NEXT: ret void
3133 //
// Codegen test for vsoxei32_v_i64m8_m (masked indexed store, i64 data / u32
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_i64m8_m(vbool8_t mask, int64_t *base, vuint32m4_t bindex,
                             vint64m8_t value, size_t vl) {
  return vsoxei32_v_i64m8_m(mask, base, bindex, value, vl);
}
3138
3139 // CHECK-RV64-LABEL: @test_vsoxei64_v_i64m1_m(
3140 // CHECK-RV64-NEXT: entry:
3141 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
3142 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3143 // CHECK-RV64-NEXT: ret void
3144 //
// Codegen test for vsoxei64_v_i64m1_m (masked indexed store, i64 data / u64
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_i64m1_m(vbool64_t mask, int64_t *base, vuint64m1_t bindex,
                             vint64m1_t value, size_t vl) {
  return vsoxei64_v_i64m1_m(mask, base, bindex, value, vl);
}
3149
3150 // CHECK-RV64-LABEL: @test_vsoxei64_v_i64m2_m(
3151 // CHECK-RV64-NEXT: entry:
3152 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
3153 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3154 // CHECK-RV64-NEXT: ret void
3155 //
// Codegen test for vsoxei64_v_i64m2_m (masked indexed store, i64 data / u64
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_i64m2_m(vbool32_t mask, int64_t *base, vuint64m2_t bindex,
                             vint64m2_t value, size_t vl) {
  return vsoxei64_v_i64m2_m(mask, base, bindex, value, vl);
}
3160
3161 // CHECK-RV64-LABEL: @test_vsoxei64_v_i64m4_m(
3162 // CHECK-RV64-NEXT: entry:
3163 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
3164 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3165 // CHECK-RV64-NEXT: ret void
3166 //
// Codegen test for vsoxei64_v_i64m4_m (masked indexed store, i64 data / u64
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_i64m4_m(vbool16_t mask, int64_t *base, vuint64m4_t bindex,
                             vint64m4_t value, size_t vl) {
  return vsoxei64_v_i64m4_m(mask, base, bindex, value, vl);
}
3171
3172 // CHECK-RV64-LABEL: @test_vsoxei64_v_i64m8_m(
3173 // CHECK-RV64-NEXT: entry:
3174 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
3175 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3176 // CHECK-RV64-NEXT: ret void
3177 //
// Codegen test for vsoxei64_v_i64m8_m (masked indexed store, i64 data / u64
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_i64m8_m(vbool8_t mask, int64_t *base, vuint64m8_t bindex,
                             vint64m8_t value, size_t vl) {
  return vsoxei64_v_i64m8_m(mask, base, bindex, value, vl);
}
3182
3183 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf8_m(
3184 // CHECK-RV64-NEXT: entry:
3185 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
3186 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3187 // CHECK-RV64-NEXT: ret void
3188 //
// Codegen test for vsoxei8_v_u8mf8_m (masked indexed store, u8 data / u8
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint8mf8_t bindex,
                            vuint8mf8_t value, size_t vl) {
  return vsoxei8_v_u8mf8_m(mask, base, bindex, value, vl);
}
3193
3194 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf4_m(
3195 // CHECK-RV64-NEXT: entry:
3196 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
3197 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3198 // CHECK-RV64-NEXT: ret void
3199 //
// Codegen test for vsoxei8_v_u8mf4_m (masked indexed store, u8 data / u8
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint8mf4_t bindex,
                            vuint8mf4_t value, size_t vl) {
  return vsoxei8_v_u8mf4_m(mask, base, bindex, value, vl);
}
3204
3205 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf2_m(
3206 // CHECK-RV64-NEXT: entry:
3207 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
3208 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3209 // CHECK-RV64-NEXT: ret void
3210 //
// Codegen test for vsoxei8_v_u8mf2_m (masked indexed store, u8 data / u8
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint8mf2_t bindex,
                            vuint8mf2_t value, size_t vl) {
  return vsoxei8_v_u8mf2_m(mask, base, bindex, value, vl);
}
3215
3216 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8m1_m(
3217 // CHECK-RV64-NEXT: entry:
3218 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
3219 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3220 // CHECK-RV64-NEXT: ret void
3221 //
// Codegen test for vsoxei8_v_u8m1_m (masked indexed store, u8 data / u8
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint8m1_t bindex,
                           vuint8m1_t value, size_t vl) {
  return vsoxei8_v_u8m1_m(mask, base, bindex, value, vl);
}
3226
3227 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8m2_m(
3228 // CHECK-RV64-NEXT: entry:
3229 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
3230 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3231 // CHECK-RV64-NEXT: ret void
3232 //
// Codegen test for vsoxei8_v_u8m2_m (masked indexed store, u8 data / u8
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u8m2_m(vbool4_t mask, uint8_t *base, vuint8m2_t bindex,
                           vuint8m2_t value, size_t vl) {
  return vsoxei8_v_u8m2_m(mask, base, bindex, value, vl);
}
3237
3238 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8m4_m(
3239 // CHECK-RV64-NEXT: entry:
3240 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
3241 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3242 // CHECK-RV64-NEXT: ret void
3243 //
// Codegen test for vsoxei8_v_u8m4_m (masked indexed store, u8 data / u8
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u8m4_m(vbool2_t mask, uint8_t *base, vuint8m4_t bindex,
                           vuint8m4_t value, size_t vl) {
  return vsoxei8_v_u8m4_m(mask, base, bindex, value, vl);
}
3248
3249 // CHECK-RV64-LABEL: @test_vsoxei8_v_u8m8_m(
3250 // CHECK-RV64-NEXT: entry:
3251 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
3252 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], <vscale x 64 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3253 // CHECK-RV64-NEXT: ret void
3254 //
// Codegen test for vsoxei8_v_u8m8_m (masked indexed store, u8 data / u8
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u8m8_m(vbool1_t mask, uint8_t *base, vuint8m8_t bindex,
                           vuint8m8_t value, size_t vl) {
  return vsoxei8_v_u8m8_m(mask, base, bindex, value, vl);
}
3259
3260 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf8_m(
3261 // CHECK-RV64-NEXT: entry:
3262 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
3263 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3264 // CHECK-RV64-NEXT: ret void
3265 //
// Codegen test for vsoxei16_v_u8mf8_m (masked indexed store, u8 data / u16
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint16mf4_t bindex,
                             vuint8mf8_t value, size_t vl) {
  return vsoxei16_v_u8mf8_m(mask, base, bindex, value, vl);
}
3270
3271 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf4_m(
3272 // CHECK-RV64-NEXT: entry:
3273 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
3274 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3275 // CHECK-RV64-NEXT: ret void
3276 //
// Codegen test for vsoxei16_v_u8mf4_m (masked indexed store, u8 data / u16
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint16mf2_t bindex,
                             vuint8mf4_t value, size_t vl) {
  return vsoxei16_v_u8mf4_m(mask, base, bindex, value, vl);
}
3281
3282 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf2_m(
3283 // CHECK-RV64-NEXT: entry:
3284 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
3285 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3286 // CHECK-RV64-NEXT: ret void
3287 //
// Codegen test for vsoxei16_v_u8mf2_m (masked indexed store, u8 data / u16
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint16m1_t bindex,
                             vuint8mf2_t value, size_t vl) {
  return vsoxei16_v_u8mf2_m(mask, base, bindex, value, vl);
}
3292
3293 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8m1_m(
3294 // CHECK-RV64-NEXT: entry:
3295 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
3296 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3297 // CHECK-RV64-NEXT: ret void
3298 //
// Codegen test for vsoxei16_v_u8m1_m (masked indexed store, u8 data / u16
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint16m2_t bindex,
                            vuint8m1_t value, size_t vl) {
  return vsoxei16_v_u8m1_m(mask, base, bindex, value, vl);
}
3303
3304 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8m2_m(
3305 // CHECK-RV64-NEXT: entry:
3306 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
3307 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3308 // CHECK-RV64-NEXT: ret void
3309 //
// Codegen test for vsoxei16_v_u8m2_m (masked indexed store, u8 data / u16
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u8m2_m(vbool4_t mask, uint8_t *base, vuint16m4_t bindex,
                            vuint8m2_t value, size_t vl) {
  return vsoxei16_v_u8m2_m(mask, base, bindex, value, vl);
}
3314
3315 // CHECK-RV64-LABEL: @test_vsoxei16_v_u8m4_m(
3316 // CHECK-RV64-NEXT: entry:
3317 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
3318 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3319 // CHECK-RV64-NEXT: ret void
3320 //
// Codegen test for vsoxei16_v_u8m4_m (masked indexed store, u8 data / u16
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u8m4_m(vbool2_t mask, uint8_t *base, vuint16m8_t bindex,
                            vuint8m4_t value, size_t vl) {
  return vsoxei16_v_u8m4_m(mask, base, bindex, value, vl);
}
3325
3326 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf8_m(
3327 // CHECK-RV64-NEXT: entry:
3328 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
3329 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3330 // CHECK-RV64-NEXT: ret void
3331 //
// Codegen test for vsoxei32_v_u8mf8_m (masked indexed store, u8 data / u32
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint32mf2_t bindex,
                             vuint8mf8_t value, size_t vl) {
  return vsoxei32_v_u8mf8_m(mask, base, bindex, value, vl);
}
3336
3337 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf4_m(
3338 // CHECK-RV64-NEXT: entry:
3339 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
3340 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3341 // CHECK-RV64-NEXT: ret void
3342 //
// Codegen test for vsoxei32_v_u8mf4_m (masked indexed store, u8 data / u32
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint32m1_t bindex,
                             vuint8mf4_t value, size_t vl) {
  return vsoxei32_v_u8mf4_m(mask, base, bindex, value, vl);
}
3347
3348 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf2_m(
3349 // CHECK-RV64-NEXT: entry:
3350 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
3351 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3352 // CHECK-RV64-NEXT: ret void
3353 //
// Codegen test for vsoxei32_v_u8mf2_m (masked indexed store, u8 data / u32
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint32m2_t bindex,
                             vuint8mf2_t value, size_t vl) {
  return vsoxei32_v_u8mf2_m(mask, base, bindex, value, vl);
}
3358
3359 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8m1_m(
3360 // CHECK-RV64-NEXT: entry:
3361 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
3362 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3363 // CHECK-RV64-NEXT: ret void
3364 //
// Codegen test for vsoxei32_v_u8m1_m (masked indexed store, u8 data / u32
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint32m4_t bindex,
                            vuint8m1_t value, size_t vl) {
  return vsoxei32_v_u8m1_m(mask, base, bindex, value, vl);
}
3369
3370 // CHECK-RV64-LABEL: @test_vsoxei32_v_u8m2_m(
3371 // CHECK-RV64-NEXT: entry:
3372 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
3373 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3374 // CHECK-RV64-NEXT: ret void
3375 //
// Codegen test for vsoxei32_v_u8m2_m (masked indexed store, u8 data / u32
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_u8m2_m(vbool4_t mask, uint8_t *base, vuint32m8_t bindex,
                            vuint8m2_t value, size_t vl) {
  return vsoxei32_v_u8m2_m(mask, base, bindex, value, vl);
}
3380
3381 // CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf8_m(
3382 // CHECK-RV64-NEXT: entry:
3383 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
3384 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3385 // CHECK-RV64-NEXT: ret void
3386 //
// Codegen test for vsoxei64_v_u8mf8_m (masked indexed store, u8 data / u64
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint64m1_t bindex,
                             vuint8mf8_t value, size_t vl) {
  return vsoxei64_v_u8mf8_m(mask, base, bindex, value, vl);
}
3391
3392 // CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf4_m(
3393 // CHECK-RV64-NEXT: entry:
3394 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
3395 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3396 // CHECK-RV64-NEXT: ret void
3397 //
// Codegen test for vsoxei64_v_u8mf4_m (masked indexed store, u8 data / u64
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint64m2_t bindex,
                             vuint8mf4_t value, size_t vl) {
  return vsoxei64_v_u8mf4_m(mask, base, bindex, value, vl);
}
3402
3403 // CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf2_m(
3404 // CHECK-RV64-NEXT: entry:
3405 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
3406 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3407 // CHECK-RV64-NEXT: ret void
3408 //
// Codegen test for vsoxei64_v_u8mf2_m (masked indexed store, u8 data / u64
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint64m4_t bindex,
                             vuint8mf2_t value, size_t vl) {
  return vsoxei64_v_u8mf2_m(mask, base, bindex, value, vl);
}
3413
3414 // CHECK-RV64-LABEL: @test_vsoxei64_v_u8m1_m(
3415 // CHECK-RV64-NEXT: entry:
3416 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
3417 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3418 // CHECK-RV64-NEXT: ret void
3419 //
// Codegen test for vsoxei64_v_u8m1_m (masked indexed store, u8 data / u64
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei64_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint64m8_t bindex,
                            vuint8m1_t value, size_t vl) {
  return vsoxei64_v_u8m1_m(mask, base, bindex, value, vl);
}
3424
3425 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16mf4_m(
3426 // CHECK-RV64-NEXT: entry:
3427 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
3428 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3429 // CHECK-RV64-NEXT: ret void
3430 //
// Codegen test for vsoxei8_v_u16mf4_m (masked indexed store, u16 data / u8
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u16mf4_m(vbool64_t mask, uint16_t *base, vuint8mf8_t bindex,
                             vuint16mf4_t value, size_t vl) {
  return vsoxei8_v_u16mf4_m(mask, base, bindex, value, vl);
}
3435
3436 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16mf2_m(
3437 // CHECK-RV64-NEXT: entry:
3438 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
3439 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3440 // CHECK-RV64-NEXT: ret void
3441 //
// Codegen test for vsoxei8_v_u16mf2_m (masked indexed store, u16 data / u8
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u16mf2_m(vbool32_t mask, uint16_t *base, vuint8mf4_t bindex,
                             vuint16mf2_t value, size_t vl) {
  return vsoxei8_v_u16mf2_m(mask, base, bindex, value, vl);
}
3446
3447 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16m1_m(
3448 // CHECK-RV64-NEXT: entry:
3449 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
3450 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3451 // CHECK-RV64-NEXT: ret void
3452 //
// Codegen test for vsoxei8_v_u16m1_m (masked indexed store, u16 data / u8
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint8mf2_t bindex,
                            vuint16m1_t value, size_t vl) {
  return vsoxei8_v_u16m1_m(mask, base, bindex, value, vl);
}
3457
3458 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16m2_m(
3459 // CHECK-RV64-NEXT: entry:
3460 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
3461 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3462 // CHECK-RV64-NEXT: ret void
3463 //
// Codegen test for vsoxei8_v_u16m2_m (masked indexed store, u16 data / u8
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint8m1_t bindex,
                            vuint16m2_t value, size_t vl) {
  return vsoxei8_v_u16m2_m(mask, base, bindex, value, vl);
}
3468
3469 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16m4_m(
3470 // CHECK-RV64-NEXT: entry:
3471 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
3472 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i8.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3473 // CHECK-RV64-NEXT: ret void
3474 //
// Codegen test for vsoxei8_v_u16m4_m (masked indexed store, u16 data / u8
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u16m4_m(vbool4_t mask, uint16_t *base, vuint8m2_t bindex,
                            vuint16m4_t value, size_t vl) {
  return vsoxei8_v_u16m4_m(mask, base, bindex, value, vl);
}
3479
3480 // CHECK-RV64-LABEL: @test_vsoxei8_v_u16m8_m(
3481 // CHECK-RV64-NEXT: entry:
3482 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
3483 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i8.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3484 // CHECK-RV64-NEXT: ret void
3485 //
// Codegen test for vsoxei8_v_u16m8_m (masked indexed store, u16 data / u8
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei8_v_u16m8_m(vbool2_t mask, uint16_t *base, vuint8m4_t bindex,
                            vuint16m8_t value, size_t vl) {
  return vsoxei8_v_u16m8_m(mask, base, bindex, value, vl);
}
3490
3491 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16mf4_m(
3492 // CHECK-RV64-NEXT: entry:
3493 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
3494 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3495 // CHECK-RV64-NEXT: ret void
3496 //
// Codegen test for vsoxei16_v_u16mf4_m (masked indexed store, u16 data / u16
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u16mf4_m(vbool64_t mask, uint16_t *base,
                              vuint16mf4_t bindex, vuint16mf4_t value,
                              size_t vl) {
  return vsoxei16_v_u16mf4_m(mask, base, bindex, value, vl);
}
3502
3503 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16mf2_m(
3504 // CHECK-RV64-NEXT: entry:
3505 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
3506 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3507 // CHECK-RV64-NEXT: ret void
3508 //
// Codegen test for vsoxei16_v_u16mf2_m (masked indexed store, u16 data / u16
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u16mf2_m(vbool32_t mask, uint16_t *base,
                              vuint16mf2_t bindex, vuint16mf2_t value,
                              size_t vl) {
  return vsoxei16_v_u16mf2_m(mask, base, bindex, value, vl);
}
3514
3515 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16m1_m(
3516 // CHECK-RV64-NEXT: entry:
3517 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
3518 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3519 // CHECK-RV64-NEXT: ret void
3520 //
// Codegen test for vsoxei16_v_u16m1_m (masked indexed store, u16 data / u16
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint16m1_t bindex,
                             vuint16m1_t value, size_t vl) {
  return vsoxei16_v_u16m1_m(mask, base, bindex, value, vl);
}
3525
3526 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16m2_m(
3527 // CHECK-RV64-NEXT: entry:
3528 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
3529 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3530 // CHECK-RV64-NEXT: ret void
3531 //
// Codegen test for vsoxei16_v_u16m2_m (masked indexed store, u16 data / u16
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint16m2_t bindex,
                             vuint16m2_t value, size_t vl) {
  return vsoxei16_v_u16m2_m(mask, base, bindex, value, vl);
}
3536
3537 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16m4_m(
3538 // CHECK-RV64-NEXT: entry:
3539 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
3540 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3541 // CHECK-RV64-NEXT: ret void
3542 //
// Codegen test for vsoxei16_v_u16m4_m (masked indexed store, u16 data / u16
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u16m4_m(vbool4_t mask, uint16_t *base, vuint16m4_t bindex,
                             vuint16m4_t value, size_t vl) {
  return vsoxei16_v_u16m4_m(mask, base, bindex, value, vl);
}
3547
3548 // CHECK-RV64-LABEL: @test_vsoxei16_v_u16m8_m(
3549 // CHECK-RV64-NEXT: entry:
3550 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
3551 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3552 // CHECK-RV64-NEXT: ret void
3553 //
// Codegen test for vsoxei16_v_u16m8_m (masked indexed store, u16 data / u16
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei16_v_u16m8_m(vbool2_t mask, uint16_t *base, vuint16m8_t bindex,
                             vuint16m8_t value, size_t vl) {
  return vsoxei16_v_u16m8_m(mask, base, bindex, value, vl);
}
3558
3559 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16mf4_m(
3560 // CHECK-RV64-NEXT: entry:
3561 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
3562 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3563 // CHECK-RV64-NEXT: ret void
3564 //
// Codegen test for vsoxei32_v_u16mf4_m (masked indexed store, u16 data / u32
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_u16mf4_m(vbool64_t mask, uint16_t *base,
                              vuint32mf2_t bindex, vuint16mf4_t value,
                              size_t vl) {
  return vsoxei32_v_u16mf4_m(mask, base, bindex, value, vl);
}
3570
3571 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16mf2_m(
3572 // CHECK-RV64-NEXT: entry:
3573 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
3574 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3575 // CHECK-RV64-NEXT: ret void
3576 //
// Codegen test for vsoxei32_v_u16mf2_m (masked indexed store, u16 data / u32
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_u16mf2_m(vbool32_t mask, uint16_t *base,
                              vuint32m1_t bindex, vuint16mf2_t value,
                              size_t vl) {
  return vsoxei32_v_u16mf2_m(mask, base, bindex, value, vl);
}
3582
3583 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16m1_m(
3584 // CHECK-RV64-NEXT: entry:
3585 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
3586 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3587 // CHECK-RV64-NEXT: ret void
3588 //
// Codegen test for vsoxei32_v_u16m1_m (masked indexed store, u16 data / u32
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint32m2_t bindex,
                             vuint16m1_t value, size_t vl) {
  return vsoxei32_v_u16m1_m(mask, base, bindex, value, vl);
}
3593
3594 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16m2_m(
3595 // CHECK-RV64-NEXT: entry:
3596 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
3597 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3598 // CHECK-RV64-NEXT: ret void
3599 //
// Codegen test for vsoxei32_v_u16m2_m (masked indexed store, u16 data / u32
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint32m4_t bindex,
                             vuint16m2_t value, size_t vl) {
  return vsoxei32_v_u16m2_m(mask, base, bindex, value, vl);
}
3604
3605 // CHECK-RV64-LABEL: @test_vsoxei32_v_u16m4_m(
3606 // CHECK-RV64-NEXT: entry:
3607 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
3608 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i32.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3609 // CHECK-RV64-NEXT: ret void
3610 //
// Codegen test for vsoxei32_v_u16m4_m (masked indexed store, u16 data / u32
// indices); expected IR is pinned by the autogenerated CHECK-RV64 lines above.
void test_vsoxei32_v_u16m4_m(vbool4_t mask, uint16_t *base, vuint32m8_t bindex,
                             vuint16m4_t value, size_t vl) {
  return vsoxei32_v_u16m4_m(mask, base, bindex, value, vl);
}
3615
3616 // CHECK-RV64-LABEL: @test_vsoxei64_v_u16mf4_m(
3617 // CHECK-RV64-NEXT: entry:
3618 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
3619 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3620 // CHECK-RV64-NEXT: ret void
3621 //
// Masked vsoxei64 scatter of vuint16mf4_t via 64-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv1i16.nxv1i64 (see the CHECK lines above).
void test_vsoxei64_v_u16mf4_m(vbool64_t mask, uint16_t *base,
                              vuint64m1_t bindex, vuint16mf4_t value,
                              size_t vl) {
  return vsoxei64_v_u16mf4_m(mask, base, bindex, value, vl);
}
3627
3628 // CHECK-RV64-LABEL: @test_vsoxei64_v_u16mf2_m(
3629 // CHECK-RV64-NEXT: entry:
3630 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
3631 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3632 // CHECK-RV64-NEXT: ret void
3633 //
// Masked vsoxei64 scatter of vuint16mf2_t via 64-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv2i16.nxv2i64 (see the CHECK lines above).
void test_vsoxei64_v_u16mf2_m(vbool32_t mask, uint16_t *base,
                              vuint64m2_t bindex, vuint16mf2_t value,
                              size_t vl) {
  return vsoxei64_v_u16mf2_m(mask, base, bindex, value, vl);
}
3639
3640 // CHECK-RV64-LABEL: @test_vsoxei64_v_u16m1_m(
3641 // CHECK-RV64-NEXT: entry:
3642 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
3643 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3644 // CHECK-RV64-NEXT: ret void
3645 //
// Masked vsoxei64 scatter of vuint16m1_t via 64-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv4i16.nxv4i64 (see the CHECK lines above).
void test_vsoxei64_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint64m4_t bindex,
                             vuint16m1_t value, size_t vl) {
  return vsoxei64_v_u16m1_m(mask, base, bindex, value, vl);
}
3650
3651 // CHECK-RV64-LABEL: @test_vsoxei64_v_u16m2_m(
3652 // CHECK-RV64-NEXT: entry:
3653 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
3654 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3655 // CHECK-RV64-NEXT: ret void
3656 //
// Masked vsoxei64 scatter of vuint16m2_t via 64-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv8i16.nxv8i64 (see the CHECK lines above).
void test_vsoxei64_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint64m8_t bindex,
                             vuint16m2_t value, size_t vl) {
  return vsoxei64_v_u16m2_m(mask, base, bindex, value, vl);
}
3661
3662 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32mf2_m(
3663 // CHECK-RV64-NEXT: entry:
3664 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
3665 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3666 // CHECK-RV64-NEXT: ret void
3667 //
// Masked vsoxei8 scatter of vuint32mf2_t via 8-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv1i32.nxv1i8 (see the CHECK lines above).
void test_vsoxei8_v_u32mf2_m(vbool64_t mask, uint32_t *base, vuint8mf8_t bindex,
                             vuint32mf2_t value, size_t vl) {
  return vsoxei8_v_u32mf2_m(mask, base, bindex, value, vl);
}
3672
3673 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32m1_m(
3674 // CHECK-RV64-NEXT: entry:
3675 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
3676 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3677 // CHECK-RV64-NEXT: ret void
3678 //
// Masked vsoxei8 scatter of vuint32m1_t via 8-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv2i32.nxv2i8 (see the CHECK lines above).
void test_vsoxei8_v_u32m1_m(vbool32_t mask, uint32_t *base, vuint8mf4_t bindex,
                            vuint32m1_t value, size_t vl) {
  return vsoxei8_v_u32m1_m(mask, base, bindex, value, vl);
}
3683
3684 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32m2_m(
3685 // CHECK-RV64-NEXT: entry:
3686 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
3687 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3688 // CHECK-RV64-NEXT: ret void
3689 //
// Masked vsoxei8 scatter of vuint32m2_t via 8-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv4i32.nxv4i8 (see the CHECK lines above).
void test_vsoxei8_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint8mf2_t bindex,
                            vuint32m2_t value, size_t vl) {
  return vsoxei8_v_u32m2_m(mask, base, bindex, value, vl);
}
3694
3695 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32m4_m(
3696 // CHECK-RV64-NEXT: entry:
3697 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
3698 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i8.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3699 // CHECK-RV64-NEXT: ret void
3700 //
// Masked vsoxei8 scatter of vuint32m4_t via 8-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv8i32.nxv8i8 (see the CHECK lines above).
void test_vsoxei8_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint8m1_t bindex,
                            vuint32m4_t value, size_t vl) {
  return vsoxei8_v_u32m4_m(mask, base, bindex, value, vl);
}
3705
3706 // CHECK-RV64-LABEL: @test_vsoxei8_v_u32m8_m(
3707 // CHECK-RV64-NEXT: entry:
3708 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
3709 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i8.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3710 // CHECK-RV64-NEXT: ret void
3711 //
// Masked vsoxei8 scatter of vuint32m8_t via 8-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv16i32.nxv16i8 (see the CHECK lines above).
void test_vsoxei8_v_u32m8_m(vbool4_t mask, uint32_t *base, vuint8m2_t bindex,
                            vuint32m8_t value, size_t vl) {
  return vsoxei8_v_u32m8_m(mask, base, bindex, value, vl);
}
3716
3717 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32mf2_m(
3718 // CHECK-RV64-NEXT: entry:
3719 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
3720 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3721 // CHECK-RV64-NEXT: ret void
3722 //
// Masked vsoxei16 scatter of vuint32mf2_t via 16-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv1i32.nxv1i16 (see the CHECK lines above).
void test_vsoxei16_v_u32mf2_m(vbool64_t mask, uint32_t *base,
                              vuint16mf4_t bindex, vuint32mf2_t value,
                              size_t vl) {
  return vsoxei16_v_u32mf2_m(mask, base, bindex, value, vl);
}
3728
3729 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32m1_m(
3730 // CHECK-RV64-NEXT: entry:
3731 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
3732 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3733 // CHECK-RV64-NEXT: ret void
3734 //
// Masked vsoxei16 scatter of vuint32m1_t via 16-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv2i32.nxv2i16 (see the CHECK lines above).
void test_vsoxei16_v_u32m1_m(vbool32_t mask, uint32_t *base,
                             vuint16mf2_t bindex, vuint32m1_t value,
                             size_t vl) {
  return vsoxei16_v_u32m1_m(mask, base, bindex, value, vl);
}
3740
3741 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32m2_m(
3742 // CHECK-RV64-NEXT: entry:
3743 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
3744 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3745 // CHECK-RV64-NEXT: ret void
3746 //
// Masked vsoxei16 scatter of vuint32m2_t via 16-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv4i32.nxv4i16 (see the CHECK lines above).
void test_vsoxei16_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint16m1_t bindex,
                             vuint32m2_t value, size_t vl) {
  return vsoxei16_v_u32m2_m(mask, base, bindex, value, vl);
}
3751
3752 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32m4_m(
3753 // CHECK-RV64-NEXT: entry:
3754 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
3755 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3756 // CHECK-RV64-NEXT: ret void
3757 //
// Masked vsoxei16 scatter of vuint32m4_t via 16-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv8i32.nxv8i16 (see the CHECK lines above).
void test_vsoxei16_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint16m2_t bindex,
                             vuint32m4_t value, size_t vl) {
  return vsoxei16_v_u32m4_m(mask, base, bindex, value, vl);
}
3762
3763 // CHECK-RV64-LABEL: @test_vsoxei16_v_u32m8_m(
3764 // CHECK-RV64-NEXT: entry:
3765 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
3766 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i16.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3767 // CHECK-RV64-NEXT: ret void
3768 //
// Masked vsoxei16 scatter of vuint32m8_t via 16-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv16i32.nxv16i16 (see the CHECK lines above).
void test_vsoxei16_v_u32m8_m(vbool4_t mask, uint32_t *base, vuint16m4_t bindex,
                             vuint32m8_t value, size_t vl) {
  return vsoxei16_v_u32m8_m(mask, base, bindex, value, vl);
}
3773
3774 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32mf2_m(
3775 // CHECK-RV64-NEXT: entry:
3776 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
3777 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3778 // CHECK-RV64-NEXT: ret void
3779 //
// Masked vsoxei32 scatter of vuint32mf2_t via 32-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv1i32.nxv1i32 (see the CHECK lines above).
void test_vsoxei32_v_u32mf2_m(vbool64_t mask, uint32_t *base,
                              vuint32mf2_t bindex, vuint32mf2_t value,
                              size_t vl) {
  return vsoxei32_v_u32mf2_m(mask, base, bindex, value, vl);
}
3785
3786 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32m1_m(
3787 // CHECK-RV64-NEXT: entry:
3788 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
3789 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3790 // CHECK-RV64-NEXT: ret void
3791 //
// Masked vsoxei32 scatter of vuint32m1_t via 32-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv2i32.nxv2i32 (see the CHECK lines above).
void test_vsoxei32_v_u32m1_m(vbool32_t mask, uint32_t *base, vuint32m1_t bindex,
                             vuint32m1_t value, size_t vl) {
  return vsoxei32_v_u32m1_m(mask, base, bindex, value, vl);
}
3796
3797 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32m2_m(
3798 // CHECK-RV64-NEXT: entry:
3799 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
3800 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3801 // CHECK-RV64-NEXT: ret void
3802 //
// Masked vsoxei32 scatter of vuint32m2_t via 32-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv4i32.nxv4i32 (see the CHECK lines above).
void test_vsoxei32_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint32m2_t bindex,
                             vuint32m2_t value, size_t vl) {
  return vsoxei32_v_u32m2_m(mask, base, bindex, value, vl);
}
3807
3808 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32m4_m(
3809 // CHECK-RV64-NEXT: entry:
3810 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
3811 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3812 // CHECK-RV64-NEXT: ret void
3813 //
// Masked vsoxei32 scatter of vuint32m4_t via 32-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv8i32.nxv8i32 (see the CHECK lines above).
void test_vsoxei32_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint32m4_t bindex,
                             vuint32m4_t value, size_t vl) {
  return vsoxei32_v_u32m4_m(mask, base, bindex, value, vl);
}
3818
3819 // CHECK-RV64-LABEL: @test_vsoxei32_v_u32m8_m(
3820 // CHECK-RV64-NEXT: entry:
3821 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
3822 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3823 // CHECK-RV64-NEXT: ret void
3824 //
// Masked vsoxei32 scatter of vuint32m8_t via 32-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv16i32.nxv16i32 (see the CHECK lines above).
void test_vsoxei32_v_u32m8_m(vbool4_t mask, uint32_t *base, vuint32m8_t bindex,
                             vuint32m8_t value, size_t vl) {
  return vsoxei32_v_u32m8_m(mask, base, bindex, value, vl);
}
3829
3830 // CHECK-RV64-LABEL: @test_vsoxei64_v_u32mf2_m(
3831 // CHECK-RV64-NEXT: entry:
3832 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
3833 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3834 // CHECK-RV64-NEXT: ret void
3835 //
// Masked vsoxei64 scatter of vuint32mf2_t via 64-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv1i32.nxv1i64 (see the CHECK lines above).
void test_vsoxei64_v_u32mf2_m(vbool64_t mask, uint32_t *base,
                              vuint64m1_t bindex, vuint32mf2_t value,
                              size_t vl) {
  return vsoxei64_v_u32mf2_m(mask, base, bindex, value, vl);
}
3841
3842 // CHECK-RV64-LABEL: @test_vsoxei64_v_u32m1_m(
3843 // CHECK-RV64-NEXT: entry:
3844 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
3845 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3846 // CHECK-RV64-NEXT: ret void
3847 //
// Masked vsoxei64 scatter of vuint32m1_t via 64-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv2i32.nxv2i64 (see the CHECK lines above).
void test_vsoxei64_v_u32m1_m(vbool32_t mask, uint32_t *base, vuint64m2_t bindex,
                             vuint32m1_t value, size_t vl) {
  return vsoxei64_v_u32m1_m(mask, base, bindex, value, vl);
}
3852
3853 // CHECK-RV64-LABEL: @test_vsoxei64_v_u32m2_m(
3854 // CHECK-RV64-NEXT: entry:
3855 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
3856 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3857 // CHECK-RV64-NEXT: ret void
3858 //
// Masked vsoxei64 scatter of vuint32m2_t via 64-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv4i32.nxv4i64 (see the CHECK lines above).
void test_vsoxei64_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint64m4_t bindex,
                             vuint32m2_t value, size_t vl) {
  return vsoxei64_v_u32m2_m(mask, base, bindex, value, vl);
}
3863
3864 // CHECK-RV64-LABEL: @test_vsoxei64_v_u32m4_m(
3865 // CHECK-RV64-NEXT: entry:
3866 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
3867 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3868 // CHECK-RV64-NEXT: ret void
3869 //
// Masked vsoxei64 scatter of vuint32m4_t via 64-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv8i32.nxv8i64 (see the CHECK lines above).
void test_vsoxei64_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint64m8_t bindex,
                             vuint32m4_t value, size_t vl) {
  return vsoxei64_v_u32m4_m(mask, base, bindex, value, vl);
}
3874
3875 // CHECK-RV64-LABEL: @test_vsoxei8_v_u64m1_m(
3876 // CHECK-RV64-NEXT: entry:
3877 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
3878 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3879 // CHECK-RV64-NEXT: ret void
3880 //
// Masked vsoxei8 scatter of vuint64m1_t via 8-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv1i64.nxv1i8 (see the CHECK lines above).
void test_vsoxei8_v_u64m1_m(vbool64_t mask, uint64_t *base, vuint8mf8_t bindex,
                            vuint64m1_t value, size_t vl) {
  return vsoxei8_v_u64m1_m(mask, base, bindex, value, vl);
}
3885
3886 // CHECK-RV64-LABEL: @test_vsoxei8_v_u64m2_m(
3887 // CHECK-RV64-NEXT: entry:
3888 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
3889 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3890 // CHECK-RV64-NEXT: ret void
3891 //
// Masked vsoxei8 scatter of vuint64m2_t via 8-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv2i64.nxv2i8 (see the CHECK lines above).
void test_vsoxei8_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint8mf4_t bindex,
                            vuint64m2_t value, size_t vl) {
  return vsoxei8_v_u64m2_m(mask, base, bindex, value, vl);
}
3896
3897 // CHECK-RV64-LABEL: @test_vsoxei8_v_u64m4_m(
3898 // CHECK-RV64-NEXT: entry:
3899 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
3900 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3901 // CHECK-RV64-NEXT: ret void
3902 //
// Masked vsoxei8 scatter of vuint64m4_t via 8-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv4i64.nxv4i8 (see the CHECK lines above).
void test_vsoxei8_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint8mf2_t bindex,
                            vuint64m4_t value, size_t vl) {
  return vsoxei8_v_u64m4_m(mask, base, bindex, value, vl);
}
3907
3908 // CHECK-RV64-LABEL: @test_vsoxei8_v_u64m8_m(
3909 // CHECK-RV64-NEXT: entry:
3910 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
3911 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i8.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3912 // CHECK-RV64-NEXT: ret void
3913 //
// Masked vsoxei8 scatter of vuint64m8_t via 8-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv8i64.nxv8i8 (see the CHECK lines above).
void test_vsoxei8_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint8m1_t bindex,
                            vuint64m8_t value, size_t vl) {
  return vsoxei8_v_u64m8_m(mask, base, bindex, value, vl);
}
3918
3919 // CHECK-RV64-LABEL: @test_vsoxei16_v_u64m1_m(
3920 // CHECK-RV64-NEXT: entry:
3921 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
3922 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3923 // CHECK-RV64-NEXT: ret void
3924 //
// Masked vsoxei16 scatter of vuint64m1_t via 16-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv1i64.nxv1i16 (see the CHECK lines above).
void test_vsoxei16_v_u64m1_m(vbool64_t mask, uint64_t *base,
                             vuint16mf4_t bindex, vuint64m1_t value,
                             size_t vl) {
  return vsoxei16_v_u64m1_m(mask, base, bindex, value, vl);
}
3930
3931 // CHECK-RV64-LABEL: @test_vsoxei16_v_u64m2_m(
3932 // CHECK-RV64-NEXT: entry:
3933 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
3934 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3935 // CHECK-RV64-NEXT: ret void
3936 //
// Masked vsoxei16 scatter of vuint64m2_t via 16-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv2i64.nxv2i16 (see the CHECK lines above).
void test_vsoxei16_v_u64m2_m(vbool32_t mask, uint64_t *base,
                             vuint16mf2_t bindex, vuint64m2_t value,
                             size_t vl) {
  return vsoxei16_v_u64m2_m(mask, base, bindex, value, vl);
}
3942
3943 // CHECK-RV64-LABEL: @test_vsoxei16_v_u64m4_m(
3944 // CHECK-RV64-NEXT: entry:
3945 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
3946 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3947 // CHECK-RV64-NEXT: ret void
3948 //
// Masked vsoxei16 scatter of vuint64m4_t via 16-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv4i64.nxv4i16 (see the CHECK lines above).
void test_vsoxei16_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint16m1_t bindex,
                             vuint64m4_t value, size_t vl) {
  return vsoxei16_v_u64m4_m(mask, base, bindex, value, vl);
}
3953
3954 // CHECK-RV64-LABEL: @test_vsoxei16_v_u64m8_m(
3955 // CHECK-RV64-NEXT: entry:
3956 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
3957 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i16.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3958 // CHECK-RV64-NEXT: ret void
3959 //
// Masked vsoxei16 scatter of vuint64m8_t via 16-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv8i64.nxv8i16 (see the CHECK lines above).
void test_vsoxei16_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint16m2_t bindex,
                             vuint64m8_t value, size_t vl) {
  return vsoxei16_v_u64m8_m(mask, base, bindex, value, vl);
}
3964
3965 // CHECK-RV64-LABEL: @test_vsoxei32_v_u64m1_m(
3966 // CHECK-RV64-NEXT: entry:
3967 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
3968 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3969 // CHECK-RV64-NEXT: ret void
3970 //
// Masked vsoxei32 scatter of vuint64m1_t via 32-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv1i64.nxv1i32 (see the CHECK lines above).
void test_vsoxei32_v_u64m1_m(vbool64_t mask, uint64_t *base,
                             vuint32mf2_t bindex, vuint64m1_t value,
                             size_t vl) {
  return vsoxei32_v_u64m1_m(mask, base, bindex, value, vl);
}
3976
3977 // CHECK-RV64-LABEL: @test_vsoxei32_v_u64m2_m(
3978 // CHECK-RV64-NEXT: entry:
3979 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
3980 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3981 // CHECK-RV64-NEXT: ret void
3982 //
// Masked vsoxei32 scatter of vuint64m2_t via 32-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv2i64.nxv2i32 (see the CHECK lines above).
void test_vsoxei32_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint32m1_t bindex,
                             vuint64m2_t value, size_t vl) {
  return vsoxei32_v_u64m2_m(mask, base, bindex, value, vl);
}
3987
3988 // CHECK-RV64-LABEL: @test_vsoxei32_v_u64m4_m(
3989 // CHECK-RV64-NEXT: entry:
3990 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
3991 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
3992 // CHECK-RV64-NEXT: ret void
3993 //
// Masked vsoxei32 scatter of vuint64m4_t via 32-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv4i64.nxv4i32 (see the CHECK lines above).
void test_vsoxei32_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint32m2_t bindex,
                             vuint64m4_t value, size_t vl) {
  return vsoxei32_v_u64m4_m(mask, base, bindex, value, vl);
}
3998
3999 // CHECK-RV64-LABEL: @test_vsoxei32_v_u64m8_m(
4000 // CHECK-RV64-NEXT: entry:
4001 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
4002 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i32.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4003 // CHECK-RV64-NEXT: ret void
4004 //
// Masked vsoxei32 scatter of vuint64m8_t via 32-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv8i64.nxv8i32 (see the CHECK lines above).
void test_vsoxei32_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint32m4_t bindex,
                             vuint64m8_t value, size_t vl) {
  return vsoxei32_v_u64m8_m(mask, base, bindex, value, vl);
}
4009
4010 // CHECK-RV64-LABEL: @test_vsoxei64_v_u64m1_m(
4011 // CHECK-RV64-NEXT: entry:
4012 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
4013 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4014 // CHECK-RV64-NEXT: ret void
4015 //
// Masked vsoxei64 scatter of vuint64m1_t via 64-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv1i64.nxv1i64 (see the CHECK lines above).
void test_vsoxei64_v_u64m1_m(vbool64_t mask, uint64_t *base, vuint64m1_t bindex,
                             vuint64m1_t value, size_t vl) {
  return vsoxei64_v_u64m1_m(mask, base, bindex, value, vl);
}
4020
4021 // CHECK-RV64-LABEL: @test_vsoxei64_v_u64m2_m(
4022 // CHECK-RV64-NEXT: entry:
4023 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
4024 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4025 // CHECK-RV64-NEXT: ret void
4026 //
// Masked vsoxei64 scatter of vuint64m2_t via 64-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv2i64.nxv2i64 (see the CHECK lines above).
void test_vsoxei64_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint64m2_t bindex,
                             vuint64m2_t value, size_t vl) {
  return vsoxei64_v_u64m2_m(mask, base, bindex, value, vl);
}
4031
4032 // CHECK-RV64-LABEL: @test_vsoxei64_v_u64m4_m(
4033 // CHECK-RV64-NEXT: entry:
4034 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
4035 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4036 // CHECK-RV64-NEXT: ret void
4037 //
// Masked vsoxei64 scatter of vuint64m4_t via 64-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv4i64.nxv4i64 (see the CHECK lines above).
void test_vsoxei64_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint64m4_t bindex,
                             vuint64m4_t value, size_t vl) {
  return vsoxei64_v_u64m4_m(mask, base, bindex, value, vl);
}
4042
4043 // CHECK-RV64-LABEL: @test_vsoxei64_v_u64m8_m(
4044 // CHECK-RV64-NEXT: entry:
4045 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
4046 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4047 // CHECK-RV64-NEXT: ret void
4048 //
// Masked vsoxei64 scatter of vuint64m8_t via 64-bit indices; lowers to
// llvm.riscv.vsoxei.mask.nxv8i64.nxv8i64 (see the CHECK lines above).
void test_vsoxei64_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint64m8_t bindex,
                             vuint64m8_t value, size_t vl) {
  return vsoxei64_v_u64m8_m(mask, base, bindex, value, vl);
}
4053
4054 // CHECK-RV64-LABEL: @test_vsoxei8_v_f16mf4_m(
4055 // CHECK-RV64-NEXT: entry:
4056 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 1 x half>*
4057 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f16.nxv1i8.i64(<vscale x 1 x half> [[VALUE:%.*]], <vscale x 1 x half>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4058 // CHECK-RV64-NEXT: ret void
4059 //
// Masked vsoxei8 scatter of vfloat16mf4_t (_Float16) via 8-bit indices; lowers
// to llvm.riscv.vsoxei.mask.nxv1f16.nxv1i8 (see the CHECK lines above).
void test_vsoxei8_v_f16mf4_m(vbool64_t mask, _Float16 *base, vuint8mf8_t bindex, vfloat16mf4_t value, size_t vl) {
  return vsoxei8_v_f16mf4_m(mask, base, bindex, value, vl);
}
4063
4064 // CHECK-RV64-LABEL: @test_vsoxei8_v_f16mf2_m(
4065 // CHECK-RV64-NEXT: entry:
4066 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 2 x half>*
4067 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f16.nxv2i8.i64(<vscale x 2 x half> [[VALUE:%.*]], <vscale x 2 x half>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4068 // CHECK-RV64-NEXT: ret void
4069 //
// Masked vsoxei8 scatter of vfloat16mf2_t (_Float16) via 8-bit indices; lowers
// to llvm.riscv.vsoxei.mask.nxv2f16.nxv2i8 (see the CHECK lines above).
void test_vsoxei8_v_f16mf2_m(vbool32_t mask, _Float16 *base, vuint8mf4_t bindex, vfloat16mf2_t value, size_t vl) {
  return vsoxei8_v_f16mf2_m(mask, base, bindex, value, vl);
}
4073
4074 // CHECK-RV64-LABEL: @test_vsoxei8_v_f16m1_m(
4075 // CHECK-RV64-NEXT: entry:
4076 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 4 x half>*
4077 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f16.nxv4i8.i64(<vscale x 4 x half> [[VALUE:%.*]], <vscale x 4 x half>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4078 // CHECK-RV64-NEXT: ret void
4079 //
// Masked vsoxei8 scatter of vfloat16m1_t (_Float16) via 8-bit indices; lowers
// to llvm.riscv.vsoxei.mask.nxv4f16.nxv4i8 (see the CHECK lines above).
void test_vsoxei8_v_f16m1_m(vbool16_t mask, _Float16 *base, vuint8mf2_t bindex, vfloat16m1_t value, size_t vl) {
  return vsoxei8_v_f16m1_m(mask, base, bindex, value, vl);
}
4083
4084 // CHECK-RV64-LABEL: @test_vsoxei8_v_f16m2_m(
4085 // CHECK-RV64-NEXT: entry:
4086 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 8 x half>*
4087 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f16.nxv8i8.i64(<vscale x 8 x half> [[VALUE:%.*]], <vscale x 8 x half>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4088 // CHECK-RV64-NEXT: ret void
4089 //
// Masked vsoxei8 scatter of vfloat16m2_t (_Float16) via 8-bit indices; lowers
// to llvm.riscv.vsoxei.mask.nxv8f16.nxv8i8 (see the CHECK lines above).
void test_vsoxei8_v_f16m2_m(vbool8_t mask, _Float16 *base, vuint8m1_t bindex, vfloat16m2_t value, size_t vl) {
  return vsoxei8_v_f16m2_m(mask, base, bindex, value, vl);
}
4093
4094 // CHECK-RV64-LABEL: @test_vsoxei8_v_f16m4_m(
4095 // CHECK-RV64-NEXT: entry:
4096 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 16 x half>*
4097 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f16.nxv16i8.i64(<vscale x 16 x half> [[VALUE:%.*]], <vscale x 16 x half>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4098 // CHECK-RV64-NEXT: ret void
4099 //
// Masked ordered indexed store, f16m4 data / 8-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv16f16.nxv16i8.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei8_v_f16m4_m(vbool4_t mask, _Float16 *base, vuint8m2_t bindex, vfloat16m4_t value, size_t vl) {
  return vsoxei8_v_f16m4_m(mask, base, bindex, value, vl);
}
4103
4104 // CHECK-RV64-LABEL: @test_vsoxei8_v_f16m8_m(
4105 // CHECK-RV64-NEXT: entry:
4106 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 32 x half>*
4107 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32f16.nxv32i8.i64(<vscale x 32 x half> [[VALUE:%.*]], <vscale x 32 x half>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4108 // CHECK-RV64-NEXT: ret void
4109 //
// Masked ordered indexed store, f16m8 data / 8-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv32f16.nxv32i8.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei8_v_f16m8_m(vbool2_t mask, _Float16 *base, vuint8m4_t bindex, vfloat16m8_t value, size_t vl) {
  return vsoxei8_v_f16m8_m(mask, base, bindex, value, vl);
}
4113
4114 // CHECK-RV64-LABEL: @test_vsoxei16_v_f16mf4_m(
4115 // CHECK-RV64-NEXT: entry:
4116 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 1 x half>*
4117 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f16.nxv1i16.i64(<vscale x 1 x half> [[VALUE:%.*]], <vscale x 1 x half>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4118 // CHECK-RV64-NEXT: ret void
4119 //
// Masked ordered indexed store, f16mf4 data / 16-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv1f16.nxv1i16.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei16_v_f16mf4_m(vbool64_t mask, _Float16 *base, vuint16mf4_t bindex, vfloat16mf4_t value, size_t vl) {
  return vsoxei16_v_f16mf4_m(mask, base, bindex, value, vl);
}
4123
4124 // CHECK-RV64-LABEL: @test_vsoxei16_v_f16mf2_m(
4125 // CHECK-RV64-NEXT: entry:
4126 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 2 x half>*
4127 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f16.nxv2i16.i64(<vscale x 2 x half> [[VALUE:%.*]], <vscale x 2 x half>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4128 // CHECK-RV64-NEXT: ret void
4129 //
// Masked ordered indexed store, f16mf2 data / 16-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv2f16.nxv2i16.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei16_v_f16mf2_m(vbool32_t mask, _Float16 *base, vuint16mf2_t bindex, vfloat16mf2_t value, size_t vl) {
  return vsoxei16_v_f16mf2_m(mask, base, bindex, value, vl);
}
4133
4134 // CHECK-RV64-LABEL: @test_vsoxei16_v_f16m1_m(
4135 // CHECK-RV64-NEXT: entry:
4136 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 4 x half>*
4137 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f16.nxv4i16.i64(<vscale x 4 x half> [[VALUE:%.*]], <vscale x 4 x half>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4138 // CHECK-RV64-NEXT: ret void
4139 //
// Masked ordered indexed store, f16m1 data / 16-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv4f16.nxv4i16.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei16_v_f16m1_m(vbool16_t mask, _Float16 *base, vuint16m1_t bindex, vfloat16m1_t value, size_t vl) {
  return vsoxei16_v_f16m1_m(mask, base, bindex, value, vl);
}
4143
4144 // CHECK-RV64-LABEL: @test_vsoxei16_v_f16m2_m(
4145 // CHECK-RV64-NEXT: entry:
4146 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 8 x half>*
4147 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f16.nxv8i16.i64(<vscale x 8 x half> [[VALUE:%.*]], <vscale x 8 x half>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4148 // CHECK-RV64-NEXT: ret void
4149 //
// Masked ordered indexed store, f16m2 data / 16-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv8f16.nxv8i16.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei16_v_f16m2_m(vbool8_t mask, _Float16 *base, vuint16m2_t bindex, vfloat16m2_t value, size_t vl) {
  return vsoxei16_v_f16m2_m(mask, base, bindex, value, vl);
}
4153
4154 // CHECK-RV64-LABEL: @test_vsoxei16_v_f16m4_m(
4155 // CHECK-RV64-NEXT: entry:
4156 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 16 x half>*
4157 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f16.nxv16i16.i64(<vscale x 16 x half> [[VALUE:%.*]], <vscale x 16 x half>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4158 // CHECK-RV64-NEXT: ret void
4159 //
// Masked ordered indexed store, f16m4 data / 16-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv16f16.nxv16i16.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei16_v_f16m4_m(vbool4_t mask, _Float16 *base, vuint16m4_t bindex, vfloat16m4_t value, size_t vl) {
  return vsoxei16_v_f16m4_m(mask, base, bindex, value, vl);
}
4163
4164 // CHECK-RV64-LABEL: @test_vsoxei16_v_f16m8_m(
4165 // CHECK-RV64-NEXT: entry:
4166 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 32 x half>*
4167 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32f16.nxv32i16.i64(<vscale x 32 x half> [[VALUE:%.*]], <vscale x 32 x half>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4168 // CHECK-RV64-NEXT: ret void
4169 //
// Masked ordered indexed store, f16m8 data / 16-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv32f16.nxv32i16.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei16_v_f16m8_m(vbool2_t mask, _Float16 *base, vuint16m8_t bindex, vfloat16m8_t value, size_t vl) {
  return vsoxei16_v_f16m8_m(mask, base, bindex, value, vl);
}
4173
4174 // CHECK-RV64-LABEL: @test_vsoxei32_v_f16mf4_m(
4175 // CHECK-RV64-NEXT: entry:
4176 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 1 x half>*
4177 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f16.nxv1i32.i64(<vscale x 1 x half> [[VALUE:%.*]], <vscale x 1 x half>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4178 // CHECK-RV64-NEXT: ret void
4179 //
// Masked ordered indexed store, f16mf4 data / 32-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv1f16.nxv1i32.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei32_v_f16mf4_m(vbool64_t mask, _Float16 *base, vuint32mf2_t bindex, vfloat16mf4_t value, size_t vl) {
  return vsoxei32_v_f16mf4_m(mask, base, bindex, value, vl);
}
4183
4184 // CHECK-RV64-LABEL: @test_vsoxei32_v_f16mf2_m(
4185 // CHECK-RV64-NEXT: entry:
4186 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 2 x half>*
4187 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f16.nxv2i32.i64(<vscale x 2 x half> [[VALUE:%.*]], <vscale x 2 x half>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4188 // CHECK-RV64-NEXT: ret void
4189 //
// Masked ordered indexed store, f16mf2 data / 32-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv2f16.nxv2i32.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei32_v_f16mf2_m(vbool32_t mask, _Float16 *base, vuint32m1_t bindex, vfloat16mf2_t value, size_t vl) {
  return vsoxei32_v_f16mf2_m(mask, base, bindex, value, vl);
}
4193
4194 // CHECK-RV64-LABEL: @test_vsoxei32_v_f16m1_m(
4195 // CHECK-RV64-NEXT: entry:
4196 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 4 x half>*
4197 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f16.nxv4i32.i64(<vscale x 4 x half> [[VALUE:%.*]], <vscale x 4 x half>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4198 // CHECK-RV64-NEXT: ret void
4199 //
// Masked ordered indexed store, f16m1 data / 32-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv4f16.nxv4i32.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei32_v_f16m1_m(vbool16_t mask, _Float16 *base, vuint32m2_t bindex, vfloat16m1_t value, size_t vl) {
  return vsoxei32_v_f16m1_m(mask, base, bindex, value, vl);
}
4203
4204 // CHECK-RV64-LABEL: @test_vsoxei32_v_f16m2_m(
4205 // CHECK-RV64-NEXT: entry:
4206 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 8 x half>*
4207 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f16.nxv8i32.i64(<vscale x 8 x half> [[VALUE:%.*]], <vscale x 8 x half>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4208 // CHECK-RV64-NEXT: ret void
4209 //
// Masked ordered indexed store, f16m2 data / 32-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv8f16.nxv8i32.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei32_v_f16m2_m(vbool8_t mask, _Float16 *base, vuint32m4_t bindex, vfloat16m2_t value, size_t vl) {
  return vsoxei32_v_f16m2_m(mask, base, bindex, value, vl);
}
4213
4214 // CHECK-RV64-LABEL: @test_vsoxei32_v_f16m4_m(
4215 // CHECK-RV64-NEXT: entry:
4216 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 16 x half>*
4217 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f16.nxv16i32.i64(<vscale x 16 x half> [[VALUE:%.*]], <vscale x 16 x half>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4218 // CHECK-RV64-NEXT: ret void
4219 //
// Masked ordered indexed store, f16m4 data / 32-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv16f16.nxv16i32.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei32_v_f16m4_m(vbool4_t mask, _Float16 *base, vuint32m8_t bindex, vfloat16m4_t value, size_t vl) {
  return vsoxei32_v_f16m4_m(mask, base, bindex, value, vl);
}
4223
4224 // CHECK-RV64-LABEL: @test_vsoxei64_v_f16mf4_m(
4225 // CHECK-RV64-NEXT: entry:
4226 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 1 x half>*
4227 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f16.nxv1i64.i64(<vscale x 1 x half> [[VALUE:%.*]], <vscale x 1 x half>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4228 // CHECK-RV64-NEXT: ret void
4229 //
// Masked ordered indexed store, f16mf4 data / 64-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv1f16.nxv1i64.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei64_v_f16mf4_m(vbool64_t mask, _Float16 *base, vuint64m1_t bindex, vfloat16mf4_t value, size_t vl) {
  return vsoxei64_v_f16mf4_m(mask, base, bindex, value, vl);
}
4233
4234 // CHECK-RV64-LABEL: @test_vsoxei64_v_f16mf2_m(
4235 // CHECK-RV64-NEXT: entry:
4236 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 2 x half>*
4237 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f16.nxv2i64.i64(<vscale x 2 x half> [[VALUE:%.*]], <vscale x 2 x half>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4238 // CHECK-RV64-NEXT: ret void
4239 //
// Masked ordered indexed store, f16mf2 data / 64-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv2f16.nxv2i64.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei64_v_f16mf2_m(vbool32_t mask, _Float16 *base, vuint64m2_t bindex, vfloat16mf2_t value, size_t vl) {
  return vsoxei64_v_f16mf2_m(mask, base, bindex, value, vl);
}
4243
4244 // CHECK-RV64-LABEL: @test_vsoxei64_v_f16m1_m(
4245 // CHECK-RV64-NEXT: entry:
4246 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 4 x half>*
4247 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f16.nxv4i64.i64(<vscale x 4 x half> [[VALUE:%.*]], <vscale x 4 x half>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4248 // CHECK-RV64-NEXT: ret void
4249 //
// Masked ordered indexed store, f16m1 data / 64-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv4f16.nxv4i64.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei64_v_f16m1_m(vbool16_t mask, _Float16 *base, vuint64m4_t bindex, vfloat16m1_t value, size_t vl) {
  return vsoxei64_v_f16m1_m(mask, base, bindex, value, vl);
}
4253
4254 // CHECK-RV64-LABEL: @test_vsoxei64_v_f16m2_m(
4255 // CHECK-RV64-NEXT: entry:
4256 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast half* [[BASE:%.*]] to <vscale x 8 x half>*
4257 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f16.nxv8i64.i64(<vscale x 8 x half> [[VALUE:%.*]], <vscale x 8 x half>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4258 // CHECK-RV64-NEXT: ret void
4259 //
// Masked ordered indexed store, f16m2 data / 64-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv8f16.nxv8i64.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei64_v_f16m2_m(vbool8_t mask, _Float16 *base, vuint64m8_t bindex, vfloat16m2_t value, size_t vl) {
  return vsoxei64_v_f16m2_m(mask, base, bindex, value, vl);
}
4263
4264 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32mf2_m(
4265 // CHECK-RV64-NEXT: entry:
4266 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
4267 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4268 // CHECK-RV64-NEXT: ret void
4269 //
// Masked ordered indexed store, f32mf2 data / 8-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i8.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei8_v_f32mf2_m(vbool64_t mask, float *base, vuint8mf8_t bindex,
                             vfloat32mf2_t value, size_t vl) {
  return vsoxei8_v_f32mf2_m(mask, base, bindex, value, vl);
}
4274
4275 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32m1_m(
4276 // CHECK-RV64-NEXT: entry:
4277 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
4278 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4279 // CHECK-RV64-NEXT: ret void
4280 //
// Masked ordered indexed store, f32m1 data / 8-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i8.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei8_v_f32m1_m(vbool32_t mask, float *base, vuint8mf4_t bindex,
                            vfloat32m1_t value, size_t vl) {
  return vsoxei8_v_f32m1_m(mask, base, bindex, value, vl);
}
4285
4286 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32m2_m(
4287 // CHECK-RV64-NEXT: entry:
4288 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
4289 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i8.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4290 // CHECK-RV64-NEXT: ret void
4291 //
// Masked ordered indexed store, f32m2 data / 8-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i8.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei8_v_f32m2_m(vbool16_t mask, float *base, vuint8mf2_t bindex,
                            vfloat32m2_t value, size_t vl) {
  return vsoxei8_v_f32m2_m(mask, base, bindex, value, vl);
}
4296
4297 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32m4_m(
4298 // CHECK-RV64-NEXT: entry:
4299 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
4300 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i8.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4301 // CHECK-RV64-NEXT: ret void
4302 //
// Masked ordered indexed store, f32m4 data / 8-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i8.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei8_v_f32m4_m(vbool8_t mask, float *base, vuint8m1_t bindex,
                            vfloat32m4_t value, size_t vl) {
  return vsoxei8_v_f32m4_m(mask, base, bindex, value, vl);
}
4307
4308 // CHECK-RV64-LABEL: @test_vsoxei8_v_f32m8_m(
4309 // CHECK-RV64-NEXT: entry:
4310 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
4311 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i8.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4312 // CHECK-RV64-NEXT: ret void
4313 //
// Masked ordered indexed store, f32m8 data / 8-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i8.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei8_v_f32m8_m(vbool4_t mask, float *base, vuint8m2_t bindex,
                            vfloat32m8_t value, size_t vl) {
  return vsoxei8_v_f32m8_m(mask, base, bindex, value, vl);
}
4318
4319 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32mf2_m(
4320 // CHECK-RV64-NEXT: entry:
4321 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
4322 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4323 // CHECK-RV64-NEXT: ret void
4324 //
// Masked ordered indexed store, f32mf2 data / 16-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i16.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei16_v_f32mf2_m(vbool64_t mask, float *base, vuint16mf4_t bindex,
                              vfloat32mf2_t value, size_t vl) {
  return vsoxei16_v_f32mf2_m(mask, base, bindex, value, vl);
}
4329
4330 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32m1_m(
4331 // CHECK-RV64-NEXT: entry:
4332 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
4333 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4334 // CHECK-RV64-NEXT: ret void
4335 //
// Masked ordered indexed store, f32m1 data / 16-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i16.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei16_v_f32m1_m(vbool32_t mask, float *base, vuint16mf2_t bindex,
                             vfloat32m1_t value, size_t vl) {
  return vsoxei16_v_f32m1_m(mask, base, bindex, value, vl);
}
4340
4341 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32m2_m(
4342 // CHECK-RV64-NEXT: entry:
4343 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
4344 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i16.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4345 // CHECK-RV64-NEXT: ret void
4346 //
// Masked ordered indexed store, f32m2 data / 16-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i16.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei16_v_f32m2_m(vbool16_t mask, float *base, vuint16m1_t bindex,
                             vfloat32m2_t value, size_t vl) {
  return vsoxei16_v_f32m2_m(mask, base, bindex, value, vl);
}
4351
4352 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32m4_m(
4353 // CHECK-RV64-NEXT: entry:
4354 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
4355 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i16.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4356 // CHECK-RV64-NEXT: ret void
4357 //
// Masked ordered indexed store, f32m4 data / 16-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i16.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei16_v_f32m4_m(vbool8_t mask, float *base, vuint16m2_t bindex,
                             vfloat32m4_t value, size_t vl) {
  return vsoxei16_v_f32m4_m(mask, base, bindex, value, vl);
}
4362
4363 // CHECK-RV64-LABEL: @test_vsoxei16_v_f32m8_m(
4364 // CHECK-RV64-NEXT: entry:
4365 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
4366 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i16.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4367 // CHECK-RV64-NEXT: ret void
4368 //
// Masked ordered indexed store, f32m8 data / 16-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i16.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei16_v_f32m8_m(vbool4_t mask, float *base, vuint16m4_t bindex,
                             vfloat32m8_t value, size_t vl) {
  return vsoxei16_v_f32m8_m(mask, base, bindex, value, vl);
}
4373
4374 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32mf2_m(
4375 // CHECK-RV64-NEXT: entry:
4376 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
4377 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4378 // CHECK-RV64-NEXT: ret void
4379 //
// Masked ordered indexed store, f32mf2 data / 32-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i32.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei32_v_f32mf2_m(vbool64_t mask, float *base, vuint32mf2_t bindex,
                              vfloat32mf2_t value, size_t vl) {
  return vsoxei32_v_f32mf2_m(mask, base, bindex, value, vl);
}
4384
4385 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32m1_m(
4386 // CHECK-RV64-NEXT: entry:
4387 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
4388 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4389 // CHECK-RV64-NEXT: ret void
4390 //
// Masked ordered indexed store, f32m1 data / 32-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i32.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei32_v_f32m1_m(vbool32_t mask, float *base, vuint32m1_t bindex,
                             vfloat32m1_t value, size_t vl) {
  return vsoxei32_v_f32m1_m(mask, base, bindex, value, vl);
}
4395
4396 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32m2_m(
4397 // CHECK-RV64-NEXT: entry:
4398 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
4399 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i32.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4400 // CHECK-RV64-NEXT: ret void
4401 //
// Masked ordered indexed store, f32m2 data / 32-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i32.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei32_v_f32m2_m(vbool16_t mask, float *base, vuint32m2_t bindex,
                             vfloat32m2_t value, size_t vl) {
  return vsoxei32_v_f32m2_m(mask, base, bindex, value, vl);
}
4406
4407 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32m4_m(
4408 // CHECK-RV64-NEXT: entry:
4409 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
4410 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i32.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4411 // CHECK-RV64-NEXT: ret void
4412 //
// Masked ordered indexed store, f32m4 data / 32-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i32.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei32_v_f32m4_m(vbool8_t mask, float *base, vuint32m4_t bindex,
                             vfloat32m4_t value, size_t vl) {
  return vsoxei32_v_f32m4_m(mask, base, bindex, value, vl);
}
4417
4418 // CHECK-RV64-LABEL: @test_vsoxei32_v_f32m8_m(
4419 // CHECK-RV64-NEXT: entry:
4420 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
4421 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i32.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4422 // CHECK-RV64-NEXT: ret void
4423 //
// Masked ordered indexed store, f32m8 data / 32-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i32.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei32_v_f32m8_m(vbool4_t mask, float *base, vuint32m8_t bindex,
                             vfloat32m8_t value, size_t vl) {
  return vsoxei32_v_f32m8_m(mask, base, bindex, value, vl);
}
4428
4429 // CHECK-RV64-LABEL: @test_vsoxei64_v_f32mf2_m(
4430 // CHECK-RV64-NEXT: entry:
4431 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
4432 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4433 // CHECK-RV64-NEXT: ret void
4434 //
// Masked ordered indexed store, f32mf2 data / 64-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i64.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei64_v_f32mf2_m(vbool64_t mask, float *base, vuint64m1_t bindex,
                              vfloat32mf2_t value, size_t vl) {
  return vsoxei64_v_f32mf2_m(mask, base, bindex, value, vl);
}
4439
4440 // CHECK-RV64-LABEL: @test_vsoxei64_v_f32m1_m(
4441 // CHECK-RV64-NEXT: entry:
4442 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
4443 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4444 // CHECK-RV64-NEXT: ret void
4445 //
// Masked ordered indexed store, f32m1 data / 64-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i64.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei64_v_f32m1_m(vbool32_t mask, float *base, vuint64m2_t bindex,
                             vfloat32m1_t value, size_t vl) {
  return vsoxei64_v_f32m1_m(mask, base, bindex, value, vl);
}
4450
4451 // CHECK-RV64-LABEL: @test_vsoxei64_v_f32m2_m(
4452 // CHECK-RV64-NEXT: entry:
4453 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
4454 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i64.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4455 // CHECK-RV64-NEXT: ret void
4456 //
// Masked ordered indexed store, f32m2 data / 64-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i64.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei64_v_f32m2_m(vbool16_t mask, float *base, vuint64m4_t bindex,
                             vfloat32m2_t value, size_t vl) {
  return vsoxei64_v_f32m2_m(mask, base, bindex, value, vl);
}
4461
4462 // CHECK-RV64-LABEL: @test_vsoxei64_v_f32m4_m(
4463 // CHECK-RV64-NEXT: entry:
4464 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
4465 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i64.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4466 // CHECK-RV64-NEXT: ret void
4467 //
// Masked ordered indexed store, f32m4 data / 64-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i64.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei64_v_f32m4_m(vbool8_t mask, float *base, vuint64m8_t bindex,
                             vfloat32m4_t value, size_t vl) {
  return vsoxei64_v_f32m4_m(mask, base, bindex, value, vl);
}
4472
4473 // CHECK-RV64-LABEL: @test_vsoxei8_v_f64m1_m(
4474 // CHECK-RV64-NEXT: entry:
4475 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
4476 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4477 // CHECK-RV64-NEXT: ret void
4478 //
// Masked ordered indexed store, f64m1 data / 8-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i8.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei8_v_f64m1_m(vbool64_t mask, double *base, vuint8mf8_t bindex,
                            vfloat64m1_t value, size_t vl) {
  return vsoxei8_v_f64m1_m(mask, base, bindex, value, vl);
}
4483
4484 // CHECK-RV64-LABEL: @test_vsoxei8_v_f64m2_m(
4485 // CHECK-RV64-NEXT: entry:
4486 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
4487 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i8.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4488 // CHECK-RV64-NEXT: ret void
4489 //
// Masked ordered indexed store, f64m2 data / 8-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i8.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei8_v_f64m2_m(vbool32_t mask, double *base, vuint8mf4_t bindex,
                            vfloat64m2_t value, size_t vl) {
  return vsoxei8_v_f64m2_m(mask, base, bindex, value, vl);
}
4494
4495 // CHECK-RV64-LABEL: @test_vsoxei8_v_f64m4_m(
4496 // CHECK-RV64-NEXT: entry:
4497 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
4498 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i8.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4499 // CHECK-RV64-NEXT: ret void
4500 //
// Masked ordered indexed store, f64m4 data / 8-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i8.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei8_v_f64m4_m(vbool16_t mask, double *base, vuint8mf2_t bindex,
                            vfloat64m4_t value, size_t vl) {
  return vsoxei8_v_f64m4_m(mask, base, bindex, value, vl);
}
4505
4506 // CHECK-RV64-LABEL: @test_vsoxei8_v_f64m8_m(
4507 // CHECK-RV64-NEXT: entry:
4508 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
4509 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i8.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4510 // CHECK-RV64-NEXT: ret void
4511 //
// Masked ordered indexed store, f64m8 data / 8-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i8.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei8_v_f64m8_m(vbool8_t mask, double *base, vuint8m1_t bindex,
                            vfloat64m8_t value, size_t vl) {
  return vsoxei8_v_f64m8_m(mask, base, bindex, value, vl);
}
4516
4517 // CHECK-RV64-LABEL: @test_vsoxei16_v_f64m1_m(
4518 // CHECK-RV64-NEXT: entry:
4519 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
4520 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4521 // CHECK-RV64-NEXT: ret void
4522 //
// Masked ordered indexed store, f64m1 data / 16-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i16.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei16_v_f64m1_m(vbool64_t mask, double *base, vuint16mf4_t bindex,
                             vfloat64m1_t value, size_t vl) {
  return vsoxei16_v_f64m1_m(mask, base, bindex, value, vl);
}
4527
4528 // CHECK-RV64-LABEL: @test_vsoxei16_v_f64m2_m(
4529 // CHECK-RV64-NEXT: entry:
4530 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
4531 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i16.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4532 // CHECK-RV64-NEXT: ret void
4533 //
// Masked ordered indexed store, f64m2 data / 16-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i16.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei16_v_f64m2_m(vbool32_t mask, double *base, vuint16mf2_t bindex,
                             vfloat64m2_t value, size_t vl) {
  return vsoxei16_v_f64m2_m(mask, base, bindex, value, vl);
}
4538
4539 // CHECK-RV64-LABEL: @test_vsoxei16_v_f64m4_m(
4540 // CHECK-RV64-NEXT: entry:
4541 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
4542 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i16.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4543 // CHECK-RV64-NEXT: ret void
4544 //
// Masked ordered indexed store, f64m4 data / 16-bit indices: must lower to
// @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i16.i64 (pinned by the CHECK-RV64 lines above).
void test_vsoxei16_v_f64m4_m(vbool16_t mask, double *base, vuint16m1_t bindex,
                             vfloat64m4_t value, size_t vl) {
  return vsoxei16_v_f64m4_m(mask, base, bindex, value, vl);
}
4549
4550 // CHECK-RV64-LABEL: @test_vsoxei16_v_f64m8_m(
4551 // CHECK-RV64-NEXT: entry:
4552 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
4553 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i16.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4554 // CHECK-RV64-NEXT: ret void
4555 //
test_vsoxei16_v_f64m8_m(vbool8_t mask,double * base,vuint16m2_t bindex,vfloat64m8_t value,size_t vl)4556 void test_vsoxei16_v_f64m8_m(vbool8_t mask, double *base, vuint16m2_t bindex,
4557 vfloat64m8_t value, size_t vl) {
4558 return vsoxei16_v_f64m8_m(mask, base, bindex, value, vl);
4559 }
4560
4561 // CHECK-RV64-LABEL: @test_vsoxei32_v_f64m1_m(
4562 // CHECK-RV64-NEXT: entry:
4563 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
4564 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4565 // CHECK-RV64-NEXT: ret void
4566 //
test_vsoxei32_v_f64m1_m(vbool64_t mask,double * base,vuint32mf2_t bindex,vfloat64m1_t value,size_t vl)4567 void test_vsoxei32_v_f64m1_m(vbool64_t mask, double *base, vuint32mf2_t bindex,
4568 vfloat64m1_t value, size_t vl) {
4569 return vsoxei32_v_f64m1_m(mask, base, bindex, value, vl);
4570 }
4571
4572 // CHECK-RV64-LABEL: @test_vsoxei32_v_f64m2_m(
4573 // CHECK-RV64-NEXT: entry:
4574 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
4575 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i32.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4576 // CHECK-RV64-NEXT: ret void
4577 //
test_vsoxei32_v_f64m2_m(vbool32_t mask,double * base,vuint32m1_t bindex,vfloat64m2_t value,size_t vl)4578 void test_vsoxei32_v_f64m2_m(vbool32_t mask, double *base, vuint32m1_t bindex,
4579 vfloat64m2_t value, size_t vl) {
4580 return vsoxei32_v_f64m2_m(mask, base, bindex, value, vl);
4581 }
4582
4583 // CHECK-RV64-LABEL: @test_vsoxei32_v_f64m4_m(
4584 // CHECK-RV64-NEXT: entry:
4585 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
4586 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i32.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4587 // CHECK-RV64-NEXT: ret void
4588 //
test_vsoxei32_v_f64m4_m(vbool16_t mask,double * base,vuint32m2_t bindex,vfloat64m4_t value,size_t vl)4589 void test_vsoxei32_v_f64m4_m(vbool16_t mask, double *base, vuint32m2_t bindex,
4590 vfloat64m4_t value, size_t vl) {
4591 return vsoxei32_v_f64m4_m(mask, base, bindex, value, vl);
4592 }
4593
4594 // CHECK-RV64-LABEL: @test_vsoxei32_v_f64m8_m(
4595 // CHECK-RV64-NEXT: entry:
4596 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
4597 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i32.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4598 // CHECK-RV64-NEXT: ret void
4599 //
test_vsoxei32_v_f64m8_m(vbool8_t mask,double * base,vuint32m4_t bindex,vfloat64m8_t value,size_t vl)4600 void test_vsoxei32_v_f64m8_m(vbool8_t mask, double *base, vuint32m4_t bindex,
4601 vfloat64m8_t value, size_t vl) {
4602 return vsoxei32_v_f64m8_m(mask, base, bindex, value, vl);
4603 }
4604
4605 // CHECK-RV64-LABEL: @test_vsoxei64_v_f64m1_m(
4606 // CHECK-RV64-NEXT: entry:
4607 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
4608 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4609 // CHECK-RV64-NEXT: ret void
4610 //
test_vsoxei64_v_f64m1_m(vbool64_t mask,double * base,vuint64m1_t bindex,vfloat64m1_t value,size_t vl)4611 void test_vsoxei64_v_f64m1_m(vbool64_t mask, double *base, vuint64m1_t bindex,
4612 vfloat64m1_t value, size_t vl) {
4613 return vsoxei64_v_f64m1_m(mask, base, bindex, value, vl);
4614 }
4615
4616 // CHECK-RV64-LABEL: @test_vsoxei64_v_f64m2_m(
4617 // CHECK-RV64-NEXT: entry:
4618 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
4619 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i64.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4620 // CHECK-RV64-NEXT: ret void
4621 //
test_vsoxei64_v_f64m2_m(vbool32_t mask,double * base,vuint64m2_t bindex,vfloat64m2_t value,size_t vl)4622 void test_vsoxei64_v_f64m2_m(vbool32_t mask, double *base, vuint64m2_t bindex,
4623 vfloat64m2_t value, size_t vl) {
4624 return vsoxei64_v_f64m2_m(mask, base, bindex, value, vl);
4625 }
4626
4627 // CHECK-RV64-LABEL: @test_vsoxei64_v_f64m4_m(
4628 // CHECK-RV64-NEXT: entry:
4629 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
4630 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i64.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4631 // CHECK-RV64-NEXT: ret void
4632 //
test_vsoxei64_v_f64m4_m(vbool16_t mask,double * base,vuint64m4_t bindex,vfloat64m4_t value,size_t vl)4633 void test_vsoxei64_v_f64m4_m(vbool16_t mask, double *base, vuint64m4_t bindex,
4634 vfloat64m4_t value, size_t vl) {
4635 return vsoxei64_v_f64m4_m(mask, base, bindex, value, vl);
4636 }
4637
4638 // CHECK-RV64-LABEL: @test_vsoxei64_v_f64m8_m(
4639 // CHECK-RV64-NEXT: entry:
4640 // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
4641 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i64.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
4642 // CHECK-RV64-NEXT: ret void
4643 //
test_vsoxei64_v_f64m8_m(vbool8_t mask,double * base,vuint64m8_t bindex,vfloat64m8_t value,size_t vl)4644 void test_vsoxei64_v_f64m8_m (vbool8_t mask, double *base, vuint64m8_t bindex, vfloat64m8_t value, size_t vl) {
4645 return vsoxei64_v_f64m8_m(mask, base, bindex, value, vl);
4646 }
4647
4648