// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d -target-feature +experimental-v \
// RUN:   -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>
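
// The vreinterpret_v_* intrinsics reinterpret the bits of a vector value as a
// vector of another type with the same LMUL. As the checks below show, casts
// that only change the signedness of the integer element type fold to a plain
// return of the source operand, while casts that change the element width or
// cross between integer and floating-point elements lower to a single IR
// bitcast.
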
//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf8_u8mf8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[SRC:%.*]]
//
vuint8mf8_t test_vreinterpret_v_i8mf8_u8mf8(vint8mf8_t src) {
  return vreinterpret_v_i8mf8_u8mf8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf4_u8mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[SRC:%.*]]
//
vuint8mf4_t test_vreinterpret_v_i8mf4_u8mf4(vint8mf4_t src) {
  return vreinterpret_v_i8mf4_u8mf4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf2_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[SRC:%.*]]
//
vuint8mf2_t test_vreinterpret_v_i8mf2_u8mf2(vint8mf2_t src) {
  return vreinterpret_v_i8mf2_u8mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8m1_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[SRC:%.*]]
//
vuint8m1_t test_vreinterpret_v_i8m1_u8m1(vint8m1_t src) {
  return vreinterpret_v_i8m1_u8m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8m2_u8m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[SRC:%.*]]
//
vuint8m2_t test_vreinterpret_v_i8m2_u8m2(vint8m2_t src) {
  return vreinterpret_v_i8m2_u8m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8m4_u8m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[SRC:%.*]]
//
vuint8m4_t test_vreinterpret_v_i8m4_u8m4(vint8m4_t src) {
  return vreinterpret_v_i8m4_u8m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8m8_u8m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[SRC:%.*]]
//
vuint8m8_t test_vreinterpret_v_i8m8_u8m8(vint8m8_t src) {
  return vreinterpret_v_i8m8_u8m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf8_i8mf8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[SRC:%.*]]
//
vint8mf8_t test_vreinterpret_v_u8mf8_i8mf8(vuint8mf8_t src) {
  return vreinterpret_v_u8mf8_i8mf8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf4_i8mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[SRC:%.*]]
//
vint8mf4_t test_vreinterpret_v_u8mf4_i8mf4(vuint8mf4_t src) {
  return vreinterpret_v_u8mf4_i8mf4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf2_i8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[SRC:%.*]]
//
vint8mf2_t test_vreinterpret_v_u8mf2_i8mf2(vuint8mf2_t src) {
  return vreinterpret_v_u8mf2_i8mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8m1_i8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[SRC:%.*]]
//
vint8m1_t test_vreinterpret_v_u8m1_i8m1(vuint8m1_t src) {
  return vreinterpret_v_u8m1_i8m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8m2_i8m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[SRC:%.*]]
//
vint8m2_t test_vreinterpret_v_u8m2_i8m2(vuint8m2_t src) {
  return vreinterpret_v_u8m2_i8m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8m4_i8m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[SRC:%.*]]
//
vint8m4_t test_vreinterpret_v_u8m4_i8m4(vuint8m4_t src) {
  return vreinterpret_v_u8m4_i8m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8m8_i8m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[SRC:%.*]]
//
vint8m8_t test_vreinterpret_v_u8m8_i8m8(vuint8m8_t src) {
  return vreinterpret_v_u8m8_i8m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16mf4_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[SRC:%.*]]
//
vuint16mf4_t test_vreinterpret_v_i16mf4_u16mf4(vint16mf4_t src) {
  return vreinterpret_v_i16mf4_u16mf4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16mf2_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[SRC:%.*]]
//
vuint16mf2_t test_vreinterpret_v_i16mf2_u16mf2(vint16mf2_t src) {
  return vreinterpret_v_i16mf2_u16mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m1_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[SRC:%.*]]
//
vuint16m1_t test_vreinterpret_v_i16m1_u16m1(vint16m1_t src) {
  return vreinterpret_v_i16m1_u16m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m2_u16m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[SRC:%.*]]
//
vuint16m2_t test_vreinterpret_v_i16m2_u16m2(vint16m2_t src) {
  return vreinterpret_v_i16m2_u16m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m4_u16m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[SRC:%.*]]
//
vuint16m4_t test_vreinterpret_v_i16m4_u16m4(vint16m4_t src) {
  return vreinterpret_v_i16m4_u16m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m8_u16m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[SRC:%.*]]
//
vuint16m8_t test_vreinterpret_v_i16m8_u16m8(vint16m8_t src) {
  return vreinterpret_v_i16m8_u16m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16mf4_i16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[SRC:%.*]]
//
vint16mf4_t test_vreinterpret_v_u16mf4_i16mf4(vuint16mf4_t src) {
  return vreinterpret_v_u16mf4_i16mf4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16mf2_i16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[SRC:%.*]]
//
vint16mf2_t test_vreinterpret_v_u16mf2_i16mf2(vuint16mf2_t src) {
  return vreinterpret_v_u16mf2_i16mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m1_i16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[SRC:%.*]]
//
vint16m1_t test_vreinterpret_v_u16m1_i16m1(vuint16m1_t src) {
  return vreinterpret_v_u16m1_i16m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m2_i16m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[SRC:%.*]]
//
vint16m2_t test_vreinterpret_v_u16m2_i16m2(vuint16m2_t src) {
  return vreinterpret_v_u16m2_i16m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m4_i16m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[SRC:%.*]]
//
vint16m4_t test_vreinterpret_v_u16m4_i16m4(vuint16m4_t src) {
  return vreinterpret_v_u16m4_i16m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m8_i16m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[SRC:%.*]]
//
vint16m8_t test_vreinterpret_v_u16m8_i16m8(vuint16m8_t src) {
  return vreinterpret_v_u16m8_i16m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i32mf2_u32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[SRC:%.*]]
//
vuint32mf2_t test_vreinterpret_v_i32mf2_u32mf2(vint32mf2_t src) {
  return vreinterpret_v_i32mf2_u32mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m1_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[SRC:%.*]]
//
vuint32m1_t test_vreinterpret_v_i32m1_u32m1(vint32m1_t src) {
  return vreinterpret_v_i32m1_u32m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m2_u32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[SRC:%.*]]
//
vuint32m2_t test_vreinterpret_v_i32m2_u32m2(vint32m2_t src) {
  return vreinterpret_v_i32m2_u32m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m4_u32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[SRC:%.*]]
//
vuint32m4_t test_vreinterpret_v_i32m4_u32m4(vint32m4_t src) {
  return vreinterpret_v_i32m4_u32m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m8_u32m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[SRC:%.*]]
//
vuint32m8_t test_vreinterpret_v_i32m8_u32m8(vint32m8_t src) {
  return vreinterpret_v_i32m8_u32m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u32mf2_i32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[SRC:%.*]]
//
vint32mf2_t test_vreinterpret_v_u32mf2_i32mf2(vuint32mf2_t src) {
  return vreinterpret_v_u32mf2_i32mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m1_i32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[SRC:%.*]]
//
vint32m1_t test_vreinterpret_v_u32m1_i32m1(vuint32m1_t src) {
  return vreinterpret_v_u32m1_i32m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m2_i32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[SRC:%.*]]
//
vint32m2_t test_vreinterpret_v_u32m2_i32m2(vuint32m2_t src) {
  return vreinterpret_v_u32m2_i32m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m4_i32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[SRC:%.*]]
//
vint32m4_t test_vreinterpret_v_u32m4_i32m4(vuint32m4_t src) {
  return vreinterpret_v_u32m4_i32m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m8_i32m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[SRC:%.*]]
//
vint32m8_t test_vreinterpret_v_u32m8_i32m8(vuint32m8_t src) {
  return vreinterpret_v_u32m8_i32m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f32mf2_i32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x float> [[SRC:%.*]] to <vscale x 1 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
//
vint32mf2_t test_vreinterpret_v_f32mf2_i32mf2(vfloat32mf2_t src) {
  return vreinterpret_v_f32mf2_i32mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f32m1_i32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x float> [[SRC:%.*]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vint32m1_t test_vreinterpret_v_f32m1_i32m1(vfloat32m1_t src) {
  return vreinterpret_v_f32m1_i32m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f32m2_i32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x float> [[SRC:%.*]] to <vscale x 4 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
//
vint32m2_t test_vreinterpret_v_f32m2_i32m2(vfloat32m2_t src) {
  return vreinterpret_v_f32m2_i32m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f32m4_i32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x float> [[SRC:%.*]] to <vscale x 8 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
//
vint32m4_t test_vreinterpret_v_f32m4_i32m4(vfloat32m4_t src) {
  return vreinterpret_v_f32m4_i32m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f32m8_i32m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x float> [[SRC:%.*]] to <vscale x 16 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
//
vint32m8_t test_vreinterpret_v_f32m8_i32m8(vfloat32m8_t src) {
  return vreinterpret_v_f32m8_i32m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f32mf2_u32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x float> [[SRC:%.*]] to <vscale x 1 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
//
vuint32mf2_t test_vreinterpret_v_f32mf2_u32mf2(vfloat32mf2_t src) {
  return vreinterpret_v_f32mf2_u32mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f32m1_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x float> [[SRC:%.*]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vreinterpret_v_f32m1_u32m1(vfloat32m1_t src) {
  return vreinterpret_v_f32m1_u32m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f32m2_u32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x float> [[SRC:%.*]] to <vscale x 4 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
//
vuint32m2_t test_vreinterpret_v_f32m2_u32m2(vfloat32m2_t src) {
  return vreinterpret_v_f32m2_u32m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f32m4_u32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x float> [[SRC:%.*]] to <vscale x 8 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
//
vuint32m4_t test_vreinterpret_v_f32m4_u32m4(vfloat32m4_t src) {
  return vreinterpret_v_f32m4_u32m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f32m8_u32m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x float> [[SRC:%.*]] to <vscale x 16 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
//
vuint32m8_t test_vreinterpret_v_f32m8_u32m8(vfloat32m8_t src) {
  return vreinterpret_v_f32m8_u32m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i32mf2_f32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 1 x float>
// CHECK-RV64-NEXT:    ret <vscale x 1 x float> [[TMP0]]
//
vfloat32mf2_t test_vreinterpret_v_i32mf2_f32mf2(vint32mf2_t src) {
  return vreinterpret_v_i32mf2_f32mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m1_f32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 2 x float>
// CHECK-RV64-NEXT:    ret <vscale x 2 x float> [[TMP0]]
//
vfloat32m1_t test_vreinterpret_v_i32m1_f32m1(vint32m1_t src) {
  return vreinterpret_v_i32m1_f32m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m2_f32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 4 x float>
// CHECK-RV64-NEXT:    ret <vscale x 4 x float> [[TMP0]]
//
vfloat32m2_t test_vreinterpret_v_i32m2_f32m2(vint32m2_t src) {
  return vreinterpret_v_i32m2_f32m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m4_f32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 8 x float>
// CHECK-RV64-NEXT:    ret <vscale x 8 x float> [[TMP0]]
//
vfloat32m4_t test_vreinterpret_v_i32m4_f32m4(vint32m4_t src) {
  return vreinterpret_v_i32m4_f32m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m8_f32m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 16 x float>
// CHECK-RV64-NEXT:    ret <vscale x 16 x float> [[TMP0]]
//
vfloat32m8_t test_vreinterpret_v_i32m8_f32m8(vint32m8_t src) {
  return vreinterpret_v_i32m8_f32m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u32mf2_f32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 1 x float>
// CHECK-RV64-NEXT:    ret <vscale x 1 x float> [[TMP0]]
//
vfloat32mf2_t test_vreinterpret_v_u32mf2_f32mf2(vuint32mf2_t src) {
  return vreinterpret_v_u32mf2_f32mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m1_f32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 2 x float>
// CHECK-RV64-NEXT:    ret <vscale x 2 x float> [[TMP0]]
//
vfloat32m1_t test_vreinterpret_v_u32m1_f32m1(vuint32m1_t src) {
  return vreinterpret_v_u32m1_f32m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m2_f32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 4 x float>
// CHECK-RV64-NEXT:    ret <vscale x 4 x float> [[TMP0]]
//
vfloat32m2_t test_vreinterpret_v_u32m2_f32m2(vuint32m2_t src) {
  return vreinterpret_v_u32m2_f32m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m4_f32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 8 x float>
// CHECK-RV64-NEXT:    ret <vscale x 8 x float> [[TMP0]]
//
vfloat32m4_t test_vreinterpret_v_u32m4_f32m4(vuint32m4_t src) {
  return vreinterpret_v_u32m4_f32m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m8_f32m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 16 x float>
// CHECK-RV64-NEXT:    ret <vscale x 16 x float> [[TMP0]]
//
vfloat32m8_t test_vreinterpret_v_u32m8_f32m8(vuint32m8_t src) {
  return vreinterpret_v_u32m8_f32m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m1_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[SRC:%.*]]
//
vuint64m1_t test_vreinterpret_v_i64m1_u64m1(vint64m1_t src) {
  return vreinterpret_v_i64m1_u64m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m2_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[SRC:%.*]]
//
vuint64m2_t test_vreinterpret_v_i64m2_u64m2(vint64m2_t src) {
  return vreinterpret_v_i64m2_u64m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m4_u64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[SRC:%.*]]
//
vuint64m4_t test_vreinterpret_v_i64m4_u64m4(vint64m4_t src) {
  return vreinterpret_v_i64m4_u64m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m8_u64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[SRC:%.*]]
//
vuint64m8_t test_vreinterpret_v_i64m8_u64m8(vint64m8_t src) {
  return vreinterpret_v_i64m8_u64m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m1_i64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[SRC:%.*]]
//
vint64m1_t test_vreinterpret_v_u64m1_i64m1(vuint64m1_t src) {
  return vreinterpret_v_u64m1_i64m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m2_i64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[SRC:%.*]]
//
vint64m2_t test_vreinterpret_v_u64m2_i64m2(vuint64m2_t src) {
  return vreinterpret_v_u64m2_i64m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m4_i64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[SRC:%.*]]
//
vint64m4_t test_vreinterpret_v_u64m4_i64m4(vuint64m4_t src) {
  return vreinterpret_v_u64m4_i64m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m8_i64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[SRC:%.*]]
//
vint64m8_t test_vreinterpret_v_u64m8_i64m8(vuint64m8_t src) {
  return vreinterpret_v_u64m8_i64m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f64m1_i64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x double> [[SRC:%.*]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vreinterpret_v_f64m1_i64m1(vfloat64m1_t src) {
  return vreinterpret_v_f64m1_i64m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f64m2_i64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x double> [[SRC:%.*]] to <vscale x 2 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vint64m2_t test_vreinterpret_v_f64m2_i64m2(vfloat64m2_t src) {
  return vreinterpret_v_f64m2_i64m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f64m4_i64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x double> [[SRC:%.*]] to <vscale x 4 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vint64m4_t test_vreinterpret_v_f64m4_i64m4(vfloat64m4_t src) {
  return vreinterpret_v_f64m4_i64m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f64m8_i64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x double> [[SRC:%.*]] to <vscale x 8 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vint64m8_t test_vreinterpret_v_f64m8_i64m8(vfloat64m8_t src) {
  return vreinterpret_v_f64m8_i64m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f64m1_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x double> [[SRC:%.*]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vreinterpret_v_f64m1_u64m1(vfloat64m1_t src) {
  return vreinterpret_v_f64m1_u64m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f64m2_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x double> [[SRC:%.*]] to <vscale x 2 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vreinterpret_v_f64m2_u64m2(vfloat64m2_t src) {
  return vreinterpret_v_f64m2_u64m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f64m4_u64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x double> [[SRC:%.*]] to <vscale x 4 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vreinterpret_v_f64m4_u64m4(vfloat64m4_t src) {
  return vreinterpret_v_f64m4_u64m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_f64m8_u64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x double> [[SRC:%.*]] to <vscale x 8 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vreinterpret_v_f64m8_u64m8(vfloat64m8_t src) {
  return vreinterpret_v_f64m8_u64m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m1_f64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 1 x double>
// CHECK-RV64-NEXT:    ret <vscale x 1 x double> [[TMP0]]
//
vfloat64m1_t test_vreinterpret_v_i64m1_f64m1(vint64m1_t src) {
  return vreinterpret_v_i64m1_f64m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m2_f64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 2 x double>
// CHECK-RV64-NEXT:    ret <vscale x 2 x double> [[TMP0]]
//
vfloat64m2_t test_vreinterpret_v_i64m2_f64m2(vint64m2_t src) {
  return vreinterpret_v_i64m2_f64m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m4_f64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 4 x double>
// CHECK-RV64-NEXT:    ret <vscale x 4 x double> [[TMP0]]
//
vfloat64m4_t test_vreinterpret_v_i64m4_f64m4(vint64m4_t src) {
  return vreinterpret_v_i64m4_f64m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m8_f64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 8 x double>
// CHECK-RV64-NEXT:    ret <vscale x 8 x double> [[TMP0]]
//
vfloat64m8_t test_vreinterpret_v_i64m8_f64m8(vint64m8_t src) {
  return vreinterpret_v_i64m8_f64m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m1_f64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 1 x double>
// CHECK-RV64-NEXT:    ret <vscale x 1 x double> [[TMP0]]
//
vfloat64m1_t test_vreinterpret_v_u64m1_f64m1(vuint64m1_t src) {
  return vreinterpret_v_u64m1_f64m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m2_f64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 2 x double>
// CHECK-RV64-NEXT:    ret <vscale x 2 x double> [[TMP0]]
//
vfloat64m2_t test_vreinterpret_v_u64m2_f64m2(vuint64m2_t src) {
  return vreinterpret_v_u64m2_f64m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m4_f64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 4 x double>
// CHECK-RV64-NEXT:    ret <vscale x 4 x double> [[TMP0]]
//
vfloat64m4_t test_vreinterpret_v_u64m4_f64m4(vuint64m4_t src) {
  return vreinterpret_v_u64m4_f64m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m8_f64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 8 x double>
// CHECK-RV64-NEXT:    ret <vscale x 8 x double> [[TMP0]]
//
vfloat64m8_t test_vreinterpret_v_u64m8_f64m8(vuint64m8_t src) {
  return vreinterpret_v_u64m8_f64m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf4_i16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i8> [[SRC:%.*]] to <vscale x 1 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
//
vint16mf4_t test_vreinterpret_v_i8mf4_i16mf4(vint8mf4_t src) {
  return vreinterpret_v_i8mf4_i16mf4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf2_i16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i8> [[SRC:%.*]] to <vscale x 2 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
//
vint16mf2_t test_vreinterpret_v_i8mf2_i16mf2(vint8mf2_t src) {
  return vreinterpret_v_i8mf2_i16mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8m1_i16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 4 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vint16m1_t test_vreinterpret_v_i8m1_i16m1(vint8m1_t src) {
  return vreinterpret_v_i8m1_i16m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8m2_i16m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 8 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
//
vint16m2_t test_vreinterpret_v_i8m2_i16m2(vint8m2_t src) {
  return vreinterpret_v_i8m2_i16m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8m4_i16m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 16 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
//
vint16m4_t test_vreinterpret_v_i8m4_i16m4(vint8m4_t src) {
  return vreinterpret_v_i8m4_i16m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8m8_i16m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 32 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
//
vint16m8_t test_vreinterpret_v_i8m8_i16m8(vint8m8_t src) {
  return vreinterpret_v_i8m8_i16m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf4_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i8> [[SRC:%.*]] to <vscale x 1 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
//
vuint16mf4_t test_vreinterpret_v_u8mf4_u16mf4(vuint8mf4_t src) {
  return vreinterpret_v_u8mf4_u16mf4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf2_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i8> [[SRC:%.*]] to <vscale x 2 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
//
vuint16mf2_t test_vreinterpret_v_u8mf2_u16mf2(vuint8mf2_t src) {
  return vreinterpret_v_u8mf2_u16mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8m1_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 4 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vreinterpret_v_u8m1_u16m1(vuint8m1_t src) {
  return vreinterpret_v_u8m1_u16m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8m2_u16m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 8 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
//
vuint16m2_t test_vreinterpret_v_u8m2_u16m2(vuint8m2_t src) {
  return vreinterpret_v_u8m2_u16m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8m4_u16m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 16 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
//
vuint16m4_t test_vreinterpret_v_u8m4_u16m4(vuint8m4_t src) {
  return vreinterpret_v_u8m4_u16m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8m8_u16m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 32 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
//
vuint16m8_t test_vreinterpret_v_u8m8_u16m8(vuint8m8_t src) {
  return vreinterpret_v_u8m8_u16m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf2_i32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i8> [[SRC:%.*]] to <vscale x 1 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
//
vint32mf2_t test_vreinterpret_v_i8mf2_i32mf2(vint8mf2_t src) {
  return vreinterpret_v_i8mf2_i32mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8m1_i32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vint32m1_t test_vreinterpret_v_i8m1_i32m1(vint8m1_t src) {
  return vreinterpret_v_i8m1_i32m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8m2_i32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 4 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
//
vint32m2_t test_vreinterpret_v_i8m2_i32m2(vint8m2_t src) {
  return vreinterpret_v_i8m2_i32m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8m4_i32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 8 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
//
vint32m4_t test_vreinterpret_v_i8m4_i32m4(vint8m4_t src) {
  return vreinterpret_v_i8m4_i32m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8m8_i32m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 16 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
//
vint32m8_t test_vreinterpret_v_i8m8_i32m8(vint8m8_t src) {
  return vreinterpret_v_i8m8_i32m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf2_u32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i8> [[SRC:%.*]] to <vscale x 1 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
//
vuint32mf2_t test_vreinterpret_v_u8mf2_u32mf2(vuint8mf2_t src) {
  return vreinterpret_v_u8mf2_u32mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8m1_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vreinterpret_v_u8m1_u32m1(vuint8m1_t src) {
  return vreinterpret_v_u8m1_u32m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8m2_u32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 4 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
//
vuint32m2_t test_vreinterpret_v_u8m2_u32m2(vuint8m2_t src) {
  return vreinterpret_v_u8m2_u32m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8m4_u32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 8 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
//
vuint32m4_t test_vreinterpret_v_u8m4_u32m4(vuint8m4_t src) {
  return vreinterpret_v_u8m4_u32m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8m8_u32m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 16 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
//
vuint32m8_t test_vreinterpret_v_u8m8_u32m8(vuint8m8_t src) {
  return vreinterpret_v_u8m8_u32m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8m1_i64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vreinterpret_v_i8m1_i64m1(vint8m1_t src) {
  return vreinterpret_v_i8m1_i64m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8m2_i64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 2 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vint64m2_t test_vreinterpret_v_i8m2_i64m2(vint8m2_t src) {
  return vreinterpret_v_i8m2_i64m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8m4_i64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 4 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vint64m4_t test_vreinterpret_v_i8m4_i64m4(vint8m4_t src) {
  return vreinterpret_v_i8m4_i64m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i8m8_i64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 8 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vint64m8_t test_vreinterpret_v_i8m8_i64m8(vint8m8_t src) {
  return vreinterpret_v_i8m8_i64m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8m1_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vreinterpret_v_u8m1_u64m1(vuint8m1_t src) {
  return vreinterpret_v_u8m1_u64m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8m2_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 2 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vreinterpret_v_u8m2_u64m2(vuint8m2_t src) {
  return vreinterpret_v_u8m2_u64m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8m4_u64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 4 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vreinterpret_v_u8m4_u64m4(vuint8m4_t src) {
  return vreinterpret_v_u8m4_u64m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u8m8_u64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 8 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vreinterpret_v_u8m8_u64m8(vuint8m8_t src) {
  return vreinterpret_v_u8m8_u64m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16mf4_i8mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i16> [[SRC:%.*]] to <vscale x 2 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
//
vint8mf4_t test_vreinterpret_v_i16mf4_i8mf4(vint16mf4_t src) {
  return vreinterpret_v_i16mf4_i8mf4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16mf2_i8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC:%.*]] to <vscale x 4 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
//
vint8mf2_t test_vreinterpret_v_i16mf2_i8mf2(vint16mf2_t src) {
  return vreinterpret_v_i16mf2_i8mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m1_i8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 8 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vreinterpret_v_i16m1_i8m1(vint16m1_t src) {
  return vreinterpret_v_i16m1_i8m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m2_i8m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 16 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
//
vint8m2_t test_vreinterpret_v_i16m2_i8m2(vint16m2_t src) {
  return vreinterpret_v_i16m2_i8m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m4_i8m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 32 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
//
vint8m4_t test_vreinterpret_v_i16m4_i8m4(vint16m4_t src) {
  return vreinterpret_v_i16m4_i8m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m8_i8m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 64 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
//
vint8m8_t test_vreinterpret_v_i16m8_i8m8(vint16m8_t src) {
  return vreinterpret_v_i16m8_i8m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16mf4_u8mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i16> [[SRC:%.*]] to <vscale x 2 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
//
vuint8mf4_t test_vreinterpret_v_u16mf4_u8mf4(vuint16mf4_t src) {
  return vreinterpret_v_u16mf4_u8mf4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16mf2_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC:%.*]] to <vscale x 4 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
//
vuint8mf2_t test_vreinterpret_v_u16mf2_u8mf2(vuint16mf2_t src) {
  return vreinterpret_v_u16mf2_u8mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m1_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 8 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vreinterpret_v_u16m1_u8m1(vuint16m1_t src) {
  return vreinterpret_v_u16m1_u8m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m2_u8m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 16 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
//
vuint8m2_t test_vreinterpret_v_u16m2_u8m2(vuint16m2_t src) {
  return vreinterpret_v_u16m2_u8m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m4_u8m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 32 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
//
vuint8m4_t test_vreinterpret_v_u16m4_u8m4(vuint16m4_t src) {
  return vreinterpret_v_u16m4_u8m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m8_u8m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 64 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
//
vuint8m8_t test_vreinterpret_v_u16m8_u8m8(vuint16m8_t src) {
  return vreinterpret_v_u16m8_u8m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16mf2_i32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC:%.*]] to <vscale x 1 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
//
vint32mf2_t test_vreinterpret_v_i16mf2_i32mf2(vint16mf2_t src) {
  return vreinterpret_v_i16mf2_i32mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m1_i32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vint32m1_t test_vreinterpret_v_i16m1_i32m1(vint16m1_t src) {
  return vreinterpret_v_i16m1_i32m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m2_i32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 4 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
//
vint32m2_t test_vreinterpret_v_i16m2_i32m2(vint16m2_t src) {
  return vreinterpret_v_i16m2_i32m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m4_i32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 8 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
//
vint32m4_t test_vreinterpret_v_i16m4_i32m4(vint16m4_t src) {
  return vreinterpret_v_i16m4_i32m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m8_i32m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 16 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
//
vint32m8_t test_vreinterpret_v_i16m8_i32m8(vint16m8_t src) {
  return vreinterpret_v_i16m8_i32m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16mf2_u32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC:%.*]] to <vscale x 1 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
//
vuint32mf2_t test_vreinterpret_v_u16mf2_u32mf2(vuint16mf2_t src) {
  return vreinterpret_v_u16mf2_u32mf2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m1_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vreinterpret_v_u16m1_u32m1(vuint16m1_t src) {
  return vreinterpret_v_u16m1_u32m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m2_u32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 4 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
//
vuint32m2_t test_vreinterpret_v_u16m2_u32m2(vuint16m2_t src) {
  return vreinterpret_v_u16m2_u32m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m4_u32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 8 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
//
vuint32m4_t test_vreinterpret_v_u16m4_u32m4(vuint16m4_t src) {
  return vreinterpret_v_u16m4_u32m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m8_u32m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 16 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
//
vuint32m8_t test_vreinterpret_v_u16m8_u32m8(vuint16m8_t src) {
  return vreinterpret_v_u16m8_u32m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m1_i64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vreinterpret_v_i16m1_i64m1(vint16m1_t src) {
  return vreinterpret_v_i16m1_i64m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m2_i64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 2 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vint64m2_t test_vreinterpret_v_i16m2_i64m2(vint16m2_t src) {
  return vreinterpret_v_i16m2_i64m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m4_i64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 4 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vint64m4_t test_vreinterpret_v_i16m4_i64m4(vint16m4_t src) {
  return vreinterpret_v_i16m4_i64m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m8_i64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 8 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vint64m8_t test_vreinterpret_v_i16m8_i64m8(vint16m8_t src) {
  return vreinterpret_v_i16m8_i64m8(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m1_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vreinterpret_v_u16m1_u64m1(vuint16m1_t src) {
  return vreinterpret_v_u16m1_u64m1(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m2_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 2 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vreinterpret_v_u16m2_u64m2(vuint16m2_t src) {
  return vreinterpret_v_u16m2_u64m2(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m4_u64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 4 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vreinterpret_v_u16m4_u64m4(vuint16m4_t src) {
  return vreinterpret_v_u16m4_u64m4(src);
}

//
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m8_u64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 8 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vreinterpret_v_u16m8_u64m8(vuint16m8_t src) {
  return vreinterpret_v_u16m8_u64m8(src);
}

1364 //
1365 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32mf2_i8mf2(
1366 // CHECK-RV64-NEXT:  entry:
1367 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 4 x i8>
1368 // CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
1369 //
test_vreinterpret_v_i32mf2_i8mf2(vint32mf2_t src)1370 vint8mf2_t test_vreinterpret_v_i32mf2_i8mf2(vint32mf2_t src) {
1371   return vreinterpret_v_i32mf2_i8mf2(src);
1372 }
1373 
1374 //
1375 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m1_i8m1(
1376 // CHECK-RV64-NEXT:  entry:
1377 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 8 x i8>
1378 // CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
1379 //
test_vreinterpret_v_i32m1_i8m1(vint32m1_t src)1380 vint8m1_t test_vreinterpret_v_i32m1_i8m1(vint32m1_t src) {
1381   return vreinterpret_v_i32m1_i8m1(src);
1382 }
1383 
1384 //
1385 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m2_i8m2(
1386 // CHECK-RV64-NEXT:  entry:
1387 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 16 x i8>
1388 // CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
1389 //
test_vreinterpret_v_i32m2_i8m2(vint32m2_t src)1390 vint8m2_t test_vreinterpret_v_i32m2_i8m2(vint32m2_t src) {
1391   return vreinterpret_v_i32m2_i8m2(src);
1392 }
1393 
1394 //
1395 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m4_i8m4(
1396 // CHECK-RV64-NEXT:  entry:
1397 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 32 x i8>
1398 // CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
1399 //
test_vreinterpret_v_i32m4_i8m4(vint32m4_t src)1400 vint8m4_t test_vreinterpret_v_i32m4_i8m4(vint32m4_t src) {
1401   return vreinterpret_v_i32m4_i8m4(src);
1402 }
1403 
1404 //
1405 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m8_i8m8(
1406 // CHECK-RV64-NEXT:  entry:
1407 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 64 x i8>
1408 // CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
1409 //
test_vreinterpret_v_i32m8_i8m8(vint32m8_t src)1410 vint8m8_t test_vreinterpret_v_i32m8_i8m8(vint32m8_t src) {
1411   return vreinterpret_v_i32m8_i8m8(src);
1412 }
1413 
1414 //
1415 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32mf2_u8mf2(
1416 // CHECK-RV64-NEXT:  entry:
1417 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 4 x i8>
1418 // CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
1419 //
test_vreinterpret_v_u32mf2_u8mf2(vuint32mf2_t src)1420 vuint8mf2_t test_vreinterpret_v_u32mf2_u8mf2(vuint32mf2_t src) {
1421   return vreinterpret_v_u32mf2_u8mf2(src);
1422 }
1423 
1424 //
1425 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m1_u8m1(
1426 // CHECK-RV64-NEXT:  entry:
1427 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 8 x i8>
1428 // CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
1429 //
test_vreinterpret_v_u32m1_u8m1(vuint32m1_t src)1430 vuint8m1_t test_vreinterpret_v_u32m1_u8m1(vuint32m1_t src) {
1431   return vreinterpret_v_u32m1_u8m1(src);
1432 }
1433 
1434 //
1435 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m2_u8m2(
1436 // CHECK-RV64-NEXT:  entry:
1437 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 16 x i8>
1438 // CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
1439 //
test_vreinterpret_v_u32m2_u8m2(vuint32m2_t src)1440 vuint8m2_t test_vreinterpret_v_u32m2_u8m2(vuint32m2_t src) {
1441   return vreinterpret_v_u32m2_u8m2(src);
1442 }
1443 
1444 //
1445 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m4_u8m4(
1446 // CHECK-RV64-NEXT:  entry:
1447 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 32 x i8>
1448 // CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
1449 //
test_vreinterpret_v_u32m4_u8m4(vuint32m4_t src)1450 vuint8m4_t test_vreinterpret_v_u32m4_u8m4(vuint32m4_t src) {
1451   return vreinterpret_v_u32m4_u8m4(src);
1452 }
1453 
1454 //
1455 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m8_u8m8(
1456 // CHECK-RV64-NEXT:  entry:
1457 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 64 x i8>
1458 // CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
1459 //
test_vreinterpret_v_u32m8_u8m8(vuint32m8_t src)1460 vuint8m8_t test_vreinterpret_v_u32m8_u8m8(vuint32m8_t src) {
1461   return vreinterpret_v_u32m8_u8m8(src);
1462 }
1463 
1464 //
1465 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32mf2_i16mf2(
1466 // CHECK-RV64-NEXT:  entry:
1467 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 2 x i16>
1468 // CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
1469 //
test_vreinterpret_v_i32mf2_i16mf2(vint32mf2_t src)1470 vint16mf2_t test_vreinterpret_v_i32mf2_i16mf2(vint32mf2_t src) {
1471   return vreinterpret_v_i32mf2_i16mf2(src);
1472 }
1473 
1474 //
1475 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m1_i16m1(
1476 // CHECK-RV64-NEXT:  entry:
1477 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 4 x i16>
1478 // CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
1479 //
test_vreinterpret_v_i32m1_i16m1(vint32m1_t src)1480 vint16m1_t test_vreinterpret_v_i32m1_i16m1(vint32m1_t src) {
1481   return vreinterpret_v_i32m1_i16m1(src);
1482 }
1483 
1484 //
1485 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m2_i16m2(
1486 // CHECK-RV64-NEXT:  entry:
1487 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 8 x i16>
1488 // CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
1489 //
test_vreinterpret_v_i32m2_i16m2(vint32m2_t src)1490 vint16m2_t test_vreinterpret_v_i32m2_i16m2(vint32m2_t src) {
1491   return vreinterpret_v_i32m2_i16m2(src);
1492 }
1493 
1494 //
1495 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m4_i16m4(
1496 // CHECK-RV64-NEXT:  entry:
1497 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 16 x i16>
1498 // CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
1499 //
test_vreinterpret_v_i32m4_i16m4(vint32m4_t src)1500 vint16m4_t test_vreinterpret_v_i32m4_i16m4(vint32m4_t src) {
1501   return vreinterpret_v_i32m4_i16m4(src);
1502 }
1503 
1504 //
1505 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m8_i16m8(
1506 // CHECK-RV64-NEXT:  entry:
1507 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 32 x i16>
1508 // CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
1509 //
test_vreinterpret_v_i32m8_i16m8(vint32m8_t src)1510 vint16m8_t test_vreinterpret_v_i32m8_i16m8(vint32m8_t src) {
1511   return vreinterpret_v_i32m8_i16m8(src);
1512 }
1513 
1514 //
1515 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32mf2_u16mf2(
1516 // CHECK-RV64-NEXT:  entry:
1517 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 2 x i16>
1518 // CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
1519 //
test_vreinterpret_v_u32mf2_u16mf2(vuint32mf2_t src)1520 vuint16mf2_t test_vreinterpret_v_u32mf2_u16mf2(vuint32mf2_t src) {
1521   return vreinterpret_v_u32mf2_u16mf2(src);
1522 }
1523 
1524 //
1525 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m1_u16m1(
1526 // CHECK-RV64-NEXT:  entry:
1527 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 4 x i16>
1528 // CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
1529 //
test_vreinterpret_v_u32m1_u16m1(vuint32m1_t src)1530 vuint16m1_t test_vreinterpret_v_u32m1_u16m1(vuint32m1_t src) {
1531   return vreinterpret_v_u32m1_u16m1(src);
1532 }
1533 
1534 //
1535 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m2_u16m2(
1536 // CHECK-RV64-NEXT:  entry:
1537 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 8 x i16>
1538 // CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
1539 //
test_vreinterpret_v_u32m2_u16m2(vuint32m2_t src)1540 vuint16m2_t test_vreinterpret_v_u32m2_u16m2(vuint32m2_t src) {
1541   return vreinterpret_v_u32m2_u16m2(src);
1542 }
1543 
1544 //
1545 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m4_u16m4(
1546 // CHECK-RV64-NEXT:  entry:
1547 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 16 x i16>
1548 // CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
1549 //
test_vreinterpret_v_u32m4_u16m4(vuint32m4_t src)1550 vuint16m4_t test_vreinterpret_v_u32m4_u16m4(vuint32m4_t src) {
1551   return vreinterpret_v_u32m4_u16m4(src);
1552 }
1553 
1554 //
1555 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m8_u16m8(
1556 // CHECK-RV64-NEXT:  entry:
1557 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 32 x i16>
1558 // CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
1559 //
test_vreinterpret_v_u32m8_u16m8(vuint32m8_t src)1560 vuint16m8_t test_vreinterpret_v_u32m8_u16m8(vuint32m8_t src) {
1561   return vreinterpret_v_u32m8_u16m8(src);
1562 }
1563 
1564 //
1565 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m1_i64m1(
1566 // CHECK-RV64-NEXT:  entry:
1567 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 1 x i64>
1568 // CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
1569 //
test_vreinterpret_v_i32m1_i64m1(vint32m1_t src)1570 vint64m1_t test_vreinterpret_v_i32m1_i64m1(vint32m1_t src) {
1571   return vreinterpret_v_i32m1_i64m1(src);
1572 }
1573 
1574 //
1575 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m2_i64m2(
1576 // CHECK-RV64-NEXT:  entry:
1577 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 2 x i64>
1578 // CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
1579 //
test_vreinterpret_v_i32m2_i64m2(vint32m2_t src)1580 vint64m2_t test_vreinterpret_v_i32m2_i64m2(vint32m2_t src) {
1581   return vreinterpret_v_i32m2_i64m2(src);
1582 }
1583 
1584 //
1585 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m4_i64m4(
1586 // CHECK-RV64-NEXT:  entry:
1587 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 4 x i64>
1588 // CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
1589 //
test_vreinterpret_v_i32m4_i64m4(vint32m4_t src)1590 vint64m4_t test_vreinterpret_v_i32m4_i64m4(vint32m4_t src) {
1591   return vreinterpret_v_i32m4_i64m4(src);
1592 }
1593 
1594 //
1595 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m8_i64m8(
1596 // CHECK-RV64-NEXT:  entry:
1597 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 8 x i64>
1598 // CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
1599 //
test_vreinterpret_v_i32m8_i64m8(vint32m8_t src)1600 vint64m8_t test_vreinterpret_v_i32m8_i64m8(vint32m8_t src) {
1601   return vreinterpret_v_i32m8_i64m8(src);
1602 }
1603 
1604 //
1605 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m1_u64m1(
1606 // CHECK-RV64-NEXT:  entry:
1607 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 1 x i64>
1608 // CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
1609 //
test_vreinterpret_v_u32m1_u64m1(vuint32m1_t src)1610 vuint64m1_t test_vreinterpret_v_u32m1_u64m1(vuint32m1_t src) {
1611   return vreinterpret_v_u32m1_u64m1(src);
1612 }
1613 
1614 //
1615 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m2_u64m2(
1616 // CHECK-RV64-NEXT:  entry:
1617 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 2 x i64>
1618 // CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
1619 //
test_vreinterpret_v_u32m2_u64m2(vuint32m2_t src)1620 vuint64m2_t test_vreinterpret_v_u32m2_u64m2(vuint32m2_t src) {
1621   return vreinterpret_v_u32m2_u64m2(src);
1622 }
1623 
1624 //
1625 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m4_u64m4(
1626 // CHECK-RV64-NEXT:  entry:
1627 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 4 x i64>
1628 // CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
1629 //
test_vreinterpret_v_u32m4_u64m4(vuint32m4_t src)1630 vuint64m4_t test_vreinterpret_v_u32m4_u64m4(vuint32m4_t src) {
1631   return vreinterpret_v_u32m4_u64m4(src);
1632 }
1633 
1634 //
1635 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m8_u64m8(
1636 // CHECK-RV64-NEXT:  entry:
1637 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 8 x i64>
1638 // CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
1639 //
test_vreinterpret_v_u32m8_u64m8(vuint32m8_t src)1640 vuint64m8_t test_vreinterpret_v_u32m8_u64m8(vuint32m8_t src) {
1641   return vreinterpret_v_u32m8_u64m8(src);
1642 }
1643 
1644 //
1645 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m1_i8m1(
1646 // CHECK-RV64-NEXT:  entry:
1647 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 8 x i8>
1648 // CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
1649 //
test_vreinterpret_v_i64m1_i8m1(vint64m1_t src)1650 vint8m1_t test_vreinterpret_v_i64m1_i8m1(vint64m1_t src) {
1651   return vreinterpret_v_i64m1_i8m1(src);
1652 }
1653 
1654 //
1655 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m2_i8m2(
1656 // CHECK-RV64-NEXT:  entry:
1657 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 16 x i8>
1658 // CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
1659 //
test_vreinterpret_v_i64m2_i8m2(vint64m2_t src)1660 vint8m2_t test_vreinterpret_v_i64m2_i8m2(vint64m2_t src) {
1661   return vreinterpret_v_i64m2_i8m2(src);
1662 }
1663 
1664 //
1665 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m4_i8m4(
1666 // CHECK-RV64-NEXT:  entry:
1667 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 32 x i8>
1668 // CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
1669 //
test_vreinterpret_v_i64m4_i8m4(vint64m4_t src)1670 vint8m4_t test_vreinterpret_v_i64m4_i8m4(vint64m4_t src) {
1671   return vreinterpret_v_i64m4_i8m4(src);
1672 }
1673 
1674 //
1675 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m8_i8m8(
1676 // CHECK-RV64-NEXT:  entry:
1677 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 64 x i8>
1678 // CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
1679 //
test_vreinterpret_v_i64m8_i8m8(vint64m8_t src)1680 vint8m8_t test_vreinterpret_v_i64m8_i8m8(vint64m8_t src) {
1681   return vreinterpret_v_i64m8_i8m8(src);
1682 }
1683 
1684 //
1685 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m1_u8m1(
1686 // CHECK-RV64-NEXT:  entry:
1687 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 8 x i8>
1688 // CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
1689 //
test_vreinterpret_v_u64m1_u8m1(vuint64m1_t src)1690 vuint8m1_t test_vreinterpret_v_u64m1_u8m1(vuint64m1_t src) {
1691   return vreinterpret_v_u64m1_u8m1(src);
1692 }
1693 
1694 //
1695 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m2_u8m2(
1696 // CHECK-RV64-NEXT:  entry:
1697 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 16 x i8>
1698 // CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
1699 //
test_vreinterpret_v_u64m2_u8m2(vuint64m2_t src)1700 vuint8m2_t test_vreinterpret_v_u64m2_u8m2(vuint64m2_t src) {
1701   return vreinterpret_v_u64m2_u8m2(src);
1702 }
1703 
1704 //
1705 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m4_u8m4(
1706 // CHECK-RV64-NEXT:  entry:
1707 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 32 x i8>
1708 // CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
1709 //
test_vreinterpret_v_u64m4_u8m4(vuint64m4_t src)1710 vuint8m4_t test_vreinterpret_v_u64m4_u8m4(vuint64m4_t src) {
1711   return vreinterpret_v_u64m4_u8m4(src);
1712 }
1713 
1714 //
1715 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m8_u8m8(
1716 // CHECK-RV64-NEXT:  entry:
1717 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 64 x i8>
1718 // CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
1719 //
test_vreinterpret_v_u64m8_u8m8(vuint64m8_t src)1720 vuint8m8_t test_vreinterpret_v_u64m8_u8m8(vuint64m8_t src) {
1721   return vreinterpret_v_u64m8_u8m8(src);
1722 }
1723 
1724 //
1725 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m1_i16m1(
1726 // CHECK-RV64-NEXT:  entry:
1727 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 4 x i16>
1728 // CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
1729 //
test_vreinterpret_v_i64m1_i16m1(vint64m1_t src)1730 vint16m1_t test_vreinterpret_v_i64m1_i16m1(vint64m1_t src) {
1731   return vreinterpret_v_i64m1_i16m1(src);
1732 }
1733 
1734 //
1735 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m2_i16m2(
1736 // CHECK-RV64-NEXT:  entry:
1737 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 8 x i16>
1738 // CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
1739 //
test_vreinterpret_v_i64m2_i16m2(vint64m2_t src)1740 vint16m2_t test_vreinterpret_v_i64m2_i16m2(vint64m2_t src) {
1741   return vreinterpret_v_i64m2_i16m2(src);
1742 }
1743 
1744 //
1745 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m4_i16m4(
1746 // CHECK-RV64-NEXT:  entry:
1747 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 16 x i16>
1748 // CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
1749 //
test_vreinterpret_v_i64m4_i16m4(vint64m4_t src)1750 vint16m4_t test_vreinterpret_v_i64m4_i16m4(vint64m4_t src) {
1751   return vreinterpret_v_i64m4_i16m4(src);
1752 }
1753 
1754 //
1755 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m8_i16m8(
1756 // CHECK-RV64-NEXT:  entry:
1757 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 32 x i16>
1758 // CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
1759 //
test_vreinterpret_v_i64m8_i16m8(vint64m8_t src)1760 vint16m8_t test_vreinterpret_v_i64m8_i16m8(vint64m8_t src) {
1761   return vreinterpret_v_i64m8_i16m8(src);
1762 }
1763 
1764 //
1765 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m1_u16m1(
1766 // CHECK-RV64-NEXT:  entry:
1767 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 4 x i16>
1768 // CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
1769 //
test_vreinterpret_v_u64m1_u16m1(vuint64m1_t src)1770 vuint16m1_t test_vreinterpret_v_u64m1_u16m1(vuint64m1_t src) {
1771   return vreinterpret_v_u64m1_u16m1(src);
1772 }
1773 
1774 //
1775 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m2_u16m2(
1776 // CHECK-RV64-NEXT:  entry:
1777 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 8 x i16>
1778 // CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
1779 //
test_vreinterpret_v_u64m2_u16m2(vuint64m2_t src)1780 vuint16m2_t test_vreinterpret_v_u64m2_u16m2(vuint64m2_t src) {
1781   return vreinterpret_v_u64m2_u16m2(src);
1782 }
1783 
1784 //
1785 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m4_u16m4(
1786 // CHECK-RV64-NEXT:  entry:
1787 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 16 x i16>
1788 // CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
1789 //
test_vreinterpret_v_u64m4_u16m4(vuint64m4_t src)1790 vuint16m4_t test_vreinterpret_v_u64m4_u16m4(vuint64m4_t src) {
1791   return vreinterpret_v_u64m4_u16m4(src);
1792 }
1793 
1794 //
1795 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m8_u16m8(
1796 // CHECK-RV64-NEXT:  entry:
1797 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 32 x i16>
1798 // CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
1799 //
test_vreinterpret_v_u64m8_u16m8(vuint64m8_t src)1800 vuint16m8_t test_vreinterpret_v_u64m8_u16m8(vuint64m8_t src) {
1801   return vreinterpret_v_u64m8_u16m8(src);
1802 }
1803 
1804 //
1805 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m1_i32m1(
1806 // CHECK-RV64-NEXT:  entry:
1807 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 2 x i32>
1808 // CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
1809 //
test_vreinterpret_v_i64m1_i32m1(vint64m1_t src)1810 vint32m1_t test_vreinterpret_v_i64m1_i32m1(vint64m1_t src) {
1811   return vreinterpret_v_i64m1_i32m1(src);
1812 }
1813 
1814 //
1815 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m2_i32m2(
1816 // CHECK-RV64-NEXT:  entry:
1817 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 4 x i32>
1818 // CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
1819 //
test_vreinterpret_v_i64m2_i32m2(vint64m2_t src)1820 vint32m2_t test_vreinterpret_v_i64m2_i32m2(vint64m2_t src) {
1821   return vreinterpret_v_i64m2_i32m2(src);
1822 }
1823 
1824 //
1825 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m4_i32m4(
1826 // CHECK-RV64-NEXT:  entry:
1827 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 8 x i32>
1828 // CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
1829 //
test_vreinterpret_v_i64m4_i32m4(vint64m4_t src)1830 vint32m4_t test_vreinterpret_v_i64m4_i32m4(vint64m4_t src) {
1831   return vreinterpret_v_i64m4_i32m4(src);
1832 }
1833 
1834 //
1835 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m8_i32m8(
1836 // CHECK-RV64-NEXT:  entry:
1837 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 16 x i32>
1838 // CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
1839 //
test_vreinterpret_v_i64m8_i32m8(vint64m8_t src)1840 vint32m8_t test_vreinterpret_v_i64m8_i32m8(vint64m8_t src) {
1841   return vreinterpret_v_i64m8_i32m8(src);
1842 }
1843 
1844 //
1845 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m1_u32m1(
1846 // CHECK-RV64-NEXT:  entry:
1847 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 2 x i32>
1848 // CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
1849 //
test_vreinterpret_v_u64m1_u32m1(vuint64m1_t src)1850 vuint32m1_t test_vreinterpret_v_u64m1_u32m1(vuint64m1_t src) {
1851   return vreinterpret_v_u64m1_u32m1(src);
1852 }
1853 
1854 //
1855 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m2_u32m2(
1856 // CHECK-RV64-NEXT:  entry:
1857 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 4 x i32>
1858 // CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
1859 //
test_vreinterpret_v_u64m2_u32m2(vuint64m2_t src)1860 vuint32m2_t test_vreinterpret_v_u64m2_u32m2(vuint64m2_t src) {
1861   return vreinterpret_v_u64m2_u32m2(src);
1862 }
1863 
1864 //
1865 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m4_u32m4(
1866 // CHECK-RV64-NEXT:  entry:
1867 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 8 x i32>
1868 // CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
1869 //
test_vreinterpret_v_u64m4_u32m4(vuint64m4_t src)1870 vuint32m4_t test_vreinterpret_v_u64m4_u32m4(vuint64m4_t src) {
1871   return vreinterpret_v_u64m4_u32m4(src);
1872 }
1873 
1874 //
1875 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m8_u32m8(
1876 // CHECK-RV64-NEXT:  entry:
1877 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 16 x i32>
1878 // CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
1879 //
test_vreinterpret_v_u64m8_u32m8(vuint64m8_t src)1880 vuint32m8_t test_vreinterpret_v_u64m8_u32m8(vuint64m8_t src) {
1881   return vreinterpret_v_u64m8_u32m8(src);
1882 }