1 // NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
2 // REQUIRES: riscv-registered-target
3 // RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d -target-feature +experimental-v \
4 // RUN:   -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
5 
6 #include <riscv_vector.h>
7 
8 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf8_u8mf8(
9 // CHECK-RV64-NEXT:  entry:
10 // CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[SRC:%.*]]
11 //
test_vreinterpret_v_i8mf8_u8mf8(vint8mf8_t src)12 vuint8mf8_t test_vreinterpret_v_i8mf8_u8mf8(vint8mf8_t src) {
13   return vreinterpret_u8mf8(src);
14 }
15 
16 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf4_u8mf4(
17 // CHECK-RV64-NEXT:  entry:
18 // CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[SRC:%.*]]
19 //
test_vreinterpret_v_i8mf4_u8mf4(vint8mf4_t src)20 vuint8mf4_t test_vreinterpret_v_i8mf4_u8mf4(vint8mf4_t src) {
21   return vreinterpret_u8mf4(src);
22 }
23 
24 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf2_u8mf2(
25 // CHECK-RV64-NEXT:  entry:
26 // CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[SRC:%.*]]
27 //
test_vreinterpret_v_i8mf2_u8mf2(vint8mf2_t src)28 vuint8mf2_t test_vreinterpret_v_i8mf2_u8mf2(vint8mf2_t src) {
29   return vreinterpret_u8mf2(src);
30 }
31 
32 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m1_u8m1(
33 // CHECK-RV64-NEXT:  entry:
34 // CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[SRC:%.*]]
35 //
test_vreinterpret_v_i8m1_u8m1(vint8m1_t src)36 vuint8m1_t test_vreinterpret_v_i8m1_u8m1(vint8m1_t src) {
37   return vreinterpret_u8m1(src);
38 }
39 
40 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m2_u8m2(
41 // CHECK-RV64-NEXT:  entry:
42 // CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[SRC:%.*]]
43 //
test_vreinterpret_v_i8m2_u8m2(vint8m2_t src)44 vuint8m2_t test_vreinterpret_v_i8m2_u8m2(vint8m2_t src) {
45   return vreinterpret_u8m2(src);
46 }
47 
48 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m4_u8m4(
49 // CHECK-RV64-NEXT:  entry:
50 // CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[SRC:%.*]]
51 //
test_vreinterpret_v_i8m4_u8m4(vint8m4_t src)52 vuint8m4_t test_vreinterpret_v_i8m4_u8m4(vint8m4_t src) {
53   return vreinterpret_u8m4(src);
54 }
55 
56 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m8_u8m8(
57 // CHECK-RV64-NEXT:  entry:
58 // CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[SRC:%.*]]
59 //
test_vreinterpret_v_i8m8_u8m8(vint8m8_t src)60 vuint8m8_t test_vreinterpret_v_i8m8_u8m8(vint8m8_t src) {
61   return vreinterpret_u8m8(src);
62 }
63 
64 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf8_i8mf8(
65 // CHECK-RV64-NEXT:  entry:
66 // CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[SRC:%.*]]
67 //
test_vreinterpret_v_u8mf8_i8mf8(vuint8mf8_t src)68 vint8mf8_t test_vreinterpret_v_u8mf8_i8mf8(vuint8mf8_t src) {
69   return vreinterpret_i8mf8(src);
70 }
71 
72 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf4_i8mf4(
73 // CHECK-RV64-NEXT:  entry:
74 // CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[SRC:%.*]]
75 //
test_vreinterpret_v_u8mf4_i8mf4(vuint8mf4_t src)76 vint8mf4_t test_vreinterpret_v_u8mf4_i8mf4(vuint8mf4_t src) {
77   return vreinterpret_i8mf4(src);
78 }
79 
80 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf2_i8mf2(
81 // CHECK-RV64-NEXT:  entry:
82 // CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[SRC:%.*]]
83 //
test_vreinterpret_v_u8mf2_i8mf2(vuint8mf2_t src)84 vint8mf2_t test_vreinterpret_v_u8mf2_i8mf2(vuint8mf2_t src) {
85   return vreinterpret_i8mf2(src);
86 }
87 
88 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m1_i8m1(
89 // CHECK-RV64-NEXT:  entry:
90 // CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[SRC:%.*]]
91 //
test_vreinterpret_v_u8m1_i8m1(vuint8m1_t src)92 vint8m1_t test_vreinterpret_v_u8m1_i8m1(vuint8m1_t src) {
93   return vreinterpret_i8m1(src);
94 }
95 
96 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m2_i8m2(
97 // CHECK-RV64-NEXT:  entry:
98 // CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[SRC:%.*]]
99 //
test_vreinterpret_v_u8m2_i8m2(vuint8m2_t src)100 vint8m2_t test_vreinterpret_v_u8m2_i8m2(vuint8m2_t src) {
101   return vreinterpret_i8m2(src);
102 }
103 
104 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m4_i8m4(
105 // CHECK-RV64-NEXT:  entry:
106 // CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[SRC:%.*]]
107 //
test_vreinterpret_v_u8m4_i8m4(vuint8m4_t src)108 vint8m4_t test_vreinterpret_v_u8m4_i8m4(vuint8m4_t src) {
109   return vreinterpret_i8m4(src);
110 }
111 
112 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m8_i8m8(
113 // CHECK-RV64-NEXT:  entry:
114 // CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[SRC:%.*]]
115 //
test_vreinterpret_v_u8m8_i8m8(vuint8m8_t src)116 vint8m8_t test_vreinterpret_v_u8m8_i8m8(vuint8m8_t src) {
117   return vreinterpret_i8m8(src);
118 }
119 
120 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16mf4_u16mf4(
121 // CHECK-RV64-NEXT:  entry:
122 // CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[SRC:%.*]]
123 //
test_vreinterpret_v_i16mf4_u16mf4(vint16mf4_t src)124 vuint16mf4_t test_vreinterpret_v_i16mf4_u16mf4(vint16mf4_t src) {
125   return vreinterpret_u16mf4(src);
126 }
127 
128 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16mf2_u16mf2(
129 // CHECK-RV64-NEXT:  entry:
130 // CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[SRC:%.*]]
131 //
test_vreinterpret_v_i16mf2_u16mf2(vint16mf2_t src)132 vuint16mf2_t test_vreinterpret_v_i16mf2_u16mf2(vint16mf2_t src) {
133   return vreinterpret_u16mf2(src);
134 }
135 
136 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m1_u16m1(
137 // CHECK-RV64-NEXT:  entry:
138 // CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[SRC:%.*]]
139 //
test_vreinterpret_v_i16m1_u16m1(vint16m1_t src)140 vuint16m1_t test_vreinterpret_v_i16m1_u16m1(vint16m1_t src) {
141   return vreinterpret_u16m1(src);
142 }
143 
144 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m2_u16m2(
145 // CHECK-RV64-NEXT:  entry:
146 // CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[SRC:%.*]]
147 //
test_vreinterpret_v_i16m2_u16m2(vint16m2_t src)148 vuint16m2_t test_vreinterpret_v_i16m2_u16m2(vint16m2_t src) {
149   return vreinterpret_u16m2(src);
150 }
151 
152 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m4_u16m4(
153 // CHECK-RV64-NEXT:  entry:
154 // CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[SRC:%.*]]
155 //
test_vreinterpret_v_i16m4_u16m4(vint16m4_t src)156 vuint16m4_t test_vreinterpret_v_i16m4_u16m4(vint16m4_t src) {
157   return vreinterpret_u16m4(src);
158 }
159 
160 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m8_u16m8(
161 // CHECK-RV64-NEXT:  entry:
162 // CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[SRC:%.*]]
163 //
test_vreinterpret_v_i16m8_u16m8(vint16m8_t src)164 vuint16m8_t test_vreinterpret_v_i16m8_u16m8(vint16m8_t src) {
165   return vreinterpret_u16m8(src);
166 }
167 
168 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16mf4_i16mf4(
169 // CHECK-RV64-NEXT:  entry:
170 // CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[SRC:%.*]]
171 //
test_vreinterpret_v_u16mf4_i16mf4(vuint16mf4_t src)172 vint16mf4_t test_vreinterpret_v_u16mf4_i16mf4(vuint16mf4_t src) {
173   return vreinterpret_i16mf4(src);
174 }
175 
176 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16mf2_i16mf2(
177 // CHECK-RV64-NEXT:  entry:
178 // CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[SRC:%.*]]
179 //
test_vreinterpret_v_u16mf2_i16mf2(vuint16mf2_t src)180 vint16mf2_t test_vreinterpret_v_u16mf2_i16mf2(vuint16mf2_t src) {
181   return vreinterpret_i16mf2(src);
182 }
183 
184 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m1_i16m1(
185 // CHECK-RV64-NEXT:  entry:
186 // CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[SRC:%.*]]
187 //
test_vreinterpret_v_u16m1_i16m1(vuint16m1_t src)188 vint16m1_t test_vreinterpret_v_u16m1_i16m1(vuint16m1_t src) {
189   return vreinterpret_i16m1(src);
190 }
191 
192 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m2_i16m2(
193 // CHECK-RV64-NEXT:  entry:
194 // CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[SRC:%.*]]
195 //
test_vreinterpret_v_u16m2_i16m2(vuint16m2_t src)196 vint16m2_t test_vreinterpret_v_u16m2_i16m2(vuint16m2_t src) {
197   return vreinterpret_i16m2(src);
198 }
199 
200 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m4_i16m4(
201 // CHECK-RV64-NEXT:  entry:
202 // CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[SRC:%.*]]
203 //
test_vreinterpret_v_u16m4_i16m4(vuint16m4_t src)204 vint16m4_t test_vreinterpret_v_u16m4_i16m4(vuint16m4_t src) {
205   return vreinterpret_i16m4(src);
206 }
207 
208 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m8_i16m8(
209 // CHECK-RV64-NEXT:  entry:
210 // CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[SRC:%.*]]
211 //
test_vreinterpret_v_u16m8_i16m8(vuint16m8_t src)212 vint16m8_t test_vreinterpret_v_u16m8_i16m8(vuint16m8_t src) {
213   return vreinterpret_i16m8(src);
214 }
215 
216 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32mf2_u32mf2(
217 // CHECK-RV64-NEXT:  entry:
218 // CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[SRC:%.*]]
219 //
test_vreinterpret_v_i32mf2_u32mf2(vint32mf2_t src)220 vuint32mf2_t test_vreinterpret_v_i32mf2_u32mf2(vint32mf2_t src) {
221   return vreinterpret_u32mf2(src);
222 }
223 
224 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m1_u32m1(
225 // CHECK-RV64-NEXT:  entry:
226 // CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[SRC:%.*]]
227 //
test_vreinterpret_v_i32m1_u32m1(vint32m1_t src)228 vuint32m1_t test_vreinterpret_v_i32m1_u32m1(vint32m1_t src) {
229   return vreinterpret_u32m1(src);
230 }
231 
232 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m2_u32m2(
233 // CHECK-RV64-NEXT:  entry:
234 // CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[SRC:%.*]]
235 //
test_vreinterpret_v_i32m2_u32m2(vint32m2_t src)236 vuint32m2_t test_vreinterpret_v_i32m2_u32m2(vint32m2_t src) {
237   return vreinterpret_u32m2(src);
238 }
239 
240 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m4_u32m4(
241 // CHECK-RV64-NEXT:  entry:
242 // CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[SRC:%.*]]
243 //
test_vreinterpret_v_i32m4_u32m4(vint32m4_t src)244 vuint32m4_t test_vreinterpret_v_i32m4_u32m4(vint32m4_t src) {
245   return vreinterpret_u32m4(src);
246 }
247 
248 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m8_u32m8(
249 // CHECK-RV64-NEXT:  entry:
250 // CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[SRC:%.*]]
251 //
test_vreinterpret_v_i32m8_u32m8(vint32m8_t src)252 vuint32m8_t test_vreinterpret_v_i32m8_u32m8(vint32m8_t src) {
253   return vreinterpret_u32m8(src);
254 }
255 
256 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32mf2_i32mf2(
257 // CHECK-RV64-NEXT:  entry:
258 // CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[SRC:%.*]]
259 //
test_vreinterpret_v_u32mf2_i32mf2(vuint32mf2_t src)260 vint32mf2_t test_vreinterpret_v_u32mf2_i32mf2(vuint32mf2_t src) {
261   return vreinterpret_i32mf2(src);
262 }
263 
264 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m1_i32m1(
265 // CHECK-RV64-NEXT:  entry:
266 // CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[SRC:%.*]]
267 //
test_vreinterpret_v_u32m1_i32m1(vuint32m1_t src)268 vint32m1_t test_vreinterpret_v_u32m1_i32m1(vuint32m1_t src) {
269   return vreinterpret_i32m1(src);
270 }
271 
272 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m2_i32m2(
273 // CHECK-RV64-NEXT:  entry:
274 // CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[SRC:%.*]]
275 //
test_vreinterpret_v_u32m2_i32m2(vuint32m2_t src)276 vint32m2_t test_vreinterpret_v_u32m2_i32m2(vuint32m2_t src) {
277   return vreinterpret_i32m2(src);
278 }
279 
280 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m4_i32m4(
281 // CHECK-RV64-NEXT:  entry:
282 // CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[SRC:%.*]]
283 //
test_vreinterpret_v_u32m4_i32m4(vuint32m4_t src)284 vint32m4_t test_vreinterpret_v_u32m4_i32m4(vuint32m4_t src) {
285   return vreinterpret_i32m4(src);
286 }
287 
288 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m8_i32m8(
289 // CHECK-RV64-NEXT:  entry:
290 // CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[SRC:%.*]]
291 //
test_vreinterpret_v_u32m8_i32m8(vuint32m8_t src)292 vint32m8_t test_vreinterpret_v_u32m8_i32m8(vuint32m8_t src) {
293   return vreinterpret_i32m8(src);
294 }
295 
296 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32mf2_i32mf2(
297 // CHECK-RV64-NEXT:  entry:
298 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x float> [[SRC:%.*]] to <vscale x 1 x i32>
299 // CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
300 //
test_vreinterpret_v_f32mf2_i32mf2(vfloat32mf2_t src)301 vint32mf2_t test_vreinterpret_v_f32mf2_i32mf2(vfloat32mf2_t src) {
302   return vreinterpret_i32mf2(src);
303 }
304 
305 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32m1_i32m1(
306 // CHECK-RV64-NEXT:  entry:
307 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x float> [[SRC:%.*]] to <vscale x 2 x i32>
308 // CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
309 //
test_vreinterpret_v_f32m1_i32m1(vfloat32m1_t src)310 vint32m1_t test_vreinterpret_v_f32m1_i32m1(vfloat32m1_t src) {
311   return vreinterpret_i32m1(src);
312 }
313 
314 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32m2_i32m2(
315 // CHECK-RV64-NEXT:  entry:
316 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x float> [[SRC:%.*]] to <vscale x 4 x i32>
317 // CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
318 //
test_vreinterpret_v_f32m2_i32m2(vfloat32m2_t src)319 vint32m2_t test_vreinterpret_v_f32m2_i32m2(vfloat32m2_t src) {
320   return vreinterpret_i32m2(src);
321 }
322 
323 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32m4_i32m4(
324 // CHECK-RV64-NEXT:  entry:
325 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x float> [[SRC:%.*]] to <vscale x 8 x i32>
326 // CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
327 //
test_vreinterpret_v_f32m4_i32m4(vfloat32m4_t src)328 vint32m4_t test_vreinterpret_v_f32m4_i32m4(vfloat32m4_t src) {
329   return vreinterpret_i32m4(src);
330 }
331 
332 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32m8_i32m8(
333 // CHECK-RV64-NEXT:  entry:
334 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x float> [[SRC:%.*]] to <vscale x 16 x i32>
335 // CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
336 //
test_vreinterpret_v_f32m8_i32m8(vfloat32m8_t src)337 vint32m8_t test_vreinterpret_v_f32m8_i32m8(vfloat32m8_t src) {
338   return vreinterpret_i32m8(src);
339 }
340 
341 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32mf2_u32mf2(
342 // CHECK-RV64-NEXT:  entry:
343 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x float> [[SRC:%.*]] to <vscale x 1 x i32>
344 // CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
345 //
test_vreinterpret_v_f32mf2_u32mf2(vfloat32mf2_t src)346 vuint32mf2_t test_vreinterpret_v_f32mf2_u32mf2(vfloat32mf2_t src) {
347   return vreinterpret_u32mf2(src);
348 }
349 
350 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32m1_u32m1(
351 // CHECK-RV64-NEXT:  entry:
352 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x float> [[SRC:%.*]] to <vscale x 2 x i32>
353 // CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
354 //
test_vreinterpret_v_f32m1_u32m1(vfloat32m1_t src)355 vuint32m1_t test_vreinterpret_v_f32m1_u32m1(vfloat32m1_t src) {
356   return vreinterpret_u32m1(src);
357 }
358 
359 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32m2_u32m2(
360 // CHECK-RV64-NEXT:  entry:
361 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x float> [[SRC:%.*]] to <vscale x 4 x i32>
362 // CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
363 //
test_vreinterpret_v_f32m2_u32m2(vfloat32m2_t src)364 vuint32m2_t test_vreinterpret_v_f32m2_u32m2(vfloat32m2_t src) {
365   return vreinterpret_u32m2(src);
366 }
367 
368 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32m4_u32m4(
369 // CHECK-RV64-NEXT:  entry:
370 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x float> [[SRC:%.*]] to <vscale x 8 x i32>
371 // CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
372 //
test_vreinterpret_v_f32m4_u32m4(vfloat32m4_t src)373 vuint32m4_t test_vreinterpret_v_f32m4_u32m4(vfloat32m4_t src) {
374   return vreinterpret_u32m4(src);
375 }
376 
377 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32m8_u32m8(
378 // CHECK-RV64-NEXT:  entry:
379 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x float> [[SRC:%.*]] to <vscale x 16 x i32>
380 // CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
381 //
test_vreinterpret_v_f32m8_u32m8(vfloat32m8_t src)382 vuint32m8_t test_vreinterpret_v_f32m8_u32m8(vfloat32m8_t src) {
383   return vreinterpret_u32m8(src);
384 }
385 
386 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32mf2_f32mf2(
387 // CHECK-RV64-NEXT:  entry:
388 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 1 x float>
389 // CHECK-RV64-NEXT:    ret <vscale x 1 x float> [[TMP0]]
390 //
test_vreinterpret_v_i32mf2_f32mf2(vint32mf2_t src)391 vfloat32mf2_t test_vreinterpret_v_i32mf2_f32mf2(vint32mf2_t src) {
392   return vreinterpret_f32mf2(src);
393 }
394 
395 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m1_f32m1(
396 // CHECK-RV64-NEXT:  entry:
397 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 2 x float>
398 // CHECK-RV64-NEXT:    ret <vscale x 2 x float> [[TMP0]]
399 //
test_vreinterpret_v_i32m1_f32m1(vint32m1_t src)400 vfloat32m1_t test_vreinterpret_v_i32m1_f32m1(vint32m1_t src) {
401   return vreinterpret_f32m1(src);
402 }
403 
404 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m2_f32m2(
405 // CHECK-RV64-NEXT:  entry:
406 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 4 x float>
407 // CHECK-RV64-NEXT:    ret <vscale x 4 x float> [[TMP0]]
408 //
test_vreinterpret_v_i32m2_f32m2(vint32m2_t src)409 vfloat32m2_t test_vreinterpret_v_i32m2_f32m2(vint32m2_t src) {
410   return vreinterpret_f32m2(src);
411 }
412 
413 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m4_f32m4(
414 // CHECK-RV64-NEXT:  entry:
415 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 8 x float>
416 // CHECK-RV64-NEXT:    ret <vscale x 8 x float> [[TMP0]]
417 //
test_vreinterpret_v_i32m4_f32m4(vint32m4_t src)418 vfloat32m4_t test_vreinterpret_v_i32m4_f32m4(vint32m4_t src) {
419   return vreinterpret_f32m4(src);
420 }
421 
422 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m8_f32m8(
423 // CHECK-RV64-NEXT:  entry:
424 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 16 x float>
425 // CHECK-RV64-NEXT:    ret <vscale x 16 x float> [[TMP0]]
426 //
test_vreinterpret_v_i32m8_f32m8(vint32m8_t src)427 vfloat32m8_t test_vreinterpret_v_i32m8_f32m8(vint32m8_t src) {
428   return vreinterpret_f32m8(src);
429 }
430 
431 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32mf2_f32mf2(
432 // CHECK-RV64-NEXT:  entry:
433 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 1 x float>
434 // CHECK-RV64-NEXT:    ret <vscale x 1 x float> [[TMP0]]
435 //
test_vreinterpret_v_u32mf2_f32mf2(vuint32mf2_t src)436 vfloat32mf2_t test_vreinterpret_v_u32mf2_f32mf2(vuint32mf2_t src) {
437   return vreinterpret_f32mf2(src);
438 }
439 
440 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m1_f32m1(
441 // CHECK-RV64-NEXT:  entry:
442 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 2 x float>
443 // CHECK-RV64-NEXT:    ret <vscale x 2 x float> [[TMP0]]
444 //
test_vreinterpret_v_u32m1_f32m1(vuint32m1_t src)445 vfloat32m1_t test_vreinterpret_v_u32m1_f32m1(vuint32m1_t src) {
446   return vreinterpret_f32m1(src);
447 }
448 
449 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m2_f32m2(
450 // CHECK-RV64-NEXT:  entry:
451 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 4 x float>
452 // CHECK-RV64-NEXT:    ret <vscale x 4 x float> [[TMP0]]
453 //
test_vreinterpret_v_u32m2_f32m2(vuint32m2_t src)454 vfloat32m2_t test_vreinterpret_v_u32m2_f32m2(vuint32m2_t src) {
455   return vreinterpret_f32m2(src);
456 }
457 
458 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m4_f32m4(
459 // CHECK-RV64-NEXT:  entry:
460 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 8 x float>
461 // CHECK-RV64-NEXT:    ret <vscale x 8 x float> [[TMP0]]
462 //
test_vreinterpret_v_u32m4_f32m4(vuint32m4_t src)463 vfloat32m4_t test_vreinterpret_v_u32m4_f32m4(vuint32m4_t src) {
464   return vreinterpret_f32m4(src);
465 }
466 
467 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m8_f32m8(
468 // CHECK-RV64-NEXT:  entry:
469 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 16 x float>
470 // CHECK-RV64-NEXT:    ret <vscale x 16 x float> [[TMP0]]
471 //
test_vreinterpret_v_u32m8_f32m8(vuint32m8_t src)472 vfloat32m8_t test_vreinterpret_v_u32m8_f32m8(vuint32m8_t src) {
473   return vreinterpret_f32m8(src);
474 }
475 
476 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m1_u64m1(
477 // CHECK-RV64-NEXT:  entry:
478 // CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[SRC:%.*]]
479 //
test_vreinterpret_v_i64m1_u64m1(vint64m1_t src)480 vuint64m1_t test_vreinterpret_v_i64m1_u64m1(vint64m1_t src) {
481   return vreinterpret_u64m1(src);
482 }
483 
484 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m2_u64m2(
485 // CHECK-RV64-NEXT:  entry:
486 // CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[SRC:%.*]]
487 //
test_vreinterpret_v_i64m2_u64m2(vint64m2_t src)488 vuint64m2_t test_vreinterpret_v_i64m2_u64m2(vint64m2_t src) {
489   return vreinterpret_u64m2(src);
490 }
491 
492 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m4_u64m4(
493 // CHECK-RV64-NEXT:  entry:
494 // CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[SRC:%.*]]
495 //
test_vreinterpret_v_i64m4_u64m4(vint64m4_t src)496 vuint64m4_t test_vreinterpret_v_i64m4_u64m4(vint64m4_t src) {
497   return vreinterpret_u64m4(src);
498 }
499 
500 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m8_u64m8(
501 // CHECK-RV64-NEXT:  entry:
502 // CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[SRC:%.*]]
503 //
test_vreinterpret_v_i64m8_u64m8(vint64m8_t src)504 vuint64m8_t test_vreinterpret_v_i64m8_u64m8(vint64m8_t src) {
505   return vreinterpret_u64m8(src);
506 }
507 
508 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m1_i64m1(
509 // CHECK-RV64-NEXT:  entry:
510 // CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[SRC:%.*]]
511 //
test_vreinterpret_v_u64m1_i64m1(vuint64m1_t src)512 vint64m1_t test_vreinterpret_v_u64m1_i64m1(vuint64m1_t src) {
513   return vreinterpret_i64m1(src);
514 }
515 
516 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m2_i64m2(
517 // CHECK-RV64-NEXT:  entry:
518 // CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[SRC:%.*]]
519 //
test_vreinterpret_v_u64m2_i64m2(vuint64m2_t src)520 vint64m2_t test_vreinterpret_v_u64m2_i64m2(vuint64m2_t src) {
521   return vreinterpret_i64m2(src);
522 }
523 
524 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m4_i64m4(
525 // CHECK-RV64-NEXT:  entry:
526 // CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[SRC:%.*]]
527 //
test_vreinterpret_v_u64m4_i64m4(vuint64m4_t src)528 vint64m4_t test_vreinterpret_v_u64m4_i64m4(vuint64m4_t src) {
529   return vreinterpret_i64m4(src);
530 }
531 
532 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m8_i64m8(
533 // CHECK-RV64-NEXT:  entry:
534 // CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[SRC:%.*]]
535 //
test_vreinterpret_v_u64m8_i64m8(vuint64m8_t src)536 vint64m8_t test_vreinterpret_v_u64m8_i64m8(vuint64m8_t src) {
537   return vreinterpret_i64m8(src);
538 }
539 
540 // CHECK-RV64-LABEL: @test_vreinterpret_v_f64m1_i64m1(
541 // CHECK-RV64-NEXT:  entry:
542 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x double> [[SRC:%.*]] to <vscale x 1 x i64>
543 // CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
544 //
test_vreinterpret_v_f64m1_i64m1(vfloat64m1_t src)545 vint64m1_t test_vreinterpret_v_f64m1_i64m1(vfloat64m1_t src) {
546   return vreinterpret_i64m1(src);
547 }
548 
549 // CHECK-RV64-LABEL: @test_vreinterpret_v_f64m2_i64m2(
550 // CHECK-RV64-NEXT:  entry:
551 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x double> [[SRC:%.*]] to <vscale x 2 x i64>
552 // CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
553 //
test_vreinterpret_v_f64m2_i64m2(vfloat64m2_t src)554 vint64m2_t test_vreinterpret_v_f64m2_i64m2(vfloat64m2_t src) {
555   return vreinterpret_i64m2(src);
556 }
557 
558 // CHECK-RV64-LABEL: @test_vreinterpret_v_f64m4_i64m4(
559 // CHECK-RV64-NEXT:  entry:
560 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x double> [[SRC:%.*]] to <vscale x 4 x i64>
561 // CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
562 //
test_vreinterpret_v_f64m4_i64m4(vfloat64m4_t src)563 vint64m4_t test_vreinterpret_v_f64m4_i64m4(vfloat64m4_t src) {
564   return vreinterpret_i64m4(src);
565 }
566 
567 // CHECK-RV64-LABEL: @test_vreinterpret_v_f64m8_i64m8(
568 // CHECK-RV64-NEXT:  entry:
569 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x double> [[SRC:%.*]] to <vscale x 8 x i64>
570 // CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
571 //
test_vreinterpret_v_f64m8_i64m8(vfloat64m8_t src)572 vint64m8_t test_vreinterpret_v_f64m8_i64m8(vfloat64m8_t src) {
573   return vreinterpret_i64m8(src);
574 }
575 
576 // CHECK-RV64-LABEL: @test_vreinterpret_v_f64m1_u64m1(
577 // CHECK-RV64-NEXT:  entry:
578 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x double> [[SRC:%.*]] to <vscale x 1 x i64>
579 // CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
580 //
test_vreinterpret_v_f64m1_u64m1(vfloat64m1_t src)581 vuint64m1_t test_vreinterpret_v_f64m1_u64m1(vfloat64m1_t src) {
582   return vreinterpret_u64m1(src);
583 }
584 
585 // CHECK-RV64-LABEL: @test_vreinterpret_v_f64m2_u64m2(
586 // CHECK-RV64-NEXT:  entry:
587 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x double> [[SRC:%.*]] to <vscale x 2 x i64>
588 // CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
589 //
test_vreinterpret_v_f64m2_u64m2(vfloat64m2_t src)590 vuint64m2_t test_vreinterpret_v_f64m2_u64m2(vfloat64m2_t src) {
591   return vreinterpret_u64m2(src);
592 }
593 
594 // CHECK-RV64-LABEL: @test_vreinterpret_v_f64m4_u64m4(
595 // CHECK-RV64-NEXT:  entry:
596 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x double> [[SRC:%.*]] to <vscale x 4 x i64>
597 // CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
598 //
test_vreinterpret_v_f64m4_u64m4(vfloat64m4_t src)599 vuint64m4_t test_vreinterpret_v_f64m4_u64m4(vfloat64m4_t src) {
600   return vreinterpret_u64m4(src);
601 }
602 
603 // CHECK-RV64-LABEL: @test_vreinterpret_v_f64m8_u64m8(
604 // CHECK-RV64-NEXT:  entry:
605 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x double> [[SRC:%.*]] to <vscale x 8 x i64>
606 // CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
607 //
test_vreinterpret_v_f64m8_u64m8(vfloat64m8_t src)608 vuint64m8_t test_vreinterpret_v_f64m8_u64m8(vfloat64m8_t src) {
609   return vreinterpret_u64m8(src);
610 }
611 
612 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m1_f64m1(
613 // CHECK-RV64-NEXT:  entry:
614 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 1 x double>
615 // CHECK-RV64-NEXT:    ret <vscale x 1 x double> [[TMP0]]
616 //
test_vreinterpret_v_i64m1_f64m1(vint64m1_t src)617 vfloat64m1_t test_vreinterpret_v_i64m1_f64m1(vint64m1_t src) {
618   return vreinterpret_f64m1(src);
619 }
620 
621 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m2_f64m2(
622 // CHECK-RV64-NEXT:  entry:
623 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 2 x double>
624 // CHECK-RV64-NEXT:    ret <vscale x 2 x double> [[TMP0]]
625 //
test_vreinterpret_v_i64m2_f64m2(vint64m2_t src)626 vfloat64m2_t test_vreinterpret_v_i64m2_f64m2(vint64m2_t src) {
627   return vreinterpret_f64m2(src);
628 }
629 
630 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m4_f64m4(
631 // CHECK-RV64-NEXT:  entry:
632 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 4 x double>
633 // CHECK-RV64-NEXT:    ret <vscale x 4 x double> [[TMP0]]
634 //
test_vreinterpret_v_i64m4_f64m4(vint64m4_t src)635 vfloat64m4_t test_vreinterpret_v_i64m4_f64m4(vint64m4_t src) {
636   return vreinterpret_f64m4(src);
637 }
638 
639 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m8_f64m8(
640 // CHECK-RV64-NEXT:  entry:
641 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 8 x double>
642 // CHECK-RV64-NEXT:    ret <vscale x 8 x double> [[TMP0]]
643 //
test_vreinterpret_v_i64m8_f64m8(vint64m8_t src)644 vfloat64m8_t test_vreinterpret_v_i64m8_f64m8(vint64m8_t src) {
645   return vreinterpret_f64m8(src);
646 }
647 
648 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m1_f64m1(
649 // CHECK-RV64-NEXT:  entry:
650 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 1 x double>
651 // CHECK-RV64-NEXT:    ret <vscale x 1 x double> [[TMP0]]
652 //
test_vreinterpret_v_u64m1_f64m1(vuint64m1_t src)653 vfloat64m1_t test_vreinterpret_v_u64m1_f64m1(vuint64m1_t src) {
654   return vreinterpret_f64m1(src);
655 }
656 
657 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m2_f64m2(
658 // CHECK-RV64-NEXT:  entry:
659 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 2 x double>
660 // CHECK-RV64-NEXT:    ret <vscale x 2 x double> [[TMP0]]
661 //
test_vreinterpret_v_u64m2_f64m2(vuint64m2_t src)662 vfloat64m2_t test_vreinterpret_v_u64m2_f64m2(vuint64m2_t src) {
663   return vreinterpret_f64m2(src);
664 }
665 
666 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m4_f64m4(
667 // CHECK-RV64-NEXT:  entry:
668 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 4 x double>
669 // CHECK-RV64-NEXT:    ret <vscale x 4 x double> [[TMP0]]
670 //
test_vreinterpret_v_u64m4_f64m4(vuint64m4_t src)671 vfloat64m4_t test_vreinterpret_v_u64m4_f64m4(vuint64m4_t src) {
672   return vreinterpret_f64m4(src);
673 }
674 
675 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m8_f64m8(
676 // CHECK-RV64-NEXT:  entry:
677 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 8 x double>
678 // CHECK-RV64-NEXT:    ret <vscale x 8 x double> [[TMP0]]
679 //
test_vreinterpret_v_u64m8_f64m8(vuint64m8_t src)680 vfloat64m8_t test_vreinterpret_v_u64m8_f64m8(vuint64m8_t src) {
681   return vreinterpret_f64m8(src);
682 }
683 
684 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf4_i16mf4(
685 // CHECK-RV64-NEXT:  entry:
686 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i8> [[SRC:%.*]] to <vscale x 1 x i16>
687 // CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
688 //
test_vreinterpret_v_i8mf4_i16mf4(vint8mf4_t src)689 vint16mf4_t test_vreinterpret_v_i8mf4_i16mf4(vint8mf4_t src) {
690   return vreinterpret_i16mf4(src);
691 }
692 
693 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf2_i16mf2(
694 // CHECK-RV64-NEXT:  entry:
695 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i8> [[SRC:%.*]] to <vscale x 2 x i16>
696 // CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
697 //
test_vreinterpret_v_i8mf2_i16mf2(vint8mf2_t src)698 vint16mf2_t test_vreinterpret_v_i8mf2_i16mf2(vint8mf2_t src) {
699   return vreinterpret_i16mf2(src);
700 }
701 
702 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m1_i16m1(
703 // CHECK-RV64-NEXT:  entry:
704 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 4 x i16>
705 // CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
706 //
test_vreinterpret_v_i8m1_i16m1(vint8m1_t src)707 vint16m1_t test_vreinterpret_v_i8m1_i16m1(vint8m1_t src) {
708   return vreinterpret_i16m1(src);
709 }
710 
711 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m2_i16m2(
712 // CHECK-RV64-NEXT:  entry:
713 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 8 x i16>
714 // CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
715 //
test_vreinterpret_v_i8m2_i16m2(vint8m2_t src)716 vint16m2_t test_vreinterpret_v_i8m2_i16m2(vint8m2_t src) {
717   return vreinterpret_i16m2(src);
718 }
719 
720 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m4_i16m4(
721 // CHECK-RV64-NEXT:  entry:
722 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 16 x i16>
723 // CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
724 //
test_vreinterpret_v_i8m4_i16m4(vint8m4_t src)725 vint16m4_t test_vreinterpret_v_i8m4_i16m4(vint8m4_t src) {
726   return vreinterpret_i16m4(src);
727 }
728 
729 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m8_i16m8(
730 // CHECK-RV64-NEXT:  entry:
731 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 32 x i16>
732 // CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
733 //
test_vreinterpret_v_i8m8_i16m8(vint8m8_t src)734 vint16m8_t test_vreinterpret_v_i8m8_i16m8(vint8m8_t src) {
735   return vreinterpret_i16m8(src);
736 }
737 
738 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf4_u16mf4(
739 // CHECK-RV64-NEXT:  entry:
740 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i8> [[SRC:%.*]] to <vscale x 1 x i16>
741 // CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
742 //
test_vreinterpret_v_u8mf4_u16mf4(vuint8mf4_t src)743 vuint16mf4_t test_vreinterpret_v_u8mf4_u16mf4(vuint8mf4_t src) {
744   return vreinterpret_u16mf4(src);
745 }
746 
747 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf2_u16mf2(
748 // CHECK-RV64-NEXT:  entry:
749 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i8> [[SRC:%.*]] to <vscale x 2 x i16>
750 // CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
751 //
test_vreinterpret_v_u8mf2_u16mf2(vuint8mf2_t src)752 vuint16mf2_t test_vreinterpret_v_u8mf2_u16mf2(vuint8mf2_t src) {
753   return vreinterpret_u16mf2(src);
754 }
755 
756 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m1_u16m1(
757 // CHECK-RV64-NEXT:  entry:
758 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 4 x i16>
759 // CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
760 //
test_vreinterpret_v_u8m1_u16m1(vuint8m1_t src)761 vuint16m1_t test_vreinterpret_v_u8m1_u16m1(vuint8m1_t src) {
762   return vreinterpret_u16m1(src);
763 }
764 
765 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m2_u16m2(
766 // CHECK-RV64-NEXT:  entry:
767 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 8 x i16>
768 // CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
769 //
test_vreinterpret_v_u8m2_u16m2(vuint8m2_t src)770 vuint16m2_t test_vreinterpret_v_u8m2_u16m2(vuint8m2_t src) {
771   return vreinterpret_u16m2(src);
772 }
773 
774 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m4_u16m4(
775 // CHECK-RV64-NEXT:  entry:
776 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 16 x i16>
777 // CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
778 //
test_vreinterpret_v_u8m4_u16m4(vuint8m4_t src)779 vuint16m4_t test_vreinterpret_v_u8m4_u16m4(vuint8m4_t src) {
780   return vreinterpret_u16m4(src);
781 }
782 
783 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m8_u16m8(
784 // CHECK-RV64-NEXT:  entry:
785 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 32 x i16>
786 // CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
787 //
test_vreinterpret_v_u8m8_u16m8(vuint8m8_t src)788 vuint16m8_t test_vreinterpret_v_u8m8_u16m8(vuint8m8_t src) {
789   return vreinterpret_u16m8(src);
790 }
791 
792 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf2_i32mf2(
793 // CHECK-RV64-NEXT:  entry:
794 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i8> [[SRC:%.*]] to <vscale x 1 x i32>
795 // CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
796 //
test_vreinterpret_v_i8mf2_i32mf2(vint8mf2_t src)797 vint32mf2_t test_vreinterpret_v_i8mf2_i32mf2(vint8mf2_t src) {
798   return vreinterpret_i32mf2(src);
799 }
800 
801 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m1_i32m1(
802 // CHECK-RV64-NEXT:  entry:
803 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 2 x i32>
804 // CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
805 //
test_vreinterpret_v_i8m1_i32m1(vint8m1_t src)806 vint32m1_t test_vreinterpret_v_i8m1_i32m1(vint8m1_t src) {
807   return vreinterpret_i32m1(src);
808 }
809 
810 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m2_i32m2(
811 // CHECK-RV64-NEXT:  entry:
812 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 4 x i32>
813 // CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
814 //
test_vreinterpret_v_i8m2_i32m2(vint8m2_t src)815 vint32m2_t test_vreinterpret_v_i8m2_i32m2(vint8m2_t src) {
816   return vreinterpret_i32m2(src);
817 }
818 
819 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m4_i32m4(
820 // CHECK-RV64-NEXT:  entry:
821 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 8 x i32>
822 // CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
823 //
test_vreinterpret_v_i8m4_i32m4(vint8m4_t src)824 vint32m4_t test_vreinterpret_v_i8m4_i32m4(vint8m4_t src) {
825   return vreinterpret_i32m4(src);
826 }
827 
828 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m8_i32m8(
829 // CHECK-RV64-NEXT:  entry:
830 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 16 x i32>
831 // CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
832 //
test_vreinterpret_v_i8m8_i32m8(vint8m8_t src)833 vint32m8_t test_vreinterpret_v_i8m8_i32m8(vint8m8_t src) {
834   return vreinterpret_i32m8(src);
835 }
836 
837 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf2_u32mf2(
838 // CHECK-RV64-NEXT:  entry:
839 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i8> [[SRC:%.*]] to <vscale x 1 x i32>
840 // CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
841 //
test_vreinterpret_v_u8mf2_u32mf2(vuint8mf2_t src)842 vuint32mf2_t test_vreinterpret_v_u8mf2_u32mf2(vuint8mf2_t src) {
843   return vreinterpret_u32mf2(src);
844 }
845 
846 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m1_u32m1(
847 // CHECK-RV64-NEXT:  entry:
848 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 2 x i32>
849 // CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
850 //
test_vreinterpret_v_u8m1_u32m1(vuint8m1_t src)851 vuint32m1_t test_vreinterpret_v_u8m1_u32m1(vuint8m1_t src) {
852   return vreinterpret_u32m1(src);
853 }
854 
855 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m2_u32m2(
856 // CHECK-RV64-NEXT:  entry:
857 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 4 x i32>
858 // CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
859 //
test_vreinterpret_v_u8m2_u32m2(vuint8m2_t src)860 vuint32m2_t test_vreinterpret_v_u8m2_u32m2(vuint8m2_t src) {
861   return vreinterpret_u32m2(src);
862 }
863 
864 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m4_u32m4(
865 // CHECK-RV64-NEXT:  entry:
866 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 8 x i32>
867 // CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
868 //
test_vreinterpret_v_u8m4_u32m4(vuint8m4_t src)869 vuint32m4_t test_vreinterpret_v_u8m4_u32m4(vuint8m4_t src) {
870   return vreinterpret_u32m4(src);
871 }
872 
873 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m8_u32m8(
874 // CHECK-RV64-NEXT:  entry:
875 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 16 x i32>
876 // CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
877 //
test_vreinterpret_v_u8m8_u32m8(vuint8m8_t src)878 vuint32m8_t test_vreinterpret_v_u8m8_u32m8(vuint8m8_t src) {
879   return vreinterpret_u32m8(src);
880 }
881 
882 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m1_i64m1(
883 // CHECK-RV64-NEXT:  entry:
884 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 1 x i64>
885 // CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
886 //
test_vreinterpret_v_i8m1_i64m1(vint8m1_t src)887 vint64m1_t test_vreinterpret_v_i8m1_i64m1(vint8m1_t src) {
888   return vreinterpret_i64m1(src);
889 }
890 
891 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m2_i64m2(
892 // CHECK-RV64-NEXT:  entry:
893 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 2 x i64>
894 // CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
895 //
test_vreinterpret_v_i8m2_i64m2(vint8m2_t src)896 vint64m2_t test_vreinterpret_v_i8m2_i64m2(vint8m2_t src) {
897   return vreinterpret_i64m2(src);
898 }
899 
900 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m4_i64m4(
901 // CHECK-RV64-NEXT:  entry:
902 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 4 x i64>
903 // CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
904 //
test_vreinterpret_v_i8m4_i64m4(vint8m4_t src)905 vint64m4_t test_vreinterpret_v_i8m4_i64m4(vint8m4_t src) {
906   return vreinterpret_i64m4(src);
907 }
908 
909 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m8_i64m8(
910 // CHECK-RV64-NEXT:  entry:
911 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 8 x i64>
912 // CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
913 //
test_vreinterpret_v_i8m8_i64m8(vint8m8_t src)914 vint64m8_t test_vreinterpret_v_i8m8_i64m8(vint8m8_t src) {
915   return vreinterpret_i64m8(src);
916 }
917 
918 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m1_u64m1(
919 // CHECK-RV64-NEXT:  entry:
920 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 1 x i64>
921 // CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
922 //
test_vreinterpret_v_u8m1_u64m1(vuint8m1_t src)923 vuint64m1_t test_vreinterpret_v_u8m1_u64m1(vuint8m1_t src) {
924   return vreinterpret_u64m1(src);
925 }
926 
927 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m2_u64m2(
928 // CHECK-RV64-NEXT:  entry:
929 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 2 x i64>
930 // CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
931 //
test_vreinterpret_v_u8m2_u64m2(vuint8m2_t src)932 vuint64m2_t test_vreinterpret_v_u8m2_u64m2(vuint8m2_t src) {
933   return vreinterpret_u64m2(src);
934 }
935 
936 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m4_u64m4(
937 // CHECK-RV64-NEXT:  entry:
938 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 4 x i64>
939 // CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
940 //
test_vreinterpret_v_u8m4_u64m4(vuint8m4_t src)941 vuint64m4_t test_vreinterpret_v_u8m4_u64m4(vuint8m4_t src) {
942   return vreinterpret_u64m4(src);
943 }
944 
945 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m8_u64m8(
946 // CHECK-RV64-NEXT:  entry:
947 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 8 x i64>
948 // CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
949 //
test_vreinterpret_v_u8m8_u64m8(vuint8m8_t src)950 vuint64m8_t test_vreinterpret_v_u8m8_u64m8(vuint8m8_t src) {
951   return vreinterpret_u64m8(src);
952 }
953 
954 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16mf4_i8mf4(
955 // CHECK-RV64-NEXT:  entry:
956 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i16> [[SRC:%.*]] to <vscale x 2 x i8>
957 // CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
958 //
test_vreinterpret_v_i16mf4_i8mf4(vint16mf4_t src)959 vint8mf4_t test_vreinterpret_v_i16mf4_i8mf4(vint16mf4_t src) {
960   return vreinterpret_i8mf4(src);
961 }
962 
963 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16mf2_i8mf2(
964 // CHECK-RV64-NEXT:  entry:
965 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC:%.*]] to <vscale x 4 x i8>
966 // CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
967 //
test_vreinterpret_v_i16mf2_i8mf2(vint16mf2_t src)968 vint8mf2_t test_vreinterpret_v_i16mf2_i8mf2(vint16mf2_t src) {
969   return vreinterpret_i8mf2(src);
970 }
971 
972 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m1_i8m1(
973 // CHECK-RV64-NEXT:  entry:
974 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 8 x i8>
975 // CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
976 //
test_vreinterpret_v_i16m1_i8m1(vint16m1_t src)977 vint8m1_t test_vreinterpret_v_i16m1_i8m1(vint16m1_t src) {
978   return vreinterpret_i8m1(src);
979 }
980 
981 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m2_i8m2(
982 // CHECK-RV64-NEXT:  entry:
983 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 16 x i8>
984 // CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
985 //
test_vreinterpret_v_i16m2_i8m2(vint16m2_t src)986 vint8m2_t test_vreinterpret_v_i16m2_i8m2(vint16m2_t src) {
987   return vreinterpret_i8m2(src);
988 }
989 
990 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m4_i8m4(
991 // CHECK-RV64-NEXT:  entry:
992 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 32 x i8>
993 // CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
994 //
test_vreinterpret_v_i16m4_i8m4(vint16m4_t src)995 vint8m4_t test_vreinterpret_v_i16m4_i8m4(vint16m4_t src) {
996   return vreinterpret_i8m4(src);
997 }
998 
999 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m8_i8m8(
1000 // CHECK-RV64-NEXT:  entry:
1001 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 64 x i8>
1002 // CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
1003 //
test_vreinterpret_v_i16m8_i8m8(vint16m8_t src)1004 vint8m8_t test_vreinterpret_v_i16m8_i8m8(vint16m8_t src) {
1005   return vreinterpret_i8m8(src);
1006 }
1007 
1008 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16mf4_u8mf4(
1009 // CHECK-RV64-NEXT:  entry:
1010 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i16> [[SRC:%.*]] to <vscale x 2 x i8>
1011 // CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
1012 //
test_vreinterpret_v_u16mf4_u8mf4(vuint16mf4_t src)1013 vuint8mf4_t test_vreinterpret_v_u16mf4_u8mf4(vuint16mf4_t src) {
1014   return vreinterpret_u8mf4(src);
1015 }
1016 
1017 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16mf2_u8mf2(
1018 // CHECK-RV64-NEXT:  entry:
1019 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC:%.*]] to <vscale x 4 x i8>
1020 // CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
1021 //
test_vreinterpret_v_u16mf2_u8mf2(vuint16mf2_t src)1022 vuint8mf2_t test_vreinterpret_v_u16mf2_u8mf2(vuint16mf2_t src) {
1023   return vreinterpret_u8mf2(src);
1024 }
1025 
1026 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m1_u8m1(
1027 // CHECK-RV64-NEXT:  entry:
1028 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 8 x i8>
1029 // CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
1030 //
test_vreinterpret_v_u16m1_u8m1(vuint16m1_t src)1031 vuint8m1_t test_vreinterpret_v_u16m1_u8m1(vuint16m1_t src) {
1032   return vreinterpret_u8m1(src);
1033 }
1034 
1035 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m2_u8m2(
1036 // CHECK-RV64-NEXT:  entry:
1037 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 16 x i8>
1038 // CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
1039 //
test_vreinterpret_v_u16m2_u8m2(vuint16m2_t src)1040 vuint8m2_t test_vreinterpret_v_u16m2_u8m2(vuint16m2_t src) {
1041   return vreinterpret_u8m2(src);
1042 }
1043 
1044 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m4_u8m4(
1045 // CHECK-RV64-NEXT:  entry:
1046 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 32 x i8>
1047 // CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
1048 //
test_vreinterpret_v_u16m4_u8m4(vuint16m4_t src)1049 vuint8m4_t test_vreinterpret_v_u16m4_u8m4(vuint16m4_t src) {
1050   return vreinterpret_u8m4(src);
1051 }
1052 
1053 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m8_u8m8(
1054 // CHECK-RV64-NEXT:  entry:
1055 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 64 x i8>
1056 // CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
1057 //
test_vreinterpret_v_u16m8_u8m8(vuint16m8_t src)1058 vuint8m8_t test_vreinterpret_v_u16m8_u8m8(vuint16m8_t src) {
1059   return vreinterpret_u8m8(src);
1060 }
1061 
1062 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16mf2_i32mf2(
1063 // CHECK-RV64-NEXT:  entry:
1064 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC:%.*]] to <vscale x 1 x i32>
1065 // CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
1066 //
test_vreinterpret_v_i16mf2_i32mf2(vint16mf2_t src)1067 vint32mf2_t test_vreinterpret_v_i16mf2_i32mf2(vint16mf2_t src) {
1068   return vreinterpret_i32mf2(src);
1069 }
1070 
1071 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m1_i32m1(
1072 // CHECK-RV64-NEXT:  entry:
1073 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 2 x i32>
1074 // CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
1075 //
test_vreinterpret_v_i16m1_i32m1(vint16m1_t src)1076 vint32m1_t test_vreinterpret_v_i16m1_i32m1(vint16m1_t src) {
1077   return vreinterpret_i32m1(src);
1078 }
1079 
1080 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m2_i32m2(
1081 // CHECK-RV64-NEXT:  entry:
1082 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 4 x i32>
1083 // CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
1084 //
test_vreinterpret_v_i16m2_i32m2(vint16m2_t src)1085 vint32m2_t test_vreinterpret_v_i16m2_i32m2(vint16m2_t src) {
1086   return vreinterpret_i32m2(src);
1087 }
1088 
1089 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m4_i32m4(
1090 // CHECK-RV64-NEXT:  entry:
1091 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 8 x i32>
1092 // CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
1093 //
test_vreinterpret_v_i16m4_i32m4(vint16m4_t src)1094 vint32m4_t test_vreinterpret_v_i16m4_i32m4(vint16m4_t src) {
1095   return vreinterpret_i32m4(src);
1096 }
1097 
1098 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m8_i32m8(
1099 // CHECK-RV64-NEXT:  entry:
1100 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 16 x i32>
1101 // CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
1102 //
test_vreinterpret_v_i16m8_i32m8(vint16m8_t src)1103 vint32m8_t test_vreinterpret_v_i16m8_i32m8(vint16m8_t src) {
1104   return vreinterpret_i32m8(src);
1105 }
1106 
1107 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16mf2_u32mf2(
1108 // CHECK-RV64-NEXT:  entry:
1109 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC:%.*]] to <vscale x 1 x i32>
1110 // CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
1111 //
test_vreinterpret_v_u16mf2_u32mf2(vuint16mf2_t src)1112 vuint32mf2_t test_vreinterpret_v_u16mf2_u32mf2(vuint16mf2_t src) {
1113   return vreinterpret_u32mf2(src);
1114 }
1115 
1116 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m1_u32m1(
1117 // CHECK-RV64-NEXT:  entry:
1118 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 2 x i32>
1119 // CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
1120 //
test_vreinterpret_v_u16m1_u32m1(vuint16m1_t src)1121 vuint32m1_t test_vreinterpret_v_u16m1_u32m1(vuint16m1_t src) {
1122   return vreinterpret_u32m1(src);
1123 }
1124 
1125 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m2_u32m2(
1126 // CHECK-RV64-NEXT:  entry:
1127 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 4 x i32>
1128 // CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
1129 //
test_vreinterpret_v_u16m2_u32m2(vuint16m2_t src)1130 vuint32m2_t test_vreinterpret_v_u16m2_u32m2(vuint16m2_t src) {
1131   return vreinterpret_u32m2(src);
1132 }
1133 
1134 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m4_u32m4(
1135 // CHECK-RV64-NEXT:  entry:
1136 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 8 x i32>
1137 // CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
1138 //
test_vreinterpret_v_u16m4_u32m4(vuint16m4_t src)1139 vuint32m4_t test_vreinterpret_v_u16m4_u32m4(vuint16m4_t src) {
1140   return vreinterpret_u32m4(src);
1141 }
1142 
1143 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m8_u32m8(
1144 // CHECK-RV64-NEXT:  entry:
1145 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 16 x i32>
1146 // CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
1147 //
test_vreinterpret_v_u16m8_u32m8(vuint16m8_t src)1148 vuint32m8_t test_vreinterpret_v_u16m8_u32m8(vuint16m8_t src) {
1149   return vreinterpret_u32m8(src);
1150 }
1151 
1152 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m1_i64m1(
1153 // CHECK-RV64-NEXT:  entry:
1154 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 1 x i64>
1155 // CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
1156 //
test_vreinterpret_v_i16m1_i64m1(vint16m1_t src)1157 vint64m1_t test_vreinterpret_v_i16m1_i64m1(vint16m1_t src) {
1158   return vreinterpret_i64m1(src);
1159 }
1160 
1161 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m2_i64m2(
1162 // CHECK-RV64-NEXT:  entry:
1163 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 2 x i64>
1164 // CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
1165 //
test_vreinterpret_v_i16m2_i64m2(vint16m2_t src)1166 vint64m2_t test_vreinterpret_v_i16m2_i64m2(vint16m2_t src) {
1167   return vreinterpret_i64m2(src);
1168 }
1169 
1170 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m4_i64m4(
1171 // CHECK-RV64-NEXT:  entry:
1172 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 4 x i64>
1173 // CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
1174 //
test_vreinterpret_v_i16m4_i64m4(vint16m4_t src)1175 vint64m4_t test_vreinterpret_v_i16m4_i64m4(vint16m4_t src) {
1176   return vreinterpret_i64m4(src);
1177 }
1178 
1179 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m8_i64m8(
1180 // CHECK-RV64-NEXT:  entry:
1181 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 8 x i64>
1182 // CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
1183 //
test_vreinterpret_v_i16m8_i64m8(vint16m8_t src)1184 vint64m8_t test_vreinterpret_v_i16m8_i64m8(vint16m8_t src) {
1185   return vreinterpret_i64m8(src);
1186 }
1187 
1188 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m1_u64m1(
1189 // CHECK-RV64-NEXT:  entry:
1190 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 1 x i64>
1191 // CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
1192 //
test_vreinterpret_v_u16m1_u64m1(vuint16m1_t src)1193 vuint64m1_t test_vreinterpret_v_u16m1_u64m1(vuint16m1_t src) {
1194   return vreinterpret_u64m1(src);
1195 }
1196 
1197 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m2_u64m2(
1198 // CHECK-RV64-NEXT:  entry:
1199 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 2 x i64>
1200 // CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
1201 //
test_vreinterpret_v_u16m2_u64m2(vuint16m2_t src)1202 vuint64m2_t test_vreinterpret_v_u16m2_u64m2(vuint16m2_t src) {
1203   return vreinterpret_u64m2(src);
1204 }
1205 
1206 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m4_u64m4(
1207 // CHECK-RV64-NEXT:  entry:
1208 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 4 x i64>
1209 // CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
1210 //
test_vreinterpret_v_u16m4_u64m4(vuint16m4_t src)1211 vuint64m4_t test_vreinterpret_v_u16m4_u64m4(vuint16m4_t src) {
1212   return vreinterpret_u64m4(src);
1213 }
1214 
1215 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m8_u64m8(
1216 // CHECK-RV64-NEXT:  entry:
1217 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 8 x i64>
1218 // CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
1219 //
test_vreinterpret_v_u16m8_u64m8(vuint16m8_t src)1220 vuint64m8_t test_vreinterpret_v_u16m8_u64m8(vuint16m8_t src) {
1221   return vreinterpret_u64m8(src);
1222 }
1223 
1224 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32mf2_i8mf2(
1225 // CHECK-RV64-NEXT:  entry:
1226 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 4 x i8>
1227 // CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
1228 //
test_vreinterpret_v_i32mf2_i8mf2(vint32mf2_t src)1229 vint8mf2_t test_vreinterpret_v_i32mf2_i8mf2(vint32mf2_t src) {
1230   return vreinterpret_i8mf2(src);
1231 }
1232 
1233 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m1_i8m1(
1234 // CHECK-RV64-NEXT:  entry:
1235 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 8 x i8>
1236 // CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
1237 //
test_vreinterpret_v_i32m1_i8m1(vint32m1_t src)1238 vint8m1_t test_vreinterpret_v_i32m1_i8m1(vint32m1_t src) {
1239   return vreinterpret_i8m1(src);
1240 }
1241 
1242 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m2_i8m2(
1243 // CHECK-RV64-NEXT:  entry:
1244 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 16 x i8>
1245 // CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
1246 //
test_vreinterpret_v_i32m2_i8m2(vint32m2_t src)1247 vint8m2_t test_vreinterpret_v_i32m2_i8m2(vint32m2_t src) {
1248   return vreinterpret_i8m2(src);
1249 }
1250 
1251 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m4_i8m4(
1252 // CHECK-RV64-NEXT:  entry:
1253 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 32 x i8>
1254 // CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
1255 //
test_vreinterpret_v_i32m4_i8m4(vint32m4_t src)1256 vint8m4_t test_vreinterpret_v_i32m4_i8m4(vint32m4_t src) {
1257   return vreinterpret_i8m4(src);
1258 }
1259 
1260 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m8_i8m8(
1261 // CHECK-RV64-NEXT:  entry:
1262 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 64 x i8>
1263 // CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
1264 //
test_vreinterpret_v_i32m8_i8m8(vint32m8_t src)1265 vint8m8_t test_vreinterpret_v_i32m8_i8m8(vint32m8_t src) {
1266   return vreinterpret_i8m8(src);
1267 }
1268 
1269 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32mf2_u8mf2(
1270 // CHECK-RV64-NEXT:  entry:
1271 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 4 x i8>
1272 // CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
1273 //
test_vreinterpret_v_u32mf2_u8mf2(vuint32mf2_t src)1274 vuint8mf2_t test_vreinterpret_v_u32mf2_u8mf2(vuint32mf2_t src) {
1275   return vreinterpret_u8mf2(src);
1276 }
1277 
1278 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m1_u8m1(
1279 // CHECK-RV64-NEXT:  entry:
1280 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 8 x i8>
1281 // CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
1282 //
test_vreinterpret_v_u32m1_u8m1(vuint32m1_t src)1283 vuint8m1_t test_vreinterpret_v_u32m1_u8m1(vuint32m1_t src) {
1284   return vreinterpret_u8m1(src);
1285 }
1286 
1287 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m2_u8m2(
1288 // CHECK-RV64-NEXT:  entry:
1289 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 16 x i8>
1290 // CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
1291 //
test_vreinterpret_v_u32m2_u8m2(vuint32m2_t src)1292 vuint8m2_t test_vreinterpret_v_u32m2_u8m2(vuint32m2_t src) {
1293   return vreinterpret_u8m2(src);
1294 }
1295 
1296 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m4_u8m4(
1297 // CHECK-RV64-NEXT:  entry:
1298 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 32 x i8>
1299 // CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
1300 //
test_vreinterpret_v_u32m4_u8m4(vuint32m4_t src)1301 vuint8m4_t test_vreinterpret_v_u32m4_u8m4(vuint32m4_t src) {
1302   return vreinterpret_u8m4(src);
1303 }
1304 
1305 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m8_u8m8(
1306 // CHECK-RV64-NEXT:  entry:
1307 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 64 x i8>
1308 // CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
1309 //
test_vreinterpret_v_u32m8_u8m8(vuint32m8_t src)1310 vuint8m8_t test_vreinterpret_v_u32m8_u8m8(vuint32m8_t src) {
1311   return vreinterpret_u8m8(src);
1312 }
1313 
1314 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32mf2_i16mf2(
1315 // CHECK-RV64-NEXT:  entry:
1316 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 2 x i16>
1317 // CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
1318 //
test_vreinterpret_v_i32mf2_i16mf2(vint32mf2_t src)1319 vint16mf2_t test_vreinterpret_v_i32mf2_i16mf2(vint32mf2_t src) {
1320   return vreinterpret_i16mf2(src);
1321 }
1322 
1323 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m1_i16m1(
1324 // CHECK-RV64-NEXT:  entry:
1325 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 4 x i16>
1326 // CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
1327 //
test_vreinterpret_v_i32m1_i16m1(vint32m1_t src)1328 vint16m1_t test_vreinterpret_v_i32m1_i16m1(vint32m1_t src) {
1329   return vreinterpret_i16m1(src);
1330 }
1331 
1332 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m2_i16m2(
1333 // CHECK-RV64-NEXT:  entry:
1334 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 8 x i16>
1335 // CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
1336 //
test_vreinterpret_v_i32m2_i16m2(vint32m2_t src)1337 vint16m2_t test_vreinterpret_v_i32m2_i16m2(vint32m2_t src) {
1338   return vreinterpret_i16m2(src);
1339 }
1340 
1341 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m4_i16m4(
1342 // CHECK-RV64-NEXT:  entry:
1343 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 16 x i16>
1344 // CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
1345 //
test_vreinterpret_v_i32m4_i16m4(vint32m4_t src)1346 vint16m4_t test_vreinterpret_v_i32m4_i16m4(vint32m4_t src) {
1347   return vreinterpret_i16m4(src);
1348 }
1349 
1350 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m8_i16m8(
1351 // CHECK-RV64-NEXT:  entry:
1352 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 32 x i16>
1353 // CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
1354 //
test_vreinterpret_v_i32m8_i16m8(vint32m8_t src)1355 vint16m8_t test_vreinterpret_v_i32m8_i16m8(vint32m8_t src) {
1356   return vreinterpret_i16m8(src);
1357 }
1358 
1359 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32mf2_u16mf2(
1360 // CHECK-RV64-NEXT:  entry:
1361 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 2 x i16>
1362 // CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
1363 //
test_vreinterpret_v_u32mf2_u16mf2(vuint32mf2_t src)1364 vuint16mf2_t test_vreinterpret_v_u32mf2_u16mf2(vuint32mf2_t src) {
1365   return vreinterpret_u16mf2(src);
1366 }
1367 
1368 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m1_u16m1(
1369 // CHECK-RV64-NEXT:  entry:
1370 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 4 x i16>
1371 // CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
1372 //
test_vreinterpret_v_u32m1_u16m1(vuint32m1_t src)1373 vuint16m1_t test_vreinterpret_v_u32m1_u16m1(vuint32m1_t src) {
1374   return vreinterpret_u16m1(src);
1375 }
1376 
1377 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m2_u16m2(
1378 // CHECK-RV64-NEXT:  entry:
1379 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 8 x i16>
1380 // CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
1381 //
test_vreinterpret_v_u32m2_u16m2(vuint32m2_t src)1382 vuint16m2_t test_vreinterpret_v_u32m2_u16m2(vuint32m2_t src) {
1383   return vreinterpret_u16m2(src);
1384 }
1385 
1386 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m4_u16m4(
1387 // CHECK-RV64-NEXT:  entry:
1388 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 16 x i16>
1389 // CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
1390 //
test_vreinterpret_v_u32m4_u16m4(vuint32m4_t src)1391 vuint16m4_t test_vreinterpret_v_u32m4_u16m4(vuint32m4_t src) {
1392   return vreinterpret_u16m4(src);
1393 }
1394 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m8_u16m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 32 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
//
vuint16m8_t test_vreinterpret_v_u32m8_u16m8(vuint32m8_t src) {
  // u32m8 -> u16m8: same register contents; lowered to a single IR bitcast.
  return vreinterpret_u16m8(src);
}
1403 
// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m1_i64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vreinterpret_v_i32m1_i64m1(vint32m1_t src) {
  // i32m1 -> i64m1: same register contents; lowered to a single IR bitcast.
  return vreinterpret_i64m1(src);
}
1412 
// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m2_i64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 2 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vint64m2_t test_vreinterpret_v_i32m2_i64m2(vint32m2_t src) {
  // i32m2 -> i64m2: same register contents; lowered to a single IR bitcast.
  return vreinterpret_i64m2(src);
}
1421 
// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m4_i64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 4 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vint64m4_t test_vreinterpret_v_i32m4_i64m4(vint32m4_t src) {
  // i32m4 -> i64m4: same register contents; lowered to a single IR bitcast.
  return vreinterpret_i64m4(src);
}
1430 
// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m8_i64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 8 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vint64m8_t test_vreinterpret_v_i32m8_i64m8(vint32m8_t src) {
  // i32m8 -> i64m8: same register contents; lowered to a single IR bitcast.
  return vreinterpret_i64m8(src);
}
1439 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m1_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vreinterpret_v_u32m1_u64m1(vuint32m1_t src) {
  // u32m1 -> u64m1: same register contents; lowered to a single IR bitcast.
  return vreinterpret_u64m1(src);
}
1448 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m2_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 2 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vreinterpret_v_u32m2_u64m2(vuint32m2_t src) {
  // u32m2 -> u64m2: same register contents; lowered to a single IR bitcast.
  return vreinterpret_u64m2(src);
}
1457 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m4_u64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 4 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vreinterpret_v_u32m4_u64m4(vuint32m4_t src) {
  // u32m4 -> u64m4: same register contents; lowered to a single IR bitcast.
  return vreinterpret_u64m4(src);
}
1466 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m8_u64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 8 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vreinterpret_v_u32m8_u64m8(vuint32m8_t src) {
  // u32m8 -> u64m8: same register contents; lowered to a single IR bitcast.
  return vreinterpret_u64m8(src);
}
1475 
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m1_i8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 8 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vreinterpret_v_i64m1_i8m1(vint64m1_t src) {
  // i64m1 -> i8m1: same register contents; lowered to a single IR bitcast.
  return vreinterpret_i8m1(src);
}
1484 
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m2_i8m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 16 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
//
vint8m2_t test_vreinterpret_v_i64m2_i8m2(vint64m2_t src) {
  // i64m2 -> i8m2: same register contents; lowered to a single IR bitcast.
  return vreinterpret_i8m2(src);
}
1493 
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m4_i8m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 32 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
//
vint8m4_t test_vreinterpret_v_i64m4_i8m4(vint64m4_t src) {
  // i64m4 -> i8m4: same register contents; lowered to a single IR bitcast.
  return vreinterpret_i8m4(src);
}
1502 
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m8_i8m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 64 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
//
vint8m8_t test_vreinterpret_v_i64m8_i8m8(vint64m8_t src) {
  // i64m8 -> i8m8: same register contents; lowered to a single IR bitcast.
  return vreinterpret_i8m8(src);
}
1511 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m1_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 8 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vreinterpret_v_u64m1_u8m1(vuint64m1_t src) {
  // u64m1 -> u8m1: same register contents; lowered to a single IR bitcast.
  return vreinterpret_u8m1(src);
}
1520 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m2_u8m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 16 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
//
vuint8m2_t test_vreinterpret_v_u64m2_u8m2(vuint64m2_t src) {
  // u64m2 -> u8m2: same register contents; lowered to a single IR bitcast.
  return vreinterpret_u8m2(src);
}
1529 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m4_u8m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 32 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
//
vuint8m4_t test_vreinterpret_v_u64m4_u8m4(vuint64m4_t src) {
  // u64m4 -> u8m4: same register contents; lowered to a single IR bitcast.
  return vreinterpret_u8m4(src);
}
1538 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m8_u8m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 64 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
//
vuint8m8_t test_vreinterpret_v_u64m8_u8m8(vuint64m8_t src) {
  // u64m8 -> u8m8: same register contents; lowered to a single IR bitcast.
  return vreinterpret_u8m8(src);
}
1547 
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m1_i16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 4 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vint16m1_t test_vreinterpret_v_i64m1_i16m1(vint64m1_t src) {
  // i64m1 -> i16m1: same register contents; lowered to a single IR bitcast.
  return vreinterpret_i16m1(src);
}
1556 
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m2_i16m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 8 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
//
vint16m2_t test_vreinterpret_v_i64m2_i16m2(vint64m2_t src) {
  // i64m2 -> i16m2: same register contents; lowered to a single IR bitcast.
  return vreinterpret_i16m2(src);
}
1565 
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m4_i16m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 16 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
//
vint16m4_t test_vreinterpret_v_i64m4_i16m4(vint64m4_t src) {
  // i64m4 -> i16m4: same register contents; lowered to a single IR bitcast.
  return vreinterpret_i16m4(src);
}
1574 
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m8_i16m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 32 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
//
vint16m8_t test_vreinterpret_v_i64m8_i16m8(vint64m8_t src) {
  // i64m8 -> i16m8: same register contents; lowered to a single IR bitcast.
  return vreinterpret_i16m8(src);
}
1583 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m1_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 4 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vreinterpret_v_u64m1_u16m1(vuint64m1_t src) {
  // u64m1 -> u16m1: same register contents; lowered to a single IR bitcast.
  return vreinterpret_u16m1(src);
}
1592 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m2_u16m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 8 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
//
vuint16m2_t test_vreinterpret_v_u64m2_u16m2(vuint64m2_t src) {
  // u64m2 -> u16m2: same register contents; lowered to a single IR bitcast.
  return vreinterpret_u16m2(src);
}
1601 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m4_u16m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 16 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
//
vuint16m4_t test_vreinterpret_v_u64m4_u16m4(vuint64m4_t src) {
  // u64m4 -> u16m4: same register contents; lowered to a single IR bitcast.
  return vreinterpret_u16m4(src);
}
1610 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m8_u16m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 32 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
//
vuint16m8_t test_vreinterpret_v_u64m8_u16m8(vuint64m8_t src) {
  // u64m8 -> u16m8: same register contents; lowered to a single IR bitcast.
  return vreinterpret_u16m8(src);
}
1619 
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m1_i32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vint32m1_t test_vreinterpret_v_i64m1_i32m1(vint64m1_t src) {
  // i64m1 -> i32m1: same register contents; lowered to a single IR bitcast.
  return vreinterpret_i32m1(src);
}
1628 
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m2_i32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 4 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
//
vint32m2_t test_vreinterpret_v_i64m2_i32m2(vint64m2_t src) {
  // i64m2 -> i32m2: same register contents; lowered to a single IR bitcast.
  return vreinterpret_i32m2(src);
}
1637 
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m4_i32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 8 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
//
vint32m4_t test_vreinterpret_v_i64m4_i32m4(vint64m4_t src) {
  // i64m4 -> i32m4: same register contents; lowered to a single IR bitcast.
  return vreinterpret_i32m4(src);
}
1646 
// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m8_i32m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 16 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
//
vint32m8_t test_vreinterpret_v_i64m8_i32m8(vint64m8_t src) {
  // i64m8 -> i32m8: same register contents; lowered to a single IR bitcast.
  return vreinterpret_i32m8(src);
}
1655 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m1_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vreinterpret_v_u64m1_u32m1(vuint64m1_t src) {
  // u64m1 -> u32m1: same register contents; lowered to a single IR bitcast.
  return vreinterpret_u32m1(src);
}
1664 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m2_u32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 4 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
//
vuint32m2_t test_vreinterpret_v_u64m2_u32m2(vuint64m2_t src) {
  // u64m2 -> u32m2: same register contents; lowered to a single IR bitcast.
  return vreinterpret_u32m2(src);
}
1673 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m4_u32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 8 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
//
vuint32m4_t test_vreinterpret_v_u64m4_u32m4(vuint64m4_t src) {
  // u64m4 -> u32m4: same register contents; lowered to a single IR bitcast.
  return vreinterpret_u32m4(src);
}
1682 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m8_u32m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 16 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
//
vuint32m8_t test_vreinterpret_v_u64m8_u32m8(vuint64m8_t src) {
  // u64m8 -> u32m8: same register contents; lowered to a single IR bitcast.
  return vreinterpret_u32m8(src);
}
1691