1 // NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
2 // REQUIRES: riscv-registered-target
3 // RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d \
4 // RUN:   -target-feature +experimental-v -target-feature +experimental-zfh \
5 // RUN:   -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
6 
7 #include <riscv_vector.h>
8 
9 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf8_u8mf8(
10 // CHECK-RV64-NEXT:  entry:
11 // CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[SRC:%.*]]
12 //
test_vreinterpret_v_i8mf8_u8mf8(vint8mf8_t src)13 vuint8mf8_t test_vreinterpret_v_i8mf8_u8mf8(vint8mf8_t src) {
14   return vreinterpret_v_i8mf8_u8mf8(src);
15 }
16 
17 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf4_u8mf4(
18 // CHECK-RV64-NEXT:  entry:
19 // CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[SRC:%.*]]
20 //
test_vreinterpret_v_i8mf4_u8mf4(vint8mf4_t src)21 vuint8mf4_t test_vreinterpret_v_i8mf4_u8mf4(vint8mf4_t src) {
22   return vreinterpret_v_i8mf4_u8mf4(src);
23 }
24 
25 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf2_u8mf2(
26 // CHECK-RV64-NEXT:  entry:
27 // CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[SRC:%.*]]
28 //
test_vreinterpret_v_i8mf2_u8mf2(vint8mf2_t src)29 vuint8mf2_t test_vreinterpret_v_i8mf2_u8mf2(vint8mf2_t src) {
30   return vreinterpret_v_i8mf2_u8mf2(src);
31 }
32 
33 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m1_u8m1(
34 // CHECK-RV64-NEXT:  entry:
35 // CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[SRC:%.*]]
36 //
test_vreinterpret_v_i8m1_u8m1(vint8m1_t src)37 vuint8m1_t test_vreinterpret_v_i8m1_u8m1(vint8m1_t src) {
38   return vreinterpret_v_i8m1_u8m1(src);
39 }
40 
41 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m2_u8m2(
42 // CHECK-RV64-NEXT:  entry:
43 // CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[SRC:%.*]]
44 //
test_vreinterpret_v_i8m2_u8m2(vint8m2_t src)45 vuint8m2_t test_vreinterpret_v_i8m2_u8m2(vint8m2_t src) {
46   return vreinterpret_v_i8m2_u8m2(src);
47 }
48 
49 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m4_u8m4(
50 // CHECK-RV64-NEXT:  entry:
51 // CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[SRC:%.*]]
52 //
test_vreinterpret_v_i8m4_u8m4(vint8m4_t src)53 vuint8m4_t test_vreinterpret_v_i8m4_u8m4(vint8m4_t src) {
54   return vreinterpret_v_i8m4_u8m4(src);
55 }
56 
57 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m8_u8m8(
58 // CHECK-RV64-NEXT:  entry:
59 // CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[SRC:%.*]]
60 //
test_vreinterpret_v_i8m8_u8m8(vint8m8_t src)61 vuint8m8_t test_vreinterpret_v_i8m8_u8m8(vint8m8_t src) {
62   return vreinterpret_v_i8m8_u8m8(src);
63 }
64 
65 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf8_i8mf8(
66 // CHECK-RV64-NEXT:  entry:
67 // CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[SRC:%.*]]
68 //
test_vreinterpret_v_u8mf8_i8mf8(vuint8mf8_t src)69 vint8mf8_t test_vreinterpret_v_u8mf8_i8mf8(vuint8mf8_t src) {
70   return vreinterpret_v_u8mf8_i8mf8(src);
71 }
72 
73 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf4_i8mf4(
74 // CHECK-RV64-NEXT:  entry:
75 // CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[SRC:%.*]]
76 //
test_vreinterpret_v_u8mf4_i8mf4(vuint8mf4_t src)77 vint8mf4_t test_vreinterpret_v_u8mf4_i8mf4(vuint8mf4_t src) {
78   return vreinterpret_v_u8mf4_i8mf4(src);
79 }
80 
81 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf2_i8mf2(
82 // CHECK-RV64-NEXT:  entry:
83 // CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[SRC:%.*]]
84 //
test_vreinterpret_v_u8mf2_i8mf2(vuint8mf2_t src)85 vint8mf2_t test_vreinterpret_v_u8mf2_i8mf2(vuint8mf2_t src) {
86   return vreinterpret_v_u8mf2_i8mf2(src);
87 }
88 
89 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m1_i8m1(
90 // CHECK-RV64-NEXT:  entry:
91 // CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[SRC:%.*]]
92 //
test_vreinterpret_v_u8m1_i8m1(vuint8m1_t src)93 vint8m1_t test_vreinterpret_v_u8m1_i8m1(vuint8m1_t src) {
94   return vreinterpret_v_u8m1_i8m1(src);
95 }
96 
97 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m2_i8m2(
98 // CHECK-RV64-NEXT:  entry:
99 // CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[SRC:%.*]]
100 //
test_vreinterpret_v_u8m2_i8m2(vuint8m2_t src)101 vint8m2_t test_vreinterpret_v_u8m2_i8m2(vuint8m2_t src) {
102   return vreinterpret_v_u8m2_i8m2(src);
103 }
104 
105 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m4_i8m4(
106 // CHECK-RV64-NEXT:  entry:
107 // CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[SRC:%.*]]
108 //
test_vreinterpret_v_u8m4_i8m4(vuint8m4_t src)109 vint8m4_t test_vreinterpret_v_u8m4_i8m4(vuint8m4_t src) {
110   return vreinterpret_v_u8m4_i8m4(src);
111 }
112 
113 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m8_i8m8(
114 // CHECK-RV64-NEXT:  entry:
115 // CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[SRC:%.*]]
116 //
test_vreinterpret_v_u8m8_i8m8(vuint8m8_t src)117 vint8m8_t test_vreinterpret_v_u8m8_i8m8(vuint8m8_t src) {
118   return vreinterpret_v_u8m8_i8m8(src);
119 }
120 
121 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16mf4_u16mf4(
122 // CHECK-RV64-NEXT:  entry:
123 // CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[SRC:%.*]]
124 //
test_vreinterpret_v_i16mf4_u16mf4(vint16mf4_t src)125 vuint16mf4_t test_vreinterpret_v_i16mf4_u16mf4(vint16mf4_t src) {
126   return vreinterpret_v_i16mf4_u16mf4(src);
127 }
128 
129 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16mf2_u16mf2(
130 // CHECK-RV64-NEXT:  entry:
131 // CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[SRC:%.*]]
132 //
test_vreinterpret_v_i16mf2_u16mf2(vint16mf2_t src)133 vuint16mf2_t test_vreinterpret_v_i16mf2_u16mf2(vint16mf2_t src) {
134   return vreinterpret_v_i16mf2_u16mf2(src);
135 }
136 
137 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m1_u16m1(
138 // CHECK-RV64-NEXT:  entry:
139 // CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[SRC:%.*]]
140 //
test_vreinterpret_v_i16m1_u16m1(vint16m1_t src)141 vuint16m1_t test_vreinterpret_v_i16m1_u16m1(vint16m1_t src) {
142   return vreinterpret_v_i16m1_u16m1(src);
143 }
144 
145 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m2_u16m2(
146 // CHECK-RV64-NEXT:  entry:
147 // CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[SRC:%.*]]
148 //
test_vreinterpret_v_i16m2_u16m2(vint16m2_t src)149 vuint16m2_t test_vreinterpret_v_i16m2_u16m2(vint16m2_t src) {
150   return vreinterpret_v_i16m2_u16m2(src);
151 }
152 
153 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m4_u16m4(
154 // CHECK-RV64-NEXT:  entry:
155 // CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[SRC:%.*]]
156 //
test_vreinterpret_v_i16m4_u16m4(vint16m4_t src)157 vuint16m4_t test_vreinterpret_v_i16m4_u16m4(vint16m4_t src) {
158   return vreinterpret_v_i16m4_u16m4(src);
159 }
160 
161 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m8_u16m8(
162 // CHECK-RV64-NEXT:  entry:
163 // CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[SRC:%.*]]
164 //
test_vreinterpret_v_i16m8_u16m8(vint16m8_t src)165 vuint16m8_t test_vreinterpret_v_i16m8_u16m8(vint16m8_t src) {
166   return vreinterpret_v_i16m8_u16m8(src);
167 }
168 
169 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16mf4_i16mf4(
170 // CHECK-RV64-NEXT:  entry:
171 // CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[SRC:%.*]]
172 //
test_vreinterpret_v_u16mf4_i16mf4(vuint16mf4_t src)173 vint16mf4_t test_vreinterpret_v_u16mf4_i16mf4(vuint16mf4_t src) {
174   return vreinterpret_v_u16mf4_i16mf4(src);
175 }
176 
177 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16mf2_i16mf2(
178 // CHECK-RV64-NEXT:  entry:
179 // CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[SRC:%.*]]
180 //
test_vreinterpret_v_u16mf2_i16mf2(vuint16mf2_t src)181 vint16mf2_t test_vreinterpret_v_u16mf2_i16mf2(vuint16mf2_t src) {
182   return vreinterpret_v_u16mf2_i16mf2(src);
183 }
184 
185 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m1_i16m1(
186 // CHECK-RV64-NEXT:  entry:
187 // CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[SRC:%.*]]
188 //
test_vreinterpret_v_u16m1_i16m1(vuint16m1_t src)189 vint16m1_t test_vreinterpret_v_u16m1_i16m1(vuint16m1_t src) {
190   return vreinterpret_v_u16m1_i16m1(src);
191 }
192 
193 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m2_i16m2(
194 // CHECK-RV64-NEXT:  entry:
195 // CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[SRC:%.*]]
196 //
test_vreinterpret_v_u16m2_i16m2(vuint16m2_t src)197 vint16m2_t test_vreinterpret_v_u16m2_i16m2(vuint16m2_t src) {
198   return vreinterpret_v_u16m2_i16m2(src);
199 }
200 
201 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m4_i16m4(
202 // CHECK-RV64-NEXT:  entry:
203 // CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[SRC:%.*]]
204 //
test_vreinterpret_v_u16m4_i16m4(vuint16m4_t src)205 vint16m4_t test_vreinterpret_v_u16m4_i16m4(vuint16m4_t src) {
206   return vreinterpret_v_u16m4_i16m4(src);
207 }
208 
209 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m8_i16m8(
210 // CHECK-RV64-NEXT:  entry:
211 // CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[SRC:%.*]]
212 //
test_vreinterpret_v_u16m8_i16m8(vuint16m8_t src)213 vint16m8_t test_vreinterpret_v_u16m8_i16m8(vuint16m8_t src) {
214   return vreinterpret_v_u16m8_i16m8(src);
215 }
216 
217 // CHECK-RV64-LABEL: @test_vreinterpret_v_f16mf4_i16mf4(
218 // CHECK-RV64-NEXT:  entry:
219 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x half> [[SRC:%.*]] to <vscale x 1 x i16>
220 // CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
221 //
test_vreinterpret_v_f16mf4_i16mf4(vfloat16mf4_t src)222 vint16mf4_t test_vreinterpret_v_f16mf4_i16mf4(vfloat16mf4_t src) {
223   return vreinterpret_v_f16mf4_i16mf4(src);
224 }
225 
226 // CHECK-RV64-LABEL: @test_vreinterpret_v_f16mf2_i16mf2(
227 // CHECK-RV64-NEXT:  entry:
228 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x half> [[SRC:%.*]] to <vscale x 2 x i16>
229 // CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
230 //
test_vreinterpret_v_f16mf2_i16mf2(vfloat16mf2_t src)231 vint16mf2_t test_vreinterpret_v_f16mf2_i16mf2(vfloat16mf2_t src) {
232   return vreinterpret_v_f16mf2_i16mf2(src);
233 }
234 
235 // CHECK-RV64-LABEL: @test_vreinterpret_v_f16m1_i16m1(
236 // CHECK-RV64-NEXT:  entry:
237 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x half> [[SRC:%.*]] to <vscale x 4 x i16>
238 // CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
239 //
test_vreinterpret_v_f16m1_i16m1(vfloat16m1_t src)240 vint16m1_t test_vreinterpret_v_f16m1_i16m1(vfloat16m1_t src) {
241   return vreinterpret_v_f16m1_i16m1(src);
242 }
243 
244 // CHECK-RV64-LABEL: @test_vreinterpret_v_f16m2_i16m2(
245 // CHECK-RV64-NEXT:  entry:
246 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x half> [[SRC:%.*]] to <vscale x 8 x i16>
247 // CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
248 //
test_vreinterpret_v_f16m2_i16m2(vfloat16m2_t src)249 vint16m2_t test_vreinterpret_v_f16m2_i16m2(vfloat16m2_t src) {
250   return vreinterpret_v_f16m2_i16m2(src);
251 }
252 
253 // CHECK-RV64-LABEL: @test_vreinterpret_v_f16m4_i16m4(
254 // CHECK-RV64-NEXT:  entry:
255 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x half> [[SRC:%.*]] to <vscale x 16 x i16>
256 // CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
257 //
test_vreinterpret_v_f16m4_i16m4(vfloat16m4_t src)258 vint16m4_t test_vreinterpret_v_f16m4_i16m4(vfloat16m4_t src) {
259   return vreinterpret_v_f16m4_i16m4(src);
260 }
261 
262 // CHECK-RV64-LABEL: @test_vreinterpret_v_f16m8_i16m8(
263 // CHECK-RV64-NEXT:  entry:
264 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x half> [[SRC:%.*]] to <vscale x 32 x i16>
265 // CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
266 //
test_vreinterpret_v_f16m8_i16m8(vfloat16m8_t src)267 vint16m8_t test_vreinterpret_v_f16m8_i16m8(vfloat16m8_t src) {
268   return vreinterpret_v_f16m8_i16m8(src);
269 }
270 
271 // CHECK-RV64-LABEL: @test_vreinterpret_v_f16mf4_u16mf4(
272 // CHECK-RV64-NEXT:  entry:
273 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x half> [[SRC:%.*]] to <vscale x 1 x i16>
274 // CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
275 //
test_vreinterpret_v_f16mf4_u16mf4(vfloat16mf4_t src)276 vuint16mf4_t test_vreinterpret_v_f16mf4_u16mf4(vfloat16mf4_t src) {
277   return vreinterpret_v_f16mf4_u16mf4(src);
278 }
279 
280 // CHECK-RV64-LABEL: @test_vreinterpret_v_f16mf2_u16mf2(
281 // CHECK-RV64-NEXT:  entry:
282 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x half> [[SRC:%.*]] to <vscale x 2 x i16>
283 // CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
284 //
test_vreinterpret_v_f16mf2_u16mf2(vfloat16mf2_t src)285 vuint16mf2_t test_vreinterpret_v_f16mf2_u16mf2(vfloat16mf2_t src) {
286   return vreinterpret_v_f16mf2_u16mf2(src);
287 }
288 
289 // CHECK-RV64-LABEL: @test_vreinterpret_v_f16m1_u16m1(
290 // CHECK-RV64-NEXT:  entry:
291 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x half> [[SRC:%.*]] to <vscale x 4 x i16>
292 // CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
293 //
test_vreinterpret_v_f16m1_u16m1(vfloat16m1_t src)294 vuint16m1_t test_vreinterpret_v_f16m1_u16m1(vfloat16m1_t src) {
295   return vreinterpret_v_f16m1_u16m1(src);
296 }
297 
298 // CHECK-RV64-LABEL: @test_vreinterpret_v_f16m2_u16m2(
299 // CHECK-RV64-NEXT:  entry:
300 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x half> [[SRC:%.*]] to <vscale x 8 x i16>
301 // CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
302 //
test_vreinterpret_v_f16m2_u16m2(vfloat16m2_t src)303 vuint16m2_t test_vreinterpret_v_f16m2_u16m2(vfloat16m2_t src) {
304   return vreinterpret_v_f16m2_u16m2(src);
305 }
306 
307 // CHECK-RV64-LABEL: @test_vreinterpret_v_f16m4_u16m4(
308 // CHECK-RV64-NEXT:  entry:
309 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x half> [[SRC:%.*]] to <vscale x 16 x i16>
310 // CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
311 //
test_vreinterpret_v_f16m4_u16m4(vfloat16m4_t src)312 vuint16m4_t test_vreinterpret_v_f16m4_u16m4(vfloat16m4_t src) {
313   return vreinterpret_v_f16m4_u16m4(src);
314 }
315 
316 // CHECK-RV64-LABEL: @test_vreinterpret_v_f16m8_u16m8(
317 // CHECK-RV64-NEXT:  entry:
318 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x half> [[SRC:%.*]] to <vscale x 32 x i16>
319 // CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
320 //
test_vreinterpret_v_f16m8_u16m8(vfloat16m8_t src)321 vuint16m8_t test_vreinterpret_v_f16m8_u16m8(vfloat16m8_t src) {
322   return vreinterpret_v_f16m8_u16m8(src);
323 }
324 
325 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16mf4_f16mf4(
326 // CHECK-RV64-NEXT:  entry:
327 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i16> [[SRC:%.*]] to <vscale x 1 x half>
328 // CHECK-RV64-NEXT:    ret <vscale x 1 x half> [[TMP0]]
329 //
test_vreinterpret_v_i16mf4_f16mf4(vint16mf4_t src)330 vfloat16mf4_t test_vreinterpret_v_i16mf4_f16mf4(vint16mf4_t src) {
331   return vreinterpret_v_i16mf4_f16mf4(src);
332 }
333 
334 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16mf2_f16mf2(
335 // CHECK-RV64-NEXT:  entry:
336 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC:%.*]] to <vscale x 2 x half>
337 // CHECK-RV64-NEXT:    ret <vscale x 2 x half> [[TMP0]]
338 //
test_vreinterpret_v_i16mf2_f16mf2(vint16mf2_t src)339 vfloat16mf2_t test_vreinterpret_v_i16mf2_f16mf2(vint16mf2_t src) {
340   return vreinterpret_v_i16mf2_f16mf2(src);
341 }
342 
343 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m1_f16m1(
344 // CHECK-RV64-NEXT:  entry:
345 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 4 x half>
346 // CHECK-RV64-NEXT:    ret <vscale x 4 x half> [[TMP0]]
347 //
test_vreinterpret_v_i16m1_f16m1(vint16m1_t src)348 vfloat16m1_t test_vreinterpret_v_i16m1_f16m1(vint16m1_t src) {
349   return vreinterpret_v_i16m1_f16m1(src);
350 }
351 
352 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m2_f16m2(
353 // CHECK-RV64-NEXT:  entry:
354 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 8 x half>
355 // CHECK-RV64-NEXT:    ret <vscale x 8 x half> [[TMP0]]
356 //
test_vreinterpret_v_i16m2_f16m2(vint16m2_t src)357 vfloat16m2_t test_vreinterpret_v_i16m2_f16m2(vint16m2_t src) {
358   return vreinterpret_v_i16m2_f16m2(src);
359 }
360 
361 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m4_f16m4(
362 // CHECK-RV64-NEXT:  entry:
363 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 16 x half>
364 // CHECK-RV64-NEXT:    ret <vscale x 16 x half> [[TMP0]]
365 //
test_vreinterpret_v_i16m4_f16m4(vint16m4_t src)366 vfloat16m4_t test_vreinterpret_v_i16m4_f16m4(vint16m4_t src) {
367   return vreinterpret_v_i16m4_f16m4(src);
368 }
369 
370 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m8_f16m8(
371 // CHECK-RV64-NEXT:  entry:
372 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 32 x half>
373 // CHECK-RV64-NEXT:    ret <vscale x 32 x half> [[TMP0]]
374 //
test_vreinterpret_v_i16m8_f16m8(vint16m8_t src)375 vfloat16m8_t test_vreinterpret_v_i16m8_f16m8(vint16m8_t src) {
376   return vreinterpret_v_i16m8_f16m8(src);
377 }
378 
379 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16mf4_f16mf4(
380 // CHECK-RV64-NEXT:  entry:
381 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i16> [[SRC:%.*]] to <vscale x 1 x half>
382 // CHECK-RV64-NEXT:    ret <vscale x 1 x half> [[TMP0]]
383 //
test_vreinterpret_v_u16mf4_f16mf4(vuint16mf4_t src)384 vfloat16mf4_t test_vreinterpret_v_u16mf4_f16mf4(vuint16mf4_t src) {
385   return vreinterpret_v_u16mf4_f16mf4(src);
386 }
387 
388 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16mf2_f16mf2(
389 // CHECK-RV64-NEXT:  entry:
390 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC:%.*]] to <vscale x 2 x half>
391 // CHECK-RV64-NEXT:    ret <vscale x 2 x half> [[TMP0]]
392 //
test_vreinterpret_v_u16mf2_f16mf2(vuint16mf2_t src)393 vfloat16mf2_t test_vreinterpret_v_u16mf2_f16mf2(vuint16mf2_t src) {
394   return vreinterpret_v_u16mf2_f16mf2(src);
395 }
396 
397 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m1_f16m1(
398 // CHECK-RV64-NEXT:  entry:
399 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 4 x half>
400 // CHECK-RV64-NEXT:    ret <vscale x 4 x half> [[TMP0]]
401 //
test_vreinterpret_v_u16m1_f16m1(vuint16m1_t src)402 vfloat16m1_t test_vreinterpret_v_u16m1_f16m1(vuint16m1_t src) {
403   return vreinterpret_v_u16m1_f16m1(src);
404 }
405 
406 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m2_f16m2(
407 // CHECK-RV64-NEXT:  entry:
408 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 8 x half>
409 // CHECK-RV64-NEXT:    ret <vscale x 8 x half> [[TMP0]]
410 //
test_vreinterpret_v_u16m2_f16m2(vuint16m2_t src)411 vfloat16m2_t test_vreinterpret_v_u16m2_f16m2(vuint16m2_t src) {
412   return vreinterpret_v_u16m2_f16m2(src);
413 }
414 
415 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m4_f16m4(
416 // CHECK-RV64-NEXT:  entry:
417 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 16 x half>
418 // CHECK-RV64-NEXT:    ret <vscale x 16 x half> [[TMP0]]
419 //
test_vreinterpret_v_u16m4_f16m4(vuint16m4_t src)420 vfloat16m4_t test_vreinterpret_v_u16m4_f16m4(vuint16m4_t src) {
421   return vreinterpret_v_u16m4_f16m4(src);
422 }
423 
424 // CHECK-RV64-LABEL: @test_vreinterpret_v_u16m8_f16m8(
425 // CHECK-RV64-NEXT:  entry:
426 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 32 x half>
427 // CHECK-RV64-NEXT:    ret <vscale x 32 x half> [[TMP0]]
428 //
test_vreinterpret_v_u16m8_f16m8(vuint16m8_t src)429 vfloat16m8_t test_vreinterpret_v_u16m8_f16m8(vuint16m8_t src) {
430   return vreinterpret_v_u16m8_f16m8(src);
431 }
432 
433 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32mf2_u32mf2(
434 // CHECK-RV64-NEXT:  entry:
435 // CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[SRC:%.*]]
436 //
test_vreinterpret_v_i32mf2_u32mf2(vint32mf2_t src)437 vuint32mf2_t test_vreinterpret_v_i32mf2_u32mf2(vint32mf2_t src) {
438   return vreinterpret_v_i32mf2_u32mf2(src);
439 }
440 
441 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m1_u32m1(
442 // CHECK-RV64-NEXT:  entry:
443 // CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[SRC:%.*]]
444 //
test_vreinterpret_v_i32m1_u32m1(vint32m1_t src)445 vuint32m1_t test_vreinterpret_v_i32m1_u32m1(vint32m1_t src) {
446   return vreinterpret_v_i32m1_u32m1(src);
447 }
448 
449 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m2_u32m2(
450 // CHECK-RV64-NEXT:  entry:
451 // CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[SRC:%.*]]
452 //
test_vreinterpret_v_i32m2_u32m2(vint32m2_t src)453 vuint32m2_t test_vreinterpret_v_i32m2_u32m2(vint32m2_t src) {
454   return vreinterpret_v_i32m2_u32m2(src);
455 }
456 
457 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m4_u32m4(
458 // CHECK-RV64-NEXT:  entry:
459 // CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[SRC:%.*]]
460 //
test_vreinterpret_v_i32m4_u32m4(vint32m4_t src)461 vuint32m4_t test_vreinterpret_v_i32m4_u32m4(vint32m4_t src) {
462   return vreinterpret_v_i32m4_u32m4(src);
463 }
464 
465 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m8_u32m8(
466 // CHECK-RV64-NEXT:  entry:
467 // CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[SRC:%.*]]
468 //
test_vreinterpret_v_i32m8_u32m8(vint32m8_t src)469 vuint32m8_t test_vreinterpret_v_i32m8_u32m8(vint32m8_t src) {
470   return vreinterpret_v_i32m8_u32m8(src);
471 }
472 
473 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32mf2_i32mf2(
474 // CHECK-RV64-NEXT:  entry:
475 // CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[SRC:%.*]]
476 //
test_vreinterpret_v_u32mf2_i32mf2(vuint32mf2_t src)477 vint32mf2_t test_vreinterpret_v_u32mf2_i32mf2(vuint32mf2_t src) {
478   return vreinterpret_v_u32mf2_i32mf2(src);
479 }
480 
481 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m1_i32m1(
482 // CHECK-RV64-NEXT:  entry:
483 // CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[SRC:%.*]]
484 //
test_vreinterpret_v_u32m1_i32m1(vuint32m1_t src)485 vint32m1_t test_vreinterpret_v_u32m1_i32m1(vuint32m1_t src) {
486   return vreinterpret_v_u32m1_i32m1(src);
487 }
488 
489 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m2_i32m2(
490 // CHECK-RV64-NEXT:  entry:
491 // CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[SRC:%.*]]
492 //
test_vreinterpret_v_u32m2_i32m2(vuint32m2_t src)493 vint32m2_t test_vreinterpret_v_u32m2_i32m2(vuint32m2_t src) {
494   return vreinterpret_v_u32m2_i32m2(src);
495 }
496 
497 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m4_i32m4(
498 // CHECK-RV64-NEXT:  entry:
499 // CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[SRC:%.*]]
500 //
test_vreinterpret_v_u32m4_i32m4(vuint32m4_t src)501 vint32m4_t test_vreinterpret_v_u32m4_i32m4(vuint32m4_t src) {
502   return vreinterpret_v_u32m4_i32m4(src);
503 }
504 
505 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m8_i32m8(
506 // CHECK-RV64-NEXT:  entry:
507 // CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[SRC:%.*]]
508 //
test_vreinterpret_v_u32m8_i32m8(vuint32m8_t src)509 vint32m8_t test_vreinterpret_v_u32m8_i32m8(vuint32m8_t src) {
510   return vreinterpret_v_u32m8_i32m8(src);
511 }
512 
513 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32mf2_i32mf2(
514 // CHECK-RV64-NEXT:  entry:
515 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x float> [[SRC:%.*]] to <vscale x 1 x i32>
516 // CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
517 //
test_vreinterpret_v_f32mf2_i32mf2(vfloat32mf2_t src)518 vint32mf2_t test_vreinterpret_v_f32mf2_i32mf2(vfloat32mf2_t src) {
519   return vreinterpret_v_f32mf2_i32mf2(src);
520 }
521 
522 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32m1_i32m1(
523 // CHECK-RV64-NEXT:  entry:
524 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x float> [[SRC:%.*]] to <vscale x 2 x i32>
525 // CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
526 //
test_vreinterpret_v_f32m1_i32m1(vfloat32m1_t src)527 vint32m1_t test_vreinterpret_v_f32m1_i32m1(vfloat32m1_t src) {
528   return vreinterpret_v_f32m1_i32m1(src);
529 }
530 
531 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32m2_i32m2(
532 // CHECK-RV64-NEXT:  entry:
533 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x float> [[SRC:%.*]] to <vscale x 4 x i32>
534 // CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
535 //
test_vreinterpret_v_f32m2_i32m2(vfloat32m2_t src)536 vint32m2_t test_vreinterpret_v_f32m2_i32m2(vfloat32m2_t src) {
537   return vreinterpret_v_f32m2_i32m2(src);
538 }
539 
540 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32m4_i32m4(
541 // CHECK-RV64-NEXT:  entry:
542 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x float> [[SRC:%.*]] to <vscale x 8 x i32>
543 // CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
544 //
test_vreinterpret_v_f32m4_i32m4(vfloat32m4_t src)545 vint32m4_t test_vreinterpret_v_f32m4_i32m4(vfloat32m4_t src) {
546   return vreinterpret_v_f32m4_i32m4(src);
547 }
548 
549 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32m8_i32m8(
550 // CHECK-RV64-NEXT:  entry:
551 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x float> [[SRC:%.*]] to <vscale x 16 x i32>
552 // CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
553 //
test_vreinterpret_v_f32m8_i32m8(vfloat32m8_t src)554 vint32m8_t test_vreinterpret_v_f32m8_i32m8(vfloat32m8_t src) {
555   return vreinterpret_v_f32m8_i32m8(src);
556 }
557 
558 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32mf2_u32mf2(
559 // CHECK-RV64-NEXT:  entry:
560 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x float> [[SRC:%.*]] to <vscale x 1 x i32>
561 // CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
562 //
test_vreinterpret_v_f32mf2_u32mf2(vfloat32mf2_t src)563 vuint32mf2_t test_vreinterpret_v_f32mf2_u32mf2(vfloat32mf2_t src) {
564   return vreinterpret_v_f32mf2_u32mf2(src);
565 }
566 
567 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32m1_u32m1(
568 // CHECK-RV64-NEXT:  entry:
569 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x float> [[SRC:%.*]] to <vscale x 2 x i32>
570 // CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
571 //
test_vreinterpret_v_f32m1_u32m1(vfloat32m1_t src)572 vuint32m1_t test_vreinterpret_v_f32m1_u32m1(vfloat32m1_t src) {
573   return vreinterpret_v_f32m1_u32m1(src);
574 }
575 
576 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32m2_u32m2(
577 // CHECK-RV64-NEXT:  entry:
578 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x float> [[SRC:%.*]] to <vscale x 4 x i32>
579 // CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
580 //
test_vreinterpret_v_f32m2_u32m2(vfloat32m2_t src)581 vuint32m2_t test_vreinterpret_v_f32m2_u32m2(vfloat32m2_t src) {
582   return vreinterpret_v_f32m2_u32m2(src);
583 }
584 
585 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32m4_u32m4(
586 // CHECK-RV64-NEXT:  entry:
587 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x float> [[SRC:%.*]] to <vscale x 8 x i32>
588 // CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
589 //
test_vreinterpret_v_f32m4_u32m4(vfloat32m4_t src)590 vuint32m4_t test_vreinterpret_v_f32m4_u32m4(vfloat32m4_t src) {
591   return vreinterpret_v_f32m4_u32m4(src);
592 }
593 
594 // CHECK-RV64-LABEL: @test_vreinterpret_v_f32m8_u32m8(
595 // CHECK-RV64-NEXT:  entry:
596 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x float> [[SRC:%.*]] to <vscale x 16 x i32>
597 // CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
598 //
test_vreinterpret_v_f32m8_u32m8(vfloat32m8_t src)599 vuint32m8_t test_vreinterpret_v_f32m8_u32m8(vfloat32m8_t src) {
600   return vreinterpret_v_f32m8_u32m8(src);
601 }
602 
603 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32mf2_f32mf2(
604 // CHECK-RV64-NEXT:  entry:
605 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 1 x float>
606 // CHECK-RV64-NEXT:    ret <vscale x 1 x float> [[TMP0]]
607 //
test_vreinterpret_v_i32mf2_f32mf2(vint32mf2_t src)608 vfloat32mf2_t test_vreinterpret_v_i32mf2_f32mf2(vint32mf2_t src) {
609   return vreinterpret_v_i32mf2_f32mf2(src);
610 }
611 
612 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m1_f32m1(
613 // CHECK-RV64-NEXT:  entry:
614 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 2 x float>
615 // CHECK-RV64-NEXT:    ret <vscale x 2 x float> [[TMP0]]
616 //
test_vreinterpret_v_i32m1_f32m1(vint32m1_t src)617 vfloat32m1_t test_vreinterpret_v_i32m1_f32m1(vint32m1_t src) {
618   return vreinterpret_v_i32m1_f32m1(src);
619 }
620 
621 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m2_f32m2(
622 // CHECK-RV64-NEXT:  entry:
623 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 4 x float>
624 // CHECK-RV64-NEXT:    ret <vscale x 4 x float> [[TMP0]]
625 //
test_vreinterpret_v_i32m2_f32m2(vint32m2_t src)626 vfloat32m2_t test_vreinterpret_v_i32m2_f32m2(vint32m2_t src) {
627   return vreinterpret_v_i32m2_f32m2(src);
628 }
629 
630 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m4_f32m4(
631 // CHECK-RV64-NEXT:  entry:
632 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 8 x float>
633 // CHECK-RV64-NEXT:    ret <vscale x 8 x float> [[TMP0]]
634 //
test_vreinterpret_v_i32m4_f32m4(vint32m4_t src)635 vfloat32m4_t test_vreinterpret_v_i32m4_f32m4(vint32m4_t src) {
636   return vreinterpret_v_i32m4_f32m4(src);
637 }
638 
639 // CHECK-RV64-LABEL: @test_vreinterpret_v_i32m8_f32m8(
640 // CHECK-RV64-NEXT:  entry:
641 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 16 x float>
642 // CHECK-RV64-NEXT:    ret <vscale x 16 x float> [[TMP0]]
643 //
test_vreinterpret_v_i32m8_f32m8(vint32m8_t src)644 vfloat32m8_t test_vreinterpret_v_i32m8_f32m8(vint32m8_t src) {
645   return vreinterpret_v_i32m8_f32m8(src);
646 }
647 
648 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32mf2_f32mf2(
649 // CHECK-RV64-NEXT:  entry:
650 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 1 x float>
651 // CHECK-RV64-NEXT:    ret <vscale x 1 x float> [[TMP0]]
652 //
test_vreinterpret_v_u32mf2_f32mf2(vuint32mf2_t src)653 vfloat32mf2_t test_vreinterpret_v_u32mf2_f32mf2(vuint32mf2_t src) {
654   return vreinterpret_v_u32mf2_f32mf2(src);
655 }
656 
657 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m1_f32m1(
658 // CHECK-RV64-NEXT:  entry:
659 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 2 x float>
660 // CHECK-RV64-NEXT:    ret <vscale x 2 x float> [[TMP0]]
661 //
test_vreinterpret_v_u32m1_f32m1(vuint32m1_t src)662 vfloat32m1_t test_vreinterpret_v_u32m1_f32m1(vuint32m1_t src) {
663   return vreinterpret_v_u32m1_f32m1(src);
664 }
665 
666 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m2_f32m2(
667 // CHECK-RV64-NEXT:  entry:
668 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 4 x float>
669 // CHECK-RV64-NEXT:    ret <vscale x 4 x float> [[TMP0]]
670 //
test_vreinterpret_v_u32m2_f32m2(vuint32m2_t src)671 vfloat32m2_t test_vreinterpret_v_u32m2_f32m2(vuint32m2_t src) {
672   return vreinterpret_v_u32m2_f32m2(src);
673 }
674 
675 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m4_f32m4(
676 // CHECK-RV64-NEXT:  entry:
677 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 8 x float>
678 // CHECK-RV64-NEXT:    ret <vscale x 8 x float> [[TMP0]]
679 //
test_vreinterpret_v_u32m4_f32m4(vuint32m4_t src)680 vfloat32m4_t test_vreinterpret_v_u32m4_f32m4(vuint32m4_t src) {
681   return vreinterpret_v_u32m4_f32m4(src);
682 }
683 
684 // CHECK-RV64-LABEL: @test_vreinterpret_v_u32m8_f32m8(
685 // CHECK-RV64-NEXT:  entry:
686 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 16 x float>
687 // CHECK-RV64-NEXT:    ret <vscale x 16 x float> [[TMP0]]
688 //
test_vreinterpret_v_u32m8_f32m8(vuint32m8_t src)689 vfloat32m8_t test_vreinterpret_v_u32m8_f32m8(vuint32m8_t src) {
690   return vreinterpret_v_u32m8_f32m8(src);
691 }
692 
693 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m1_u64m1(
694 // CHECK-RV64-NEXT:  entry:
695 // CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[SRC:%.*]]
696 //
test_vreinterpret_v_i64m1_u64m1(vint64m1_t src)697 vuint64m1_t test_vreinterpret_v_i64m1_u64m1(vint64m1_t src) {
698   return vreinterpret_v_i64m1_u64m1(src);
699 }
700 
701 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m2_u64m2(
702 // CHECK-RV64-NEXT:  entry:
703 // CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[SRC:%.*]]
704 //
test_vreinterpret_v_i64m2_u64m2(vint64m2_t src)705 vuint64m2_t test_vreinterpret_v_i64m2_u64m2(vint64m2_t src) {
706   return vreinterpret_v_i64m2_u64m2(src);
707 }
708 
709 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m4_u64m4(
710 // CHECK-RV64-NEXT:  entry:
711 // CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[SRC:%.*]]
712 //
test_vreinterpret_v_i64m4_u64m4(vint64m4_t src)713 vuint64m4_t test_vreinterpret_v_i64m4_u64m4(vint64m4_t src) {
714   return vreinterpret_v_i64m4_u64m4(src);
715 }
716 
717 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m8_u64m8(
718 // CHECK-RV64-NEXT:  entry:
719 // CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[SRC:%.*]]
720 //
test_vreinterpret_v_i64m8_u64m8(vint64m8_t src)721 vuint64m8_t test_vreinterpret_v_i64m8_u64m8(vint64m8_t src) {
722   return vreinterpret_v_i64m8_u64m8(src);
723 }
724 
725 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m1_i64m1(
726 // CHECK-RV64-NEXT:  entry:
727 // CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[SRC:%.*]]
728 //
test_vreinterpret_v_u64m1_i64m1(vuint64m1_t src)729 vint64m1_t test_vreinterpret_v_u64m1_i64m1(vuint64m1_t src) {
730   return vreinterpret_v_u64m1_i64m1(src);
731 }
732 
733 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m2_i64m2(
734 // CHECK-RV64-NEXT:  entry:
735 // CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[SRC:%.*]]
736 //
test_vreinterpret_v_u64m2_i64m2(vuint64m2_t src)737 vint64m2_t test_vreinterpret_v_u64m2_i64m2(vuint64m2_t src) {
738   return vreinterpret_v_u64m2_i64m2(src);
739 }
740 
741 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m4_i64m4(
742 // CHECK-RV64-NEXT:  entry:
743 // CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[SRC:%.*]]
744 //
test_vreinterpret_v_u64m4_i64m4(vuint64m4_t src)745 vint64m4_t test_vreinterpret_v_u64m4_i64m4(vuint64m4_t src) {
746   return vreinterpret_v_u64m4_i64m4(src);
747 }
748 
749 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m8_i64m8(
750 // CHECK-RV64-NEXT:  entry:
751 // CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[SRC:%.*]]
752 //
test_vreinterpret_v_u64m8_i64m8(vuint64m8_t src)753 vint64m8_t test_vreinterpret_v_u64m8_i64m8(vuint64m8_t src) {
754   return vreinterpret_v_u64m8_i64m8(src);
755 }
756 
757 // CHECK-RV64-LABEL: @test_vreinterpret_v_f64m1_i64m1(
758 // CHECK-RV64-NEXT:  entry:
759 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x double> [[SRC:%.*]] to <vscale x 1 x i64>
760 // CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
761 //
test_vreinterpret_v_f64m1_i64m1(vfloat64m1_t src)762 vint64m1_t test_vreinterpret_v_f64m1_i64m1(vfloat64m1_t src) {
763   return vreinterpret_v_f64m1_i64m1(src);
764 }
765 
766 // CHECK-RV64-LABEL: @test_vreinterpret_v_f64m2_i64m2(
767 // CHECK-RV64-NEXT:  entry:
768 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x double> [[SRC:%.*]] to <vscale x 2 x i64>
769 // CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
770 //
test_vreinterpret_v_f64m2_i64m2(vfloat64m2_t src)771 vint64m2_t test_vreinterpret_v_f64m2_i64m2(vfloat64m2_t src) {
772   return vreinterpret_v_f64m2_i64m2(src);
773 }
774 
775 // CHECK-RV64-LABEL: @test_vreinterpret_v_f64m4_i64m4(
776 // CHECK-RV64-NEXT:  entry:
777 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x double> [[SRC:%.*]] to <vscale x 4 x i64>
778 // CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
779 //
test_vreinterpret_v_f64m4_i64m4(vfloat64m4_t src)780 vint64m4_t test_vreinterpret_v_f64m4_i64m4(vfloat64m4_t src) {
781   return vreinterpret_v_f64m4_i64m4(src);
782 }
783 
784 // CHECK-RV64-LABEL: @test_vreinterpret_v_f64m8_i64m8(
785 // CHECK-RV64-NEXT:  entry:
786 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x double> [[SRC:%.*]] to <vscale x 8 x i64>
787 // CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
788 //
test_vreinterpret_v_f64m8_i64m8(vfloat64m8_t src)789 vint64m8_t test_vreinterpret_v_f64m8_i64m8(vfloat64m8_t src) {
790   return vreinterpret_v_f64m8_i64m8(src);
791 }
792 
793 // CHECK-RV64-LABEL: @test_vreinterpret_v_f64m1_u64m1(
794 // CHECK-RV64-NEXT:  entry:
795 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x double> [[SRC:%.*]] to <vscale x 1 x i64>
796 // CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
797 //
test_vreinterpret_v_f64m1_u64m1(vfloat64m1_t src)798 vuint64m1_t test_vreinterpret_v_f64m1_u64m1(vfloat64m1_t src) {
799   return vreinterpret_v_f64m1_u64m1(src);
800 }
801 
802 // CHECK-RV64-LABEL: @test_vreinterpret_v_f64m2_u64m2(
803 // CHECK-RV64-NEXT:  entry:
804 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x double> [[SRC:%.*]] to <vscale x 2 x i64>
805 // CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
806 //
test_vreinterpret_v_f64m2_u64m2(vfloat64m2_t src)807 vuint64m2_t test_vreinterpret_v_f64m2_u64m2(vfloat64m2_t src) {
808   return vreinterpret_v_f64m2_u64m2(src);
809 }
810 
811 // CHECK-RV64-LABEL: @test_vreinterpret_v_f64m4_u64m4(
812 // CHECK-RV64-NEXT:  entry:
813 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x double> [[SRC:%.*]] to <vscale x 4 x i64>
814 // CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
815 //
test_vreinterpret_v_f64m4_u64m4(vfloat64m4_t src)816 vuint64m4_t test_vreinterpret_v_f64m4_u64m4(vfloat64m4_t src) {
817   return vreinterpret_v_f64m4_u64m4(src);
818 }
819 
820 // CHECK-RV64-LABEL: @test_vreinterpret_v_f64m8_u64m8(
821 // CHECK-RV64-NEXT:  entry:
822 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x double> [[SRC:%.*]] to <vscale x 8 x i64>
823 // CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
824 //
test_vreinterpret_v_f64m8_u64m8(vfloat64m8_t src)825 vuint64m8_t test_vreinterpret_v_f64m8_u64m8(vfloat64m8_t src) {
826   return vreinterpret_v_f64m8_u64m8(src);
827 }
828 
829 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m1_f64m1(
830 // CHECK-RV64-NEXT:  entry:
831 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 1 x double>
832 // CHECK-RV64-NEXT:    ret <vscale x 1 x double> [[TMP0]]
833 //
test_vreinterpret_v_i64m1_f64m1(vint64m1_t src)834 vfloat64m1_t test_vreinterpret_v_i64m1_f64m1(vint64m1_t src) {
835   return vreinterpret_v_i64m1_f64m1(src);
836 }
837 
838 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m2_f64m2(
839 // CHECK-RV64-NEXT:  entry:
840 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 2 x double>
841 // CHECK-RV64-NEXT:    ret <vscale x 2 x double> [[TMP0]]
842 //
test_vreinterpret_v_i64m2_f64m2(vint64m2_t src)843 vfloat64m2_t test_vreinterpret_v_i64m2_f64m2(vint64m2_t src) {
844   return vreinterpret_v_i64m2_f64m2(src);
845 }
846 
847 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m4_f64m4(
848 // CHECK-RV64-NEXT:  entry:
849 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 4 x double>
850 // CHECK-RV64-NEXT:    ret <vscale x 4 x double> [[TMP0]]
851 //
test_vreinterpret_v_i64m4_f64m4(vint64m4_t src)852 vfloat64m4_t test_vreinterpret_v_i64m4_f64m4(vint64m4_t src) {
853   return vreinterpret_v_i64m4_f64m4(src);
854 }
855 
856 // CHECK-RV64-LABEL: @test_vreinterpret_v_i64m8_f64m8(
857 // CHECK-RV64-NEXT:  entry:
858 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 8 x double>
859 // CHECK-RV64-NEXT:    ret <vscale x 8 x double> [[TMP0]]
860 //
test_vreinterpret_v_i64m8_f64m8(vint64m8_t src)861 vfloat64m8_t test_vreinterpret_v_i64m8_f64m8(vint64m8_t src) {
862   return vreinterpret_v_i64m8_f64m8(src);
863 }
864 
865 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m1_f64m1(
866 // CHECK-RV64-NEXT:  entry:
867 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 1 x double>
868 // CHECK-RV64-NEXT:    ret <vscale x 1 x double> [[TMP0]]
869 //
test_vreinterpret_v_u64m1_f64m1(vuint64m1_t src)870 vfloat64m1_t test_vreinterpret_v_u64m1_f64m1(vuint64m1_t src) {
871   return vreinterpret_v_u64m1_f64m1(src);
872 }
873 
874 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m2_f64m2(
875 // CHECK-RV64-NEXT:  entry:
876 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 2 x double>
877 // CHECK-RV64-NEXT:    ret <vscale x 2 x double> [[TMP0]]
878 //
test_vreinterpret_v_u64m2_f64m2(vuint64m2_t src)879 vfloat64m2_t test_vreinterpret_v_u64m2_f64m2(vuint64m2_t src) {
880   return vreinterpret_v_u64m2_f64m2(src);
881 }
882 
883 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m4_f64m4(
884 // CHECK-RV64-NEXT:  entry:
885 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 4 x double>
886 // CHECK-RV64-NEXT:    ret <vscale x 4 x double> [[TMP0]]
887 //
test_vreinterpret_v_u64m4_f64m4(vuint64m4_t src)888 vfloat64m4_t test_vreinterpret_v_u64m4_f64m4(vuint64m4_t src) {
889   return vreinterpret_v_u64m4_f64m4(src);
890 }
891 
892 // CHECK-RV64-LABEL: @test_vreinterpret_v_u64m8_f64m8(
893 // CHECK-RV64-NEXT:  entry:
894 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 8 x double>
895 // CHECK-RV64-NEXT:    ret <vscale x 8 x double> [[TMP0]]
896 //
test_vreinterpret_v_u64m8_f64m8(vuint64m8_t src)897 vfloat64m8_t test_vreinterpret_v_u64m8_f64m8(vuint64m8_t src) {
898   return vreinterpret_v_u64m8_f64m8(src);
899 }
900 
901 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf4_i16mf4(
902 // CHECK-RV64-NEXT:  entry:
903 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i8> [[SRC:%.*]] to <vscale x 1 x i16>
904 // CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
905 //
test_vreinterpret_v_i8mf4_i16mf4(vint8mf4_t src)906 vint16mf4_t test_vreinterpret_v_i8mf4_i16mf4(vint8mf4_t src) {
907   return vreinterpret_v_i8mf4_i16mf4(src);
908 }
909 
910 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf2_i16mf2(
911 // CHECK-RV64-NEXT:  entry:
912 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i8> [[SRC:%.*]] to <vscale x 2 x i16>
913 // CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
914 //
test_vreinterpret_v_i8mf2_i16mf2(vint8mf2_t src)915 vint16mf2_t test_vreinterpret_v_i8mf2_i16mf2(vint8mf2_t src) {
916   return vreinterpret_v_i8mf2_i16mf2(src);
917 }
918 
919 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m1_i16m1(
920 // CHECK-RV64-NEXT:  entry:
921 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 4 x i16>
922 // CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
923 //
test_vreinterpret_v_i8m1_i16m1(vint8m1_t src)924 vint16m1_t test_vreinterpret_v_i8m1_i16m1(vint8m1_t src) {
925   return vreinterpret_v_i8m1_i16m1(src);
926 }
927 
928 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m2_i16m2(
929 // CHECK-RV64-NEXT:  entry:
930 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 8 x i16>
931 // CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
932 //
test_vreinterpret_v_i8m2_i16m2(vint8m2_t src)933 vint16m2_t test_vreinterpret_v_i8m2_i16m2(vint8m2_t src) {
934   return vreinterpret_v_i8m2_i16m2(src);
935 }
936 
937 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m4_i16m4(
938 // CHECK-RV64-NEXT:  entry:
939 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 16 x i16>
940 // CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
941 //
test_vreinterpret_v_i8m4_i16m4(vint8m4_t src)942 vint16m4_t test_vreinterpret_v_i8m4_i16m4(vint8m4_t src) {
943   return vreinterpret_v_i8m4_i16m4(src);
944 }
945 
946 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m8_i16m8(
947 // CHECK-RV64-NEXT:  entry:
948 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 32 x i16>
949 // CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
950 //
test_vreinterpret_v_i8m8_i16m8(vint8m8_t src)951 vint16m8_t test_vreinterpret_v_i8m8_i16m8(vint8m8_t src) {
952   return vreinterpret_v_i8m8_i16m8(src);
953 }
954 
955 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf4_u16mf4(
956 // CHECK-RV64-NEXT:  entry:
957 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i8> [[SRC:%.*]] to <vscale x 1 x i16>
958 // CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
959 //
test_vreinterpret_v_u8mf4_u16mf4(vuint8mf4_t src)960 vuint16mf4_t test_vreinterpret_v_u8mf4_u16mf4(vuint8mf4_t src) {
961   return vreinterpret_v_u8mf4_u16mf4(src);
962 }
963 
964 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf2_u16mf2(
965 // CHECK-RV64-NEXT:  entry:
966 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i8> [[SRC:%.*]] to <vscale x 2 x i16>
967 // CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
968 //
test_vreinterpret_v_u8mf2_u16mf2(vuint8mf2_t src)969 vuint16mf2_t test_vreinterpret_v_u8mf2_u16mf2(vuint8mf2_t src) {
970   return vreinterpret_v_u8mf2_u16mf2(src);
971 }
972 
973 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m1_u16m1(
974 // CHECK-RV64-NEXT:  entry:
975 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 4 x i16>
976 // CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
977 //
test_vreinterpret_v_u8m1_u16m1(vuint8m1_t src)978 vuint16m1_t test_vreinterpret_v_u8m1_u16m1(vuint8m1_t src) {
979   return vreinterpret_v_u8m1_u16m1(src);
980 }
981 
982 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m2_u16m2(
983 // CHECK-RV64-NEXT:  entry:
984 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 8 x i16>
985 // CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
986 //
test_vreinterpret_v_u8m2_u16m2(vuint8m2_t src)987 vuint16m2_t test_vreinterpret_v_u8m2_u16m2(vuint8m2_t src) {
988   return vreinterpret_v_u8m2_u16m2(src);
989 }
990 
991 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m4_u16m4(
992 // CHECK-RV64-NEXT:  entry:
993 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 16 x i16>
994 // CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
995 //
test_vreinterpret_v_u8m4_u16m4(vuint8m4_t src)996 vuint16m4_t test_vreinterpret_v_u8m4_u16m4(vuint8m4_t src) {
997   return vreinterpret_v_u8m4_u16m4(src);
998 }
999 
1000 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m8_u16m8(
1001 // CHECK-RV64-NEXT:  entry:
1002 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 32 x i16>
1003 // CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
1004 //
test_vreinterpret_v_u8m8_u16m8(vuint8m8_t src)1005 vuint16m8_t test_vreinterpret_v_u8m8_u16m8(vuint8m8_t src) {
1006   return vreinterpret_v_u8m8_u16m8(src);
1007 }
1008 
1009 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8mf2_i32mf2(
1010 // CHECK-RV64-NEXT:  entry:
1011 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i8> [[SRC:%.*]] to <vscale x 1 x i32>
1012 // CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
1013 //
test_vreinterpret_v_i8mf2_i32mf2(vint8mf2_t src)1014 vint32mf2_t test_vreinterpret_v_i8mf2_i32mf2(vint8mf2_t src) {
1015   return vreinterpret_v_i8mf2_i32mf2(src);
1016 }
1017 
1018 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m1_i32m1(
1019 // CHECK-RV64-NEXT:  entry:
1020 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 2 x i32>
1021 // CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
1022 //
test_vreinterpret_v_i8m1_i32m1(vint8m1_t src)1023 vint32m1_t test_vreinterpret_v_i8m1_i32m1(vint8m1_t src) {
1024   return vreinterpret_v_i8m1_i32m1(src);
1025 }
1026 
1027 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m2_i32m2(
1028 // CHECK-RV64-NEXT:  entry:
1029 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 4 x i32>
1030 // CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
1031 //
test_vreinterpret_v_i8m2_i32m2(vint8m2_t src)1032 vint32m2_t test_vreinterpret_v_i8m2_i32m2(vint8m2_t src) {
1033   return vreinterpret_v_i8m2_i32m2(src);
1034 }
1035 
1036 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m4_i32m4(
1037 // CHECK-RV64-NEXT:  entry:
1038 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 8 x i32>
1039 // CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
1040 //
test_vreinterpret_v_i8m4_i32m4(vint8m4_t src)1041 vint32m4_t test_vreinterpret_v_i8m4_i32m4(vint8m4_t src) {
1042   return vreinterpret_v_i8m4_i32m4(src);
1043 }
1044 
1045 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m8_i32m8(
1046 // CHECK-RV64-NEXT:  entry:
1047 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 16 x i32>
1048 // CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
1049 //
test_vreinterpret_v_i8m8_i32m8(vint8m8_t src)1050 vint32m8_t test_vreinterpret_v_i8m8_i32m8(vint8m8_t src) {
1051   return vreinterpret_v_i8m8_i32m8(src);
1052 }
1053 
1054 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8mf2_u32mf2(
1055 // CHECK-RV64-NEXT:  entry:
1056 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i8> [[SRC:%.*]] to <vscale x 1 x i32>
1057 // CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
1058 //
test_vreinterpret_v_u8mf2_u32mf2(vuint8mf2_t src)1059 vuint32mf2_t test_vreinterpret_v_u8mf2_u32mf2(vuint8mf2_t src) {
1060   return vreinterpret_v_u8mf2_u32mf2(src);
1061 }
1062 
1063 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m1_u32m1(
1064 // CHECK-RV64-NEXT:  entry:
1065 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 2 x i32>
1066 // CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
1067 //
test_vreinterpret_v_u8m1_u32m1(vuint8m1_t src)1068 vuint32m1_t test_vreinterpret_v_u8m1_u32m1(vuint8m1_t src) {
1069   return vreinterpret_v_u8m1_u32m1(src);
1070 }
1071 
1072 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m2_u32m2(
1073 // CHECK-RV64-NEXT:  entry:
1074 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 4 x i32>
1075 // CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
1076 //
test_vreinterpret_v_u8m2_u32m2(vuint8m2_t src)1077 vuint32m2_t test_vreinterpret_v_u8m2_u32m2(vuint8m2_t src) {
1078   return vreinterpret_v_u8m2_u32m2(src);
1079 }
1080 
1081 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m4_u32m4(
1082 // CHECK-RV64-NEXT:  entry:
1083 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 8 x i32>
1084 // CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
1085 //
test_vreinterpret_v_u8m4_u32m4(vuint8m4_t src)1086 vuint32m4_t test_vreinterpret_v_u8m4_u32m4(vuint8m4_t src) {
1087   return vreinterpret_v_u8m4_u32m4(src);
1088 }
1089 
1090 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m8_u32m8(
1091 // CHECK-RV64-NEXT:  entry:
1092 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 16 x i32>
1093 // CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
1094 //
test_vreinterpret_v_u8m8_u32m8(vuint8m8_t src)1095 vuint32m8_t test_vreinterpret_v_u8m8_u32m8(vuint8m8_t src) {
1096   return vreinterpret_v_u8m8_u32m8(src);
1097 }
1098 
1099 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m1_i64m1(
1100 // CHECK-RV64-NEXT:  entry:
1101 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 1 x i64>
1102 // CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
1103 //
test_vreinterpret_v_i8m1_i64m1(vint8m1_t src)1104 vint64m1_t test_vreinterpret_v_i8m1_i64m1(vint8m1_t src) {
1105   return vreinterpret_v_i8m1_i64m1(src);
1106 }
1107 
1108 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m2_i64m2(
1109 // CHECK-RV64-NEXT:  entry:
1110 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 2 x i64>
1111 // CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
1112 //
test_vreinterpret_v_i8m2_i64m2(vint8m2_t src)1113 vint64m2_t test_vreinterpret_v_i8m2_i64m2(vint8m2_t src) {
1114   return vreinterpret_v_i8m2_i64m2(src);
1115 }
1116 
1117 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m4_i64m4(
1118 // CHECK-RV64-NEXT:  entry:
1119 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 4 x i64>
1120 // CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
1121 //
test_vreinterpret_v_i8m4_i64m4(vint8m4_t src)1122 vint64m4_t test_vreinterpret_v_i8m4_i64m4(vint8m4_t src) {
1123   return vreinterpret_v_i8m4_i64m4(src);
1124 }
1125 
1126 // CHECK-RV64-LABEL: @test_vreinterpret_v_i8m8_i64m8(
1127 // CHECK-RV64-NEXT:  entry:
1128 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 8 x i64>
1129 // CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
1130 //
test_vreinterpret_v_i8m8_i64m8(vint8m8_t src)1131 vint64m8_t test_vreinterpret_v_i8m8_i64m8(vint8m8_t src) {
1132   return vreinterpret_v_i8m8_i64m8(src);
1133 }
1134 
1135 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m1_u64m1(
1136 // CHECK-RV64-NEXT:  entry:
1137 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC:%.*]] to <vscale x 1 x i64>
1138 // CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
1139 //
test_vreinterpret_v_u8m1_u64m1(vuint8m1_t src)1140 vuint64m1_t test_vreinterpret_v_u8m1_u64m1(vuint8m1_t src) {
1141   return vreinterpret_v_u8m1_u64m1(src);
1142 }
1143 
1144 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m2_u64m2(
1145 // CHECK-RV64-NEXT:  entry:
1146 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i8> [[SRC:%.*]] to <vscale x 2 x i64>
1147 // CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
1148 //
test_vreinterpret_v_u8m2_u64m2(vuint8m2_t src)1149 vuint64m2_t test_vreinterpret_v_u8m2_u64m2(vuint8m2_t src) {
1150   return vreinterpret_v_u8m2_u64m2(src);
1151 }
1152 
1153 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m4_u64m4(
1154 // CHECK-RV64-NEXT:  entry:
1155 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i8> [[SRC:%.*]] to <vscale x 4 x i64>
1156 // CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
1157 //
test_vreinterpret_v_u8m4_u64m4(vuint8m4_t src)1158 vuint64m4_t test_vreinterpret_v_u8m4_u64m4(vuint8m4_t src) {
1159   return vreinterpret_v_u8m4_u64m4(src);
1160 }
1161 
1162 // CHECK-RV64-LABEL: @test_vreinterpret_v_u8m8_u64m8(
1163 // CHECK-RV64-NEXT:  entry:
1164 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 64 x i8> [[SRC:%.*]] to <vscale x 8 x i64>
1165 // CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
1166 //
test_vreinterpret_v_u8m8_u64m8(vuint8m8_t src)1167 vuint64m8_t test_vreinterpret_v_u8m8_u64m8(vuint8m8_t src) {
1168   return vreinterpret_v_u8m8_u64m8(src);
1169 }
1170 
1171 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16mf4_i8mf4(
1172 // CHECK-RV64-NEXT:  entry:
1173 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i16> [[SRC:%.*]] to <vscale x 2 x i8>
1174 // CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
1175 //
test_vreinterpret_v_i16mf4_i8mf4(vint16mf4_t src)1176 vint8mf4_t test_vreinterpret_v_i16mf4_i8mf4(vint16mf4_t src) {
1177   return vreinterpret_v_i16mf4_i8mf4(src);
1178 }
1179 
1180 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16mf2_i8mf2(
1181 // CHECK-RV64-NEXT:  entry:
1182 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC:%.*]] to <vscale x 4 x i8>
1183 // CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
1184 //
test_vreinterpret_v_i16mf2_i8mf2(vint16mf2_t src)1185 vint8mf2_t test_vreinterpret_v_i16mf2_i8mf2(vint16mf2_t src) {
1186   return vreinterpret_v_i16mf2_i8mf2(src);
1187 }
1188 
1189 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m1_i8m1(
1190 // CHECK-RV64-NEXT:  entry:
1191 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 8 x i8>
1192 // CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
1193 //
test_vreinterpret_v_i16m1_i8m1(vint16m1_t src)1194 vint8m1_t test_vreinterpret_v_i16m1_i8m1(vint16m1_t src) {
1195   return vreinterpret_v_i16m1_i8m1(src);
1196 }
1197 
1198 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m2_i8m2(
1199 // CHECK-RV64-NEXT:  entry:
1200 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 16 x i8>
1201 // CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
1202 //
test_vreinterpret_v_i16m2_i8m2(vint16m2_t src)1203 vint8m2_t test_vreinterpret_v_i16m2_i8m2(vint16m2_t src) {
1204   return vreinterpret_v_i16m2_i8m2(src);
1205 }
1206 
1207 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m4_i8m4(
1208 // CHECK-RV64-NEXT:  entry:
1209 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 32 x i8>
1210 // CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
1211 //
test_vreinterpret_v_i16m4_i8m4(vint16m4_t src)1212 vint8m4_t test_vreinterpret_v_i16m4_i8m4(vint16m4_t src) {
1213   return vreinterpret_v_i16m4_i8m4(src);
1214 }
1215 
1216 // CHECK-RV64-LABEL: @test_vreinterpret_v_i16m8_i8m8(
1217 // CHECK-RV64-NEXT:  entry:
1218 // CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 64 x i8>
1219 // CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
1220 //
test_vreinterpret_v_i16m8_i8m8(vint16m8_t src)1221 vint8m8_t test_vreinterpret_v_i16m8_i8m8(vint16m8_t src) {
1222   return vreinterpret_v_i16m8_i8m8(src);
1223 }
1224 
// CHECK-RV64-LABEL: @test_vreinterpret_v_u16mf4_u8mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i16> [[SRC:%.*]] to <vscale x 2 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
//
vuint8mf4_t test_vreinterpret_v_u16mf4_u8mf4(vuint16mf4_t src) {
  return vreinterpret_v_u16mf4_u8mf4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u16mf2_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC:%.*]] to <vscale x 4 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
//
vuint8mf2_t test_vreinterpret_v_u16mf2_u8mf2(vuint16mf2_t src) {
  return vreinterpret_v_u16mf2_u8mf2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m1_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 8 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vreinterpret_v_u16m1_u8m1(vuint16m1_t src) {
  return vreinterpret_v_u16m1_u8m1(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m2_u8m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 16 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
//
vuint8m2_t test_vreinterpret_v_u16m2_u8m2(vuint16m2_t src) {
  return vreinterpret_v_u16m2_u8m2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m4_u8m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 32 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
//
vuint8m4_t test_vreinterpret_v_u16m4_u8m4(vuint16m4_t src) {
  return vreinterpret_v_u16m4_u8m4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m8_u8m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 64 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
//
vuint8m8_t test_vreinterpret_v_u16m8_u8m8(vuint16m8_t src) {
  return vreinterpret_v_u16m8_u8m8(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i16mf2_i32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC:%.*]] to <vscale x 1 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
//
vint32mf2_t test_vreinterpret_v_i16mf2_i32mf2(vint16mf2_t src) {
  return vreinterpret_v_i16mf2_i32mf2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m1_i32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vint32m1_t test_vreinterpret_v_i16m1_i32m1(vint16m1_t src) {
  return vreinterpret_v_i16m1_i32m1(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m2_i32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 4 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
//
vint32m2_t test_vreinterpret_v_i16m2_i32m2(vint16m2_t src) {
  return vreinterpret_v_i16m2_i32m2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m4_i32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 8 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
//
vint32m4_t test_vreinterpret_v_i16m4_i32m4(vint16m4_t src) {
  return vreinterpret_v_i16m4_i32m4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m8_i32m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 16 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
//
vint32m8_t test_vreinterpret_v_i16m8_i32m8(vint16m8_t src) {
  return vreinterpret_v_i16m8_i32m8(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u16mf2_u32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i16> [[SRC:%.*]] to <vscale x 1 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
//
vuint32mf2_t test_vreinterpret_v_u16mf2_u32mf2(vuint16mf2_t src) {
  return vreinterpret_v_u16mf2_u32mf2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m1_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vreinterpret_v_u16m1_u32m1(vuint16m1_t src) {
  return vreinterpret_v_u16m1_u32m1(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m2_u32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 4 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
//
vuint32m2_t test_vreinterpret_v_u16m2_u32m2(vuint16m2_t src) {
  return vreinterpret_v_u16m2_u32m2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m4_u32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 8 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
//
vuint32m4_t test_vreinterpret_v_u16m4_u32m4(vuint16m4_t src) {
  return vreinterpret_v_u16m4_u32m4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m8_u32m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 16 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
//
vuint32m8_t test_vreinterpret_v_u16m8_u32m8(vuint16m8_t src) {
  return vreinterpret_v_u16m8_u32m8(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m1_i64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vreinterpret_v_i16m1_i64m1(vint16m1_t src) {
  return vreinterpret_v_i16m1_i64m1(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m2_i64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 2 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vint64m2_t test_vreinterpret_v_i16m2_i64m2(vint16m2_t src) {
  return vreinterpret_v_i16m2_i64m2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m4_i64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 4 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vint64m4_t test_vreinterpret_v_i16m4_i64m4(vint16m4_t src) {
  return vreinterpret_v_i16m4_i64m4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i16m8_i64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 8 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vint64m8_t test_vreinterpret_v_i16m8_i64m8(vint16m8_t src) {
  return vreinterpret_v_i16m8_i64m8(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m1_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC:%.*]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vreinterpret_v_u16m1_u64m1(vuint16m1_t src) {
  return vreinterpret_v_u16m1_u64m1(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m2_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i16> [[SRC:%.*]] to <vscale x 2 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vreinterpret_v_u16m2_u64m2(vuint16m2_t src) {
  return vreinterpret_v_u16m2_u64m2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m4_u64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i16> [[SRC:%.*]] to <vscale x 4 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vreinterpret_v_u16m4_u64m4(vuint16m4_t src) {
  return vreinterpret_v_u16m4_u64m4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u16m8_u64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 32 x i16> [[SRC:%.*]] to <vscale x 8 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vreinterpret_v_u16m8_u64m8(vuint16m8_t src) {
  return vreinterpret_v_u16m8_u64m8(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i32mf2_i8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 4 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
//
vint8mf2_t test_vreinterpret_v_i32mf2_i8mf2(vint32mf2_t src) {
  return vreinterpret_v_i32mf2_i8mf2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m1_i8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 8 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vreinterpret_v_i32m1_i8m1(vint32m1_t src) {
  return vreinterpret_v_i32m1_i8m1(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m2_i8m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 16 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
//
vint8m2_t test_vreinterpret_v_i32m2_i8m2(vint32m2_t src) {
  return vreinterpret_v_i32m2_i8m2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m4_i8m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 32 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
//
vint8m4_t test_vreinterpret_v_i32m4_i8m4(vint32m4_t src) {
  return vreinterpret_v_i32m4_i8m4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m8_i8m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 64 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
//
vint8m8_t test_vreinterpret_v_i32m8_i8m8(vint32m8_t src) {
  return vreinterpret_v_i32m8_i8m8(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u32mf2_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 4 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
//
vuint8mf2_t test_vreinterpret_v_u32mf2_u8mf2(vuint32mf2_t src) {
  return vreinterpret_v_u32mf2_u8mf2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m1_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 8 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vreinterpret_v_u32m1_u8m1(vuint32m1_t src) {
  return vreinterpret_v_u32m1_u8m1(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m2_u8m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 16 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
//
vuint8m2_t test_vreinterpret_v_u32m2_u8m2(vuint32m2_t src) {
  return vreinterpret_v_u32m2_u8m2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m4_u8m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 32 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
//
vuint8m4_t test_vreinterpret_v_u32m4_u8m4(vuint32m4_t src) {
  return vreinterpret_v_u32m4_u8m4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m8_u8m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 64 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
//
vuint8m8_t test_vreinterpret_v_u32m8_u8m8(vuint32m8_t src) {
  return vreinterpret_v_u32m8_u8m8(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i32mf2_i16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 2 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
//
vint16mf2_t test_vreinterpret_v_i32mf2_i16mf2(vint32mf2_t src) {
  return vreinterpret_v_i32mf2_i16mf2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m1_i16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 4 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vint16m1_t test_vreinterpret_v_i32m1_i16m1(vint32m1_t src) {
  return vreinterpret_v_i32m1_i16m1(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m2_i16m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 8 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
//
vint16m2_t test_vreinterpret_v_i32m2_i16m2(vint32m2_t src) {
  return vreinterpret_v_i32m2_i16m2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m4_i16m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 16 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
//
vint16m4_t test_vreinterpret_v_i32m4_i16m4(vint32m4_t src) {
  return vreinterpret_v_i32m4_i16m4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m8_i16m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 32 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
//
vint16m8_t test_vreinterpret_v_i32m8_i16m8(vint32m8_t src) {
  return vreinterpret_v_i32m8_i16m8(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u32mf2_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i32> [[SRC:%.*]] to <vscale x 2 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
//
vuint16mf2_t test_vreinterpret_v_u32mf2_u16mf2(vuint32mf2_t src) {
  return vreinterpret_v_u32mf2_u16mf2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m1_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 4 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vreinterpret_v_u32m1_u16m1(vuint32m1_t src) {
  return vreinterpret_v_u32m1_u16m1(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m2_u16m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 8 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
//
vuint16m2_t test_vreinterpret_v_u32m2_u16m2(vuint32m2_t src) {
  return vreinterpret_v_u32m2_u16m2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m4_u16m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 16 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
//
vuint16m4_t test_vreinterpret_v_u32m4_u16m4(vuint32m4_t src) {
  return vreinterpret_v_u32m4_u16m4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m8_u16m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 32 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
//
vuint16m8_t test_vreinterpret_v_u32m8_u16m8(vuint32m8_t src) {
  return vreinterpret_v_u32m8_u16m8(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m1_i64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vreinterpret_v_i32m1_i64m1(vint32m1_t src) {
  return vreinterpret_v_i32m1_i64m1(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m2_i64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 2 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vint64m2_t test_vreinterpret_v_i32m2_i64m2(vint32m2_t src) {
  return vreinterpret_v_i32m2_i64m2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m4_i64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 4 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vint64m4_t test_vreinterpret_v_i32m4_i64m4(vint32m4_t src) {
  return vreinterpret_v_i32m4_i64m4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i32m8_i64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 8 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vint64m8_t test_vreinterpret_v_i32m8_i64m8(vint32m8_t src) {
  return vreinterpret_v_i32m8_i64m8(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m1_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC:%.*]] to <vscale x 1 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vreinterpret_v_u32m1_u64m1(vuint32m1_t src) {
  return vreinterpret_v_u32m1_u64m1(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m2_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i32> [[SRC:%.*]] to <vscale x 2 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vreinterpret_v_u32m2_u64m2(vuint32m2_t src) {
  return vreinterpret_v_u32m2_u64m2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m4_u64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i32> [[SRC:%.*]] to <vscale x 4 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vreinterpret_v_u32m4_u64m4(vuint32m4_t src) {
  return vreinterpret_v_u32m4_u64m4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u32m8_u64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 16 x i32> [[SRC:%.*]] to <vscale x 8 x i64>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vreinterpret_v_u32m8_u64m8(vuint32m8_t src) {
  return vreinterpret_v_u32m8_u64m8(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m1_i8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 8 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vreinterpret_v_i64m1_i8m1(vint64m1_t src) {
  return vreinterpret_v_i64m1_i8m1(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m2_i8m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 16 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
//
vint8m2_t test_vreinterpret_v_i64m2_i8m2(vint64m2_t src) {
  return vreinterpret_v_i64m2_i8m2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m4_i8m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 32 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
//
vint8m4_t test_vreinterpret_v_i64m4_i8m4(vint64m4_t src) {
  return vreinterpret_v_i64m4_i8m4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m8_i8m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 64 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
//
vint8m8_t test_vreinterpret_v_i64m8_i8m8(vint64m8_t src) {
  return vreinterpret_v_i64m8_i8m8(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m1_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 8 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vreinterpret_v_u64m1_u8m1(vuint64m1_t src) {
  return vreinterpret_v_u64m1_u8m1(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m2_u8m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 16 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
//
vuint8m2_t test_vreinterpret_v_u64m2_u8m2(vuint64m2_t src) {
  return vreinterpret_v_u64m2_u8m2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m4_u8m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 32 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
//
vuint8m4_t test_vreinterpret_v_u64m4_u8m4(vuint64m4_t src) {
  return vreinterpret_v_u64m4_u8m4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m8_u8m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 64 x i8>
// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
//
vuint8m8_t test_vreinterpret_v_u64m8_u8m8(vuint64m8_t src) {
  return vreinterpret_v_u64m8_u8m8(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m1_i16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 4 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vint16m1_t test_vreinterpret_v_i64m1_i16m1(vint64m1_t src) {
  return vreinterpret_v_i64m1_i16m1(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m2_i16m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 8 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
//
vint16m2_t test_vreinterpret_v_i64m2_i16m2(vint64m2_t src) {
  return vreinterpret_v_i64m2_i16m2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m4_i16m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 16 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
//
vint16m4_t test_vreinterpret_v_i64m4_i16m4(vint64m4_t src) {
  return vreinterpret_v_i64m4_i16m4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m8_i16m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 32 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
//
vint16m8_t test_vreinterpret_v_i64m8_i16m8(vint64m8_t src) {
  return vreinterpret_v_i64m8_i16m8(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m1_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 4 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vreinterpret_v_u64m1_u16m1(vuint64m1_t src) {
  return vreinterpret_v_u64m1_u16m1(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m2_u16m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 8 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
//
vuint16m2_t test_vreinterpret_v_u64m2_u16m2(vuint64m2_t src) {
  return vreinterpret_v_u64m2_u16m2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m4_u16m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 16 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
//
vuint16m4_t test_vreinterpret_v_u64m4_u16m4(vuint64m4_t src) {
  return vreinterpret_v_u64m4_u16m4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m8_u16m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 32 x i16>
// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
//
vuint16m8_t test_vreinterpret_v_u64m8_u16m8(vuint64m8_t src) {
  return vreinterpret_v_u64m8_u16m8(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m1_i32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vint32m1_t test_vreinterpret_v_i64m1_i32m1(vint64m1_t src) {
  return vreinterpret_v_i64m1_i32m1(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m2_i32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 4 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
//
vint32m2_t test_vreinterpret_v_i64m2_i32m2(vint64m2_t src) {
  return vreinterpret_v_i64m2_i32m2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m4_i32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 8 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
//
vint32m4_t test_vreinterpret_v_i64m4_i32m4(vint64m4_t src) {
  return vreinterpret_v_i64m4_i32m4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_i64m8_i32m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 16 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
//
vint32m8_t test_vreinterpret_v_i64m8_i32m8(vint64m8_t src) {
  return vreinterpret_v_i64m8_i32m8(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m1_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC:%.*]] to <vscale x 2 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vreinterpret_v_u64m1_u32m1(vuint64m1_t src) {
  return vreinterpret_v_u64m1_u32m1(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m2_u32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i64> [[SRC:%.*]] to <vscale x 4 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
//
vuint32m2_t test_vreinterpret_v_u64m2_u32m2(vuint64m2_t src) {
  return vreinterpret_v_u64m2_u32m2(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m4_u32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i64> [[SRC:%.*]] to <vscale x 8 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
//
vuint32m4_t test_vreinterpret_v_u64m4_u32m4(vuint64m4_t src) {
  return vreinterpret_v_u64m4_u32m4(src);
}

// CHECK-RV64-LABEL: @test_vreinterpret_v_u64m8_u32m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i64> [[SRC:%.*]] to <vscale x 16 x i32>
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
//
vuint32m8_t test_vreinterpret_v_u64m8_u32m8(vuint64m8_t src) {
  return vreinterpret_v_u64m8_u32m8(src);
}