// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d \
// RUN:   -target-feature +experimental-v \
// RUN:   -target-feature +experimental-zvlsseg -disable-O0-optnone -emit-llvm %s \
// RUN:   -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>
9 
10 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i8mf8(
11 // CHECK-RV64-NEXT:  entry:
12 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
13 // CHECK-RV64-NEXT:    ret void
14 //
test_vsuxseg2ei8_v_i8mf8(int8_t * base,vuint8mf8_t bindex,vint8mf8_t v0,vint8mf8_t v1,size_t vl)15 void test_vsuxseg2ei8_v_i8mf8 (int8_t *base, vuint8mf8_t bindex, vint8mf8_t v0, vint8mf8_t v1, size_t vl) {
16   return vsuxseg2ei8_v_i8mf8(base, bindex, v0, v1, vl);
17 }
18 
19 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i8mf8(
20 // CHECK-RV64-NEXT:  entry:
21 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
22 // CHECK-RV64-NEXT:    ret void
23 //
test_vsuxseg3ei8_v_i8mf8(int8_t * base,vuint8mf8_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,size_t vl)24 void test_vsuxseg3ei8_v_i8mf8 (int8_t *base, vuint8mf8_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, size_t vl) {
25   return vsuxseg3ei8_v_i8mf8(base, bindex, v0, v1, v2, vl);
26 }
27 
28 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i8mf8(
29 // CHECK-RV64-NEXT:  entry:
30 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
31 // CHECK-RV64-NEXT:    ret void
32 //
test_vsuxseg4ei8_v_i8mf8(int8_t * base,vuint8mf8_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,size_t vl)33 void test_vsuxseg4ei8_v_i8mf8 (int8_t *base, vuint8mf8_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, size_t vl) {
34   return vsuxseg4ei8_v_i8mf8(base, bindex, v0, v1, v2, v3, vl);
35 }
36 
37 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i8mf8(
38 // CHECK-RV64-NEXT:  entry:
39 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
40 // CHECK-RV64-NEXT:    ret void
41 //
test_vsuxseg5ei8_v_i8mf8(int8_t * base,vuint8mf8_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,size_t vl)42 void test_vsuxseg5ei8_v_i8mf8 (int8_t *base, vuint8mf8_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, size_t vl) {
43   return vsuxseg5ei8_v_i8mf8(base, bindex, v0, v1, v2, v3, v4, vl);
44 }
45 
46 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i8mf8(
47 // CHECK-RV64-NEXT:  entry:
48 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
49 // CHECK-RV64-NEXT:    ret void
50 //
test_vsuxseg6ei8_v_i8mf8(int8_t * base,vuint8mf8_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,vint8mf8_t v5,size_t vl)51 void test_vsuxseg6ei8_v_i8mf8 (int8_t *base, vuint8mf8_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, size_t vl) {
52   return vsuxseg6ei8_v_i8mf8(base, bindex, v0, v1, v2, v3, v4, v5, vl);
53 }
54 
55 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i8mf8(
56 // CHECK-RV64-NEXT:  entry:
57 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
58 // CHECK-RV64-NEXT:    ret void
59 //
test_vsuxseg7ei8_v_i8mf8(int8_t * base,vuint8mf8_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,vint8mf8_t v5,vint8mf8_t v6,size_t vl)60 void test_vsuxseg7ei8_v_i8mf8 (int8_t *base, vuint8mf8_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, vint8mf8_t v6, size_t vl) {
61   return vsuxseg7ei8_v_i8mf8(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
62 }
63 
64 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i8mf8(
65 // CHECK-RV64-NEXT:  entry:
66 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], <vscale x 1 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
67 // CHECK-RV64-NEXT:    ret void
68 //
test_vsuxseg8ei8_v_i8mf8(int8_t * base,vuint8mf8_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,vint8mf8_t v5,vint8mf8_t v6,vint8mf8_t v7,size_t vl)69 void test_vsuxseg8ei8_v_i8mf8 (int8_t *base, vuint8mf8_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, vint8mf8_t v6, vint8mf8_t v7, size_t vl) {
70   return vsuxseg8ei8_v_i8mf8(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
71 }
72 
73 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i8mf4(
74 // CHECK-RV64-NEXT:  entry:
75 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
76 // CHECK-RV64-NEXT:    ret void
77 //
test_vsuxseg2ei8_v_i8mf4(int8_t * base,vuint8mf4_t bindex,vint8mf4_t v0,vint8mf4_t v1,size_t vl)78 void test_vsuxseg2ei8_v_i8mf4 (int8_t *base, vuint8mf4_t bindex, vint8mf4_t v0, vint8mf4_t v1, size_t vl) {
79   return vsuxseg2ei8_v_i8mf4(base, bindex, v0, v1, vl);
80 }
81 
82 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i8mf4(
83 // CHECK-RV64-NEXT:  entry:
84 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
85 // CHECK-RV64-NEXT:    ret void
86 //
test_vsuxseg3ei8_v_i8mf4(int8_t * base,vuint8mf4_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,size_t vl)87 void test_vsuxseg3ei8_v_i8mf4 (int8_t *base, vuint8mf4_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, size_t vl) {
88   return vsuxseg3ei8_v_i8mf4(base, bindex, v0, v1, v2, vl);
89 }
90 
91 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i8mf4(
92 // CHECK-RV64-NEXT:  entry:
93 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
94 // CHECK-RV64-NEXT:    ret void
95 //
test_vsuxseg4ei8_v_i8mf4(int8_t * base,vuint8mf4_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,size_t vl)96 void test_vsuxseg4ei8_v_i8mf4 (int8_t *base, vuint8mf4_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, size_t vl) {
97   return vsuxseg4ei8_v_i8mf4(base, bindex, v0, v1, v2, v3, vl);
98 }
99 
100 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i8mf4(
101 // CHECK-RV64-NEXT:  entry:
102 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
103 // CHECK-RV64-NEXT:    ret void
104 //
test_vsuxseg5ei8_v_i8mf4(int8_t * base,vuint8mf4_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,vint8mf4_t v4,size_t vl)105 void test_vsuxseg5ei8_v_i8mf4 (int8_t *base, vuint8mf4_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, size_t vl) {
106   return vsuxseg5ei8_v_i8mf4(base, bindex, v0, v1, v2, v3, v4, vl);
107 }
108 
109 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i8mf4(
110 // CHECK-RV64-NEXT:  entry:
111 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
112 // CHECK-RV64-NEXT:    ret void
113 //
test_vsuxseg6ei8_v_i8mf4(int8_t * base,vuint8mf4_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,vint8mf4_t v4,vint8mf4_t v5,size_t vl)114 void test_vsuxseg6ei8_v_i8mf4 (int8_t *base, vuint8mf4_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, size_t vl) {
115   return vsuxseg6ei8_v_i8mf4(base, bindex, v0, v1, v2, v3, v4, v5, vl);
116 }
117 
118 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i8mf4(
119 // CHECK-RV64-NEXT:  entry:
120 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
121 // CHECK-RV64-NEXT:    ret void
122 //
test_vsuxseg7ei8_v_i8mf4(int8_t * base,vuint8mf4_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,vint8mf4_t v4,vint8mf4_t v5,vint8mf4_t v6,size_t vl)123 void test_vsuxseg7ei8_v_i8mf4 (int8_t *base, vuint8mf4_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, vint8mf4_t v6, size_t vl) {
124   return vsuxseg7ei8_v_i8mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
125 }
126 
127 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i8mf4(
128 // CHECK-RV64-NEXT:  entry:
129 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], <vscale x 2 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
130 // CHECK-RV64-NEXT:    ret void
131 //
test_vsuxseg8ei8_v_i8mf4(int8_t * base,vuint8mf4_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,vint8mf4_t v4,vint8mf4_t v5,vint8mf4_t v6,vint8mf4_t v7,size_t vl)132 void test_vsuxseg8ei8_v_i8mf4 (int8_t *base, vuint8mf4_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, vint8mf4_t v6, vint8mf4_t v7, size_t vl) {
133   return vsuxseg8ei8_v_i8mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
134 }
135 
136 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i8mf2(
137 // CHECK-RV64-NEXT:  entry:
138 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
139 // CHECK-RV64-NEXT:    ret void
140 //
test_vsuxseg2ei8_v_i8mf2(int8_t * base,vuint8mf2_t bindex,vint8mf2_t v0,vint8mf2_t v1,size_t vl)141 void test_vsuxseg2ei8_v_i8mf2 (int8_t *base, vuint8mf2_t bindex, vint8mf2_t v0, vint8mf2_t v1, size_t vl) {
142   return vsuxseg2ei8_v_i8mf2(base, bindex, v0, v1, vl);
143 }
144 
145 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i8mf2(
146 // CHECK-RV64-NEXT:  entry:
147 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
148 // CHECK-RV64-NEXT:    ret void
149 //
test_vsuxseg3ei8_v_i8mf2(int8_t * base,vuint8mf2_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,size_t vl)150 void test_vsuxseg3ei8_v_i8mf2 (int8_t *base, vuint8mf2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, size_t vl) {
151   return vsuxseg3ei8_v_i8mf2(base, bindex, v0, v1, v2, vl);
152 }
153 
154 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i8mf2(
155 // CHECK-RV64-NEXT:  entry:
156 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
157 // CHECK-RV64-NEXT:    ret void
158 //
test_vsuxseg4ei8_v_i8mf2(int8_t * base,vuint8mf2_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,size_t vl)159 void test_vsuxseg4ei8_v_i8mf2 (int8_t *base, vuint8mf2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, size_t vl) {
160   return vsuxseg4ei8_v_i8mf2(base, bindex, v0, v1, v2, v3, vl);
161 }
162 
163 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i8mf2(
164 // CHECK-RV64-NEXT:  entry:
165 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
166 // CHECK-RV64-NEXT:    ret void
167 //
test_vsuxseg5ei8_v_i8mf2(int8_t * base,vuint8mf2_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,vint8mf2_t v4,size_t vl)168 void test_vsuxseg5ei8_v_i8mf2 (int8_t *base, vuint8mf2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, size_t vl) {
169   return vsuxseg5ei8_v_i8mf2(base, bindex, v0, v1, v2, v3, v4, vl);
170 }
171 
172 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i8mf2(
173 // CHECK-RV64-NEXT:  entry:
174 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
175 // CHECK-RV64-NEXT:    ret void
176 //
test_vsuxseg6ei8_v_i8mf2(int8_t * base,vuint8mf2_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,vint8mf2_t v4,vint8mf2_t v5,size_t vl)177 void test_vsuxseg6ei8_v_i8mf2 (int8_t *base, vuint8mf2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, size_t vl) {
178   return vsuxseg6ei8_v_i8mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
179 }
180 
181 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i8mf2(
182 // CHECK-RV64-NEXT:  entry:
183 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
184 // CHECK-RV64-NEXT:    ret void
185 //
test_vsuxseg7ei8_v_i8mf2(int8_t * base,vuint8mf2_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,vint8mf2_t v4,vint8mf2_t v5,vint8mf2_t v6,size_t vl)186 void test_vsuxseg7ei8_v_i8mf2 (int8_t *base, vuint8mf2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, vint8mf2_t v6, size_t vl) {
187   return vsuxseg7ei8_v_i8mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
188 }
189 
190 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i8mf2(
191 // CHECK-RV64-NEXT:  entry:
192 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], <vscale x 4 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
193 // CHECK-RV64-NEXT:    ret void
194 //
test_vsuxseg8ei8_v_i8mf2(int8_t * base,vuint8mf2_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,vint8mf2_t v4,vint8mf2_t v5,vint8mf2_t v6,vint8mf2_t v7,size_t vl)195 void test_vsuxseg8ei8_v_i8mf2 (int8_t *base, vuint8mf2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, vint8mf2_t v6, vint8mf2_t v7, size_t vl) {
196   return vsuxseg8ei8_v_i8mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
197 }
198 
199 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i8m1(
200 // CHECK-RV64-NEXT:  entry:
201 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
202 // CHECK-RV64-NEXT:    ret void
203 //
test_vsuxseg2ei8_v_i8m1(int8_t * base,vuint8m1_t bindex,vint8m1_t v0,vint8m1_t v1,size_t vl)204 void test_vsuxseg2ei8_v_i8m1 (int8_t *base, vuint8m1_t bindex, vint8m1_t v0, vint8m1_t v1, size_t vl) {
205   return vsuxseg2ei8_v_i8m1(base, bindex, v0, v1, vl);
206 }
207 
208 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i8m1(
209 // CHECK-RV64-NEXT:  entry:
210 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
211 // CHECK-RV64-NEXT:    ret void
212 //
test_vsuxseg3ei8_v_i8m1(int8_t * base,vuint8m1_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,size_t vl)213 void test_vsuxseg3ei8_v_i8m1 (int8_t *base, vuint8m1_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, size_t vl) {
214   return vsuxseg3ei8_v_i8m1(base, bindex, v0, v1, v2, vl);
215 }
216 
217 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i8m1(
218 // CHECK-RV64-NEXT:  entry:
219 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
220 // CHECK-RV64-NEXT:    ret void
221 //
test_vsuxseg4ei8_v_i8m1(int8_t * base,vuint8m1_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,vint8m1_t v3,size_t vl)222 void test_vsuxseg4ei8_v_i8m1 (int8_t *base, vuint8m1_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, size_t vl) {
223   return vsuxseg4ei8_v_i8m1(base, bindex, v0, v1, v2, v3, vl);
224 }
225 
226 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i8m1(
227 // CHECK-RV64-NEXT:  entry:
228 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
229 // CHECK-RV64-NEXT:    ret void
230 //
test_vsuxseg5ei8_v_i8m1(int8_t * base,vuint8m1_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,vint8m1_t v3,vint8m1_t v4,size_t vl)231 void test_vsuxseg5ei8_v_i8m1 (int8_t *base, vuint8m1_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, size_t vl) {
232   return vsuxseg5ei8_v_i8m1(base, bindex, v0, v1, v2, v3, v4, vl);
233 }
234 
235 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i8m1(
236 // CHECK-RV64-NEXT:  entry:
237 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
238 // CHECK-RV64-NEXT:    ret void
239 //
test_vsuxseg6ei8_v_i8m1(int8_t * base,vuint8m1_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,vint8m1_t v3,vint8m1_t v4,vint8m1_t v5,size_t vl)240 void test_vsuxseg6ei8_v_i8m1 (int8_t *base, vuint8m1_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, size_t vl) {
241   return vsuxseg6ei8_v_i8m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
242 }
243 
244 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i8m1(
245 // CHECK-RV64-NEXT:  entry:
246 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
247 // CHECK-RV64-NEXT:    ret void
248 //
test_vsuxseg7ei8_v_i8m1(int8_t * base,vuint8m1_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,vint8m1_t v3,vint8m1_t v4,vint8m1_t v5,vint8m1_t v6,size_t vl)249 void test_vsuxseg7ei8_v_i8m1 (int8_t *base, vuint8m1_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, vint8m1_t v6, size_t vl) {
250   return vsuxseg7ei8_v_i8m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
251 }
252 
253 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i8m1(
254 // CHECK-RV64-NEXT:  entry:
255 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], <vscale x 8 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
256 // CHECK-RV64-NEXT:    ret void
257 //
test_vsuxseg8ei8_v_i8m1(int8_t * base,vuint8m1_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,vint8m1_t v3,vint8m1_t v4,vint8m1_t v5,vint8m1_t v6,vint8m1_t v7,size_t vl)258 void test_vsuxseg8ei8_v_i8m1 (int8_t *base, vuint8m1_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, vint8m1_t v6, vint8m1_t v7, size_t vl) {
259   return vsuxseg8ei8_v_i8m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
260 }
261 
262 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i8m2(
263 // CHECK-RV64-NEXT:  entry:
264 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
265 // CHECK-RV64-NEXT:    ret void
266 //
test_vsuxseg2ei8_v_i8m2(int8_t * base,vuint8m2_t bindex,vint8m2_t v0,vint8m2_t v1,size_t vl)267 void test_vsuxseg2ei8_v_i8m2 (int8_t *base, vuint8m2_t bindex, vint8m2_t v0, vint8m2_t v1, size_t vl) {
268   return vsuxseg2ei8_v_i8m2(base, bindex, v0, v1, vl);
269 }
270 
271 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i8m2(
272 // CHECK-RV64-NEXT:  entry:
273 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
274 // CHECK-RV64-NEXT:    ret void
275 //
test_vsuxseg3ei8_v_i8m2(int8_t * base,vuint8m2_t bindex,vint8m2_t v0,vint8m2_t v1,vint8m2_t v2,size_t vl)276 void test_vsuxseg3ei8_v_i8m2 (int8_t *base, vuint8m2_t bindex, vint8m2_t v0, vint8m2_t v1, vint8m2_t v2, size_t vl) {
277   return vsuxseg3ei8_v_i8m2(base, bindex, v0, v1, v2, vl);
278 }
279 
280 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i8m2(
281 // CHECK-RV64-NEXT:  entry:
282 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], <vscale x 16 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
283 // CHECK-RV64-NEXT:    ret void
284 //
test_vsuxseg4ei8_v_i8m2(int8_t * base,vuint8m2_t bindex,vint8m2_t v0,vint8m2_t v1,vint8m2_t v2,vint8m2_t v3,size_t vl)285 void test_vsuxseg4ei8_v_i8m2 (int8_t *base, vuint8m2_t bindex, vint8m2_t v0, vint8m2_t v1, vint8m2_t v2, vint8m2_t v3, size_t vl) {
286   return vsuxseg4ei8_v_i8m2(base, bindex, v0, v1, v2, v3, vl);
287 }
288 
289 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i8m4(
290 // CHECK-RV64-NEXT:  entry:
291 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[V0:%.*]], <vscale x 32 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
292 // CHECK-RV64-NEXT:    ret void
293 //
test_vsuxseg2ei8_v_i8m4(int8_t * base,vuint8m4_t bindex,vint8m4_t v0,vint8m4_t v1,size_t vl)294 void test_vsuxseg2ei8_v_i8m4 (int8_t *base, vuint8m4_t bindex, vint8m4_t v0, vint8m4_t v1, size_t vl) {
295   return vsuxseg2ei8_v_i8m4(base, bindex, v0, v1, vl);
296 }
297 
298 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i8mf8(
299 // CHECK-RV64-NEXT:  entry:
300 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
301 // CHECK-RV64-NEXT:    ret void
302 //
test_vsuxseg2ei16_v_i8mf8(int8_t * base,vuint16mf4_t bindex,vint8mf8_t v0,vint8mf8_t v1,size_t vl)303 void test_vsuxseg2ei16_v_i8mf8 (int8_t *base, vuint16mf4_t bindex, vint8mf8_t v0, vint8mf8_t v1, size_t vl) {
304   return vsuxseg2ei16_v_i8mf8(base, bindex, v0, v1, vl);
305 }
306 
307 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i8mf8(
308 // CHECK-RV64-NEXT:  entry:
309 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
310 // CHECK-RV64-NEXT:    ret void
311 //
test_vsuxseg3ei16_v_i8mf8(int8_t * base,vuint16mf4_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,size_t vl)312 void test_vsuxseg3ei16_v_i8mf8 (int8_t *base, vuint16mf4_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, size_t vl) {
313   return vsuxseg3ei16_v_i8mf8(base, bindex, v0, v1, v2, vl);
314 }
315 
316 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i8mf8(
317 // CHECK-RV64-NEXT:  entry:
318 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
319 // CHECK-RV64-NEXT:    ret void
320 //
test_vsuxseg4ei16_v_i8mf8(int8_t * base,vuint16mf4_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,size_t vl)321 void test_vsuxseg4ei16_v_i8mf8 (int8_t *base, vuint16mf4_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, size_t vl) {
322   return vsuxseg4ei16_v_i8mf8(base, bindex, v0, v1, v2, v3, vl);
323 }
324 
325 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i8mf8(
326 // CHECK-RV64-NEXT:  entry:
327 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
328 // CHECK-RV64-NEXT:    ret void
329 //
test_vsuxseg5ei16_v_i8mf8(int8_t * base,vuint16mf4_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,size_t vl)330 void test_vsuxseg5ei16_v_i8mf8 (int8_t *base, vuint16mf4_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, size_t vl) {
331   return vsuxseg5ei16_v_i8mf8(base, bindex, v0, v1, v2, v3, v4, vl);
332 }
333 
334 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i8mf8(
335 // CHECK-RV64-NEXT:  entry:
336 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
337 // CHECK-RV64-NEXT:    ret void
338 //
test_vsuxseg6ei16_v_i8mf8(int8_t * base,vuint16mf4_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,vint8mf8_t v5,size_t vl)339 void test_vsuxseg6ei16_v_i8mf8 (int8_t *base, vuint16mf4_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, size_t vl) {
340   return vsuxseg6ei16_v_i8mf8(base, bindex, v0, v1, v2, v3, v4, v5, vl);
341 }
342 
343 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i8mf8(
344 // CHECK-RV64-NEXT:  entry:
345 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
346 // CHECK-RV64-NEXT:    ret void
347 //
test_vsuxseg7ei16_v_i8mf8(int8_t * base,vuint16mf4_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,vint8mf8_t v5,vint8mf8_t v6,size_t vl)348 void test_vsuxseg7ei16_v_i8mf8 (int8_t *base, vuint16mf4_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, vint8mf8_t v6, size_t vl) {
349   return vsuxseg7ei16_v_i8mf8(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
350 }
351 
352 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i8mf8(
353 // CHECK-RV64-NEXT:  entry:
354 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], <vscale x 1 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
355 // CHECK-RV64-NEXT:    ret void
356 //
test_vsuxseg8ei16_v_i8mf8(int8_t * base,vuint16mf4_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,vint8mf8_t v5,vint8mf8_t v6,vint8mf8_t v7,size_t vl)357 void test_vsuxseg8ei16_v_i8mf8 (int8_t *base, vuint16mf4_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, vint8mf8_t v6, vint8mf8_t v7, size_t vl) {
358   return vsuxseg8ei16_v_i8mf8(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
359 }
360 
361 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i8mf4(
362 // CHECK-RV64-NEXT:  entry:
363 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
364 // CHECK-RV64-NEXT:    ret void
365 //
test_vsuxseg2ei16_v_i8mf4(int8_t * base,vuint16mf2_t bindex,vint8mf4_t v0,vint8mf4_t v1,size_t vl)366 void test_vsuxseg2ei16_v_i8mf4 (int8_t *base, vuint16mf2_t bindex, vint8mf4_t v0, vint8mf4_t v1, size_t vl) {
367   return vsuxseg2ei16_v_i8mf4(base, bindex, v0, v1, vl);
368 }
369 
370 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i8mf4(
371 // CHECK-RV64-NEXT:  entry:
372 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
373 // CHECK-RV64-NEXT:    ret void
374 //
test_vsuxseg3ei16_v_i8mf4(int8_t * base,vuint16mf2_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,size_t vl)375 void test_vsuxseg3ei16_v_i8mf4 (int8_t *base, vuint16mf2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, size_t vl) {
376   return vsuxseg3ei16_v_i8mf4(base, bindex, v0, v1, v2, vl);
377 }
378 
379 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i8mf4(
380 // CHECK-RV64-NEXT:  entry:
381 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
382 // CHECK-RV64-NEXT:    ret void
383 //
test_vsuxseg4ei16_v_i8mf4(int8_t * base,vuint16mf2_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,size_t vl)384 void test_vsuxseg4ei16_v_i8mf4 (int8_t *base, vuint16mf2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, size_t vl) {
385   return vsuxseg4ei16_v_i8mf4(base, bindex, v0, v1, v2, v3, vl);
386 }
387 
388 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i8mf4(
389 // CHECK-RV64-NEXT:  entry:
390 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
391 // CHECK-RV64-NEXT:    ret void
392 //
test_vsuxseg5ei16_v_i8mf4(int8_t * base,vuint16mf2_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,vint8mf4_t v4,size_t vl)393 void test_vsuxseg5ei16_v_i8mf4 (int8_t *base, vuint16mf2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, size_t vl) {
394   return vsuxseg5ei16_v_i8mf4(base, bindex, v0, v1, v2, v3, v4, vl);
395 }
396 
397 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i8mf4(
398 // CHECK-RV64-NEXT:  entry:
399 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
400 // CHECK-RV64-NEXT:    ret void
401 //
test_vsuxseg6ei16_v_i8mf4(int8_t * base,vuint16mf2_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,vint8mf4_t v4,vint8mf4_t v5,size_t vl)402 void test_vsuxseg6ei16_v_i8mf4 (int8_t *base, vuint16mf2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, size_t vl) {
403   return vsuxseg6ei16_v_i8mf4(base, bindex, v0, v1, v2, v3, v4, v5, vl);
404 }
405 
406 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i8mf4(
407 // CHECK-RV64-NEXT:  entry:
408 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
409 // CHECK-RV64-NEXT:    ret void
410 //
test_vsuxseg7ei16_v_i8mf4(int8_t * base,vuint16mf2_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,vint8mf4_t v4,vint8mf4_t v5,vint8mf4_t v6,size_t vl)411 void test_vsuxseg7ei16_v_i8mf4 (int8_t *base, vuint16mf2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, vint8mf4_t v6, size_t vl) {
412   return vsuxseg7ei16_v_i8mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
413 }
414 
415 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i8mf4(
416 // CHECK-RV64-NEXT:  entry:
417 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], <vscale x 2 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
418 // CHECK-RV64-NEXT:    ret void
419 //
test_vsuxseg8ei16_v_i8mf4(int8_t * base,vuint16mf2_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,vint8mf4_t v4,vint8mf4_t v5,vint8mf4_t v6,vint8mf4_t v7,size_t vl)420 void test_vsuxseg8ei16_v_i8mf4 (int8_t *base, vuint16mf2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, vint8mf4_t v6, vint8mf4_t v7, size_t vl) {
421   return vsuxseg8ei16_v_i8mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
422 }
423 
424 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i8mf2(
425 // CHECK-RV64-NEXT:  entry:
426 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
427 // CHECK-RV64-NEXT:    ret void
428 //
test_vsuxseg2ei16_v_i8mf2(int8_t * base,vuint16m1_t bindex,vint8mf2_t v0,vint8mf2_t v1,size_t vl)429 void test_vsuxseg2ei16_v_i8mf2 (int8_t *base, vuint16m1_t bindex, vint8mf2_t v0, vint8mf2_t v1, size_t vl) {
430   return vsuxseg2ei16_v_i8mf2(base, bindex, v0, v1, vl);
431 }
432 
433 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i8mf2(
434 // CHECK-RV64-NEXT:  entry:
435 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
436 // CHECK-RV64-NEXT:    ret void
437 //
test_vsuxseg3ei16_v_i8mf2(int8_t * base,vuint16m1_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,size_t vl)438 void test_vsuxseg3ei16_v_i8mf2 (int8_t *base, vuint16m1_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, size_t vl) {
439   return vsuxseg3ei16_v_i8mf2(base, bindex, v0, v1, v2, vl);
440 }
441 
442 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i8mf2(
443 // CHECK-RV64-NEXT:  entry:
444 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
445 // CHECK-RV64-NEXT:    ret void
446 //
test_vsuxseg4ei16_v_i8mf2(int8_t * base,vuint16m1_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,size_t vl)447 void test_vsuxseg4ei16_v_i8mf2 (int8_t *base, vuint16m1_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, size_t vl) {
448   return vsuxseg4ei16_v_i8mf2(base, bindex, v0, v1, v2, v3, vl);
449 }
450 
451 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i8mf2(
452 // CHECK-RV64-NEXT:  entry:
453 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
454 // CHECK-RV64-NEXT:    ret void
455 //
test_vsuxseg5ei16_v_i8mf2(int8_t * base,vuint16m1_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,vint8mf2_t v4,size_t vl)456 void test_vsuxseg5ei16_v_i8mf2 (int8_t *base, vuint16m1_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, size_t vl) {
457   return vsuxseg5ei16_v_i8mf2(base, bindex, v0, v1, v2, v3, v4, vl);
458 }
459 
460 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i8mf2(
461 // CHECK-RV64-NEXT:  entry:
462 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
463 // CHECK-RV64-NEXT:    ret void
464 //
test_vsuxseg6ei16_v_i8mf2(int8_t * base,vuint16m1_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,vint8mf2_t v4,vint8mf2_t v5,size_t vl)465 void test_vsuxseg6ei16_v_i8mf2 (int8_t *base, vuint16m1_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, size_t vl) {
466   return vsuxseg6ei16_v_i8mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
467 }
468 
469 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i8mf2(
470 // CHECK-RV64-NEXT:  entry:
471 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
472 // CHECK-RV64-NEXT:    ret void
473 //
test_vsuxseg7ei16_v_i8mf2(int8_t * base,vuint16m1_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,vint8mf2_t v4,vint8mf2_t v5,vint8mf2_t v6,size_t vl)474 void test_vsuxseg7ei16_v_i8mf2 (int8_t *base, vuint16m1_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, vint8mf2_t v6, size_t vl) {
475   return vsuxseg7ei16_v_i8mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
476 }
477 
478 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i8mf2(
479 // CHECK-RV64-NEXT:  entry:
480 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], <vscale x 4 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
481 // CHECK-RV64-NEXT:    ret void
482 //
test_vsuxseg8ei16_v_i8mf2(int8_t * base,vuint16m1_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,vint8mf2_t v4,vint8mf2_t v5,vint8mf2_t v6,vint8mf2_t v7,size_t vl)483 void test_vsuxseg8ei16_v_i8mf2 (int8_t *base, vuint16m1_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, vint8mf2_t v6, vint8mf2_t v7, size_t vl) {
484   return vsuxseg8ei16_v_i8mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
485 }
486 
487 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i8m1(
488 // CHECK-RV64-NEXT:  entry:
489 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
490 // CHECK-RV64-NEXT:    ret void
491 //
test_vsuxseg2ei16_v_i8m1(int8_t * base,vuint16m2_t bindex,vint8m1_t v0,vint8m1_t v1,size_t vl)492 void test_vsuxseg2ei16_v_i8m1 (int8_t *base, vuint16m2_t bindex, vint8m1_t v0, vint8m1_t v1, size_t vl) {
493   return vsuxseg2ei16_v_i8m1(base, bindex, v0, v1, vl);
494 }
495 
496 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i8m1(
497 // CHECK-RV64-NEXT:  entry:
498 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
499 // CHECK-RV64-NEXT:    ret void
500 //
test_vsuxseg3ei16_v_i8m1(int8_t * base,vuint16m2_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,size_t vl)501 void test_vsuxseg3ei16_v_i8m1 (int8_t *base, vuint16m2_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, size_t vl) {
502   return vsuxseg3ei16_v_i8m1(base, bindex, v0, v1, v2, vl);
503 }
504 
505 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i8m1(
506 // CHECK-RV64-NEXT:  entry:
507 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
508 // CHECK-RV64-NEXT:    ret void
509 //
test_vsuxseg4ei16_v_i8m1(int8_t * base,vuint16m2_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,vint8m1_t v3,size_t vl)510 void test_vsuxseg4ei16_v_i8m1 (int8_t *base, vuint16m2_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, size_t vl) {
511   return vsuxseg4ei16_v_i8m1(base, bindex, v0, v1, v2, v3, vl);
512 }
513 
514 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i8m1(
515 // CHECK-RV64-NEXT:  entry:
516 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
517 // CHECK-RV64-NEXT:    ret void
518 //
test_vsuxseg5ei16_v_i8m1(int8_t * base,vuint16m2_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,vint8m1_t v3,vint8m1_t v4,size_t vl)519 void test_vsuxseg5ei16_v_i8m1 (int8_t *base, vuint16m2_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, size_t vl) {
520   return vsuxseg5ei16_v_i8m1(base, bindex, v0, v1, v2, v3, v4, vl);
521 }
522 
523 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i8m1(
524 // CHECK-RV64-NEXT:  entry:
525 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
526 // CHECK-RV64-NEXT:    ret void
527 //
test_vsuxseg6ei16_v_i8m1(int8_t * base,vuint16m2_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,vint8m1_t v3,vint8m1_t v4,vint8m1_t v5,size_t vl)528 void test_vsuxseg6ei16_v_i8m1 (int8_t *base, vuint16m2_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, size_t vl) {
529   return vsuxseg6ei16_v_i8m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
530 }
531 
532 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i8m1(
533 // CHECK-RV64-NEXT:  entry:
534 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
535 // CHECK-RV64-NEXT:    ret void
536 //
test_vsuxseg7ei16_v_i8m1(int8_t * base,vuint16m2_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,vint8m1_t v3,vint8m1_t v4,vint8m1_t v5,vint8m1_t v6,size_t vl)537 void test_vsuxseg7ei16_v_i8m1 (int8_t *base, vuint16m2_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, vint8m1_t v6, size_t vl) {
538   return vsuxseg7ei16_v_i8m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
539 }
540 
541 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i8m1(
542 // CHECK-RV64-NEXT:  entry:
543 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], <vscale x 8 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
544 // CHECK-RV64-NEXT:    ret void
545 //
test_vsuxseg8ei16_v_i8m1(int8_t * base,vuint16m2_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,vint8m1_t v3,vint8m1_t v4,vint8m1_t v5,vint8m1_t v6,vint8m1_t v7,size_t vl)546 void test_vsuxseg8ei16_v_i8m1 (int8_t *base, vuint16m2_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, vint8m1_t v6, vint8m1_t v7, size_t vl) {
547   return vsuxseg8ei16_v_i8m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
548 }
549 
550 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i8m2(
551 // CHECK-RV64-NEXT:  entry:
552 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
553 // CHECK-RV64-NEXT:    ret void
554 //
test_vsuxseg2ei16_v_i8m2(int8_t * base,vuint16m4_t bindex,vint8m2_t v0,vint8m2_t v1,size_t vl)555 void test_vsuxseg2ei16_v_i8m2 (int8_t *base, vuint16m4_t bindex, vint8m2_t v0, vint8m2_t v1, size_t vl) {
556   return vsuxseg2ei16_v_i8m2(base, bindex, v0, v1, vl);
557 }
558 
559 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i8m2(
560 // CHECK-RV64-NEXT:  entry:
561 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
562 // CHECK-RV64-NEXT:    ret void
563 //
test_vsuxseg3ei16_v_i8m2(int8_t * base,vuint16m4_t bindex,vint8m2_t v0,vint8m2_t v1,vint8m2_t v2,size_t vl)564 void test_vsuxseg3ei16_v_i8m2 (int8_t *base, vuint16m4_t bindex, vint8m2_t v0, vint8m2_t v1, vint8m2_t v2, size_t vl) {
565   return vsuxseg3ei16_v_i8m2(base, bindex, v0, v1, v2, vl);
566 }
567 
568 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i8m2(
569 // CHECK-RV64-NEXT:  entry:
570 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], <vscale x 16 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
571 // CHECK-RV64-NEXT:    ret void
572 //
test_vsuxseg4ei16_v_i8m2(int8_t * base,vuint16m4_t bindex,vint8m2_t v0,vint8m2_t v1,vint8m2_t v2,vint8m2_t v3,size_t vl)573 void test_vsuxseg4ei16_v_i8m2 (int8_t *base, vuint16m4_t bindex, vint8m2_t v0, vint8m2_t v1, vint8m2_t v2, vint8m2_t v3, size_t vl) {
574   return vsuxseg4ei16_v_i8m2(base, bindex, v0, v1, v2, v3, vl);
575 }
576 
577 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i8m4(
578 // CHECK-RV64-NEXT:  entry:
579 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[V0:%.*]], <vscale x 32 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
580 // CHECK-RV64-NEXT:    ret void
581 //
test_vsuxseg2ei16_v_i8m4(int8_t * base,vuint16m8_t bindex,vint8m4_t v0,vint8m4_t v1,size_t vl)582 void test_vsuxseg2ei16_v_i8m4 (int8_t *base, vuint16m8_t bindex, vint8m4_t v0, vint8m4_t v1, size_t vl) {
583   return vsuxseg2ei16_v_i8m4(base, bindex, v0, v1, vl);
584 }
585 
586 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i8mf8(
587 // CHECK-RV64-NEXT:  entry:
588 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
589 // CHECK-RV64-NEXT:    ret void
590 //
test_vsuxseg2ei32_v_i8mf8(int8_t * base,vuint32mf2_t bindex,vint8mf8_t v0,vint8mf8_t v1,size_t vl)591 void test_vsuxseg2ei32_v_i8mf8 (int8_t *base, vuint32mf2_t bindex, vint8mf8_t v0, vint8mf8_t v1, size_t vl) {
592   return vsuxseg2ei32_v_i8mf8(base, bindex, v0, v1, vl);
593 }
594 
595 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i8mf8(
596 // CHECK-RV64-NEXT:  entry:
597 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
598 // CHECK-RV64-NEXT:    ret void
599 //
test_vsuxseg3ei32_v_i8mf8(int8_t * base,vuint32mf2_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,size_t vl)600 void test_vsuxseg3ei32_v_i8mf8 (int8_t *base, vuint32mf2_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, size_t vl) {
601   return vsuxseg3ei32_v_i8mf8(base, bindex, v0, v1, v2, vl);
602 }
603 
604 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i8mf8(
605 // CHECK-RV64-NEXT:  entry:
606 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
607 // CHECK-RV64-NEXT:    ret void
608 //
test_vsuxseg4ei32_v_i8mf8(int8_t * base,vuint32mf2_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,size_t vl)609 void test_vsuxseg4ei32_v_i8mf8 (int8_t *base, vuint32mf2_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, size_t vl) {
610   return vsuxseg4ei32_v_i8mf8(base, bindex, v0, v1, v2, v3, vl);
611 }
612 
613 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i8mf8(
614 // CHECK-RV64-NEXT:  entry:
615 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
616 // CHECK-RV64-NEXT:    ret void
617 //
test_vsuxseg5ei32_v_i8mf8(int8_t * base,vuint32mf2_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,size_t vl)618 void test_vsuxseg5ei32_v_i8mf8 (int8_t *base, vuint32mf2_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, size_t vl) {
619   return vsuxseg5ei32_v_i8mf8(base, bindex, v0, v1, v2, v3, v4, vl);
620 }
621 
622 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i8mf8(
623 // CHECK-RV64-NEXT:  entry:
624 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
625 // CHECK-RV64-NEXT:    ret void
626 //
test_vsuxseg6ei32_v_i8mf8(int8_t * base,vuint32mf2_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,vint8mf8_t v5,size_t vl)627 void test_vsuxseg6ei32_v_i8mf8 (int8_t *base, vuint32mf2_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, size_t vl) {
628   return vsuxseg6ei32_v_i8mf8(base, bindex, v0, v1, v2, v3, v4, v5, vl);
629 }
630 
631 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i8mf8(
632 // CHECK-RV64-NEXT:  entry:
633 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
634 // CHECK-RV64-NEXT:    ret void
635 //
test_vsuxseg7ei32_v_i8mf8(int8_t * base,vuint32mf2_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,vint8mf8_t v5,vint8mf8_t v6,size_t vl)636 void test_vsuxseg7ei32_v_i8mf8 (int8_t *base, vuint32mf2_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, vint8mf8_t v6, size_t vl) {
637   return vsuxseg7ei32_v_i8mf8(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
638 }
639 
640 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i8mf8(
641 // CHECK-RV64-NEXT:  entry:
642 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], <vscale x 1 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
643 // CHECK-RV64-NEXT:    ret void
644 //
test_vsuxseg8ei32_v_i8mf8(int8_t * base,vuint32mf2_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,vint8mf8_t v5,vint8mf8_t v6,vint8mf8_t v7,size_t vl)645 void test_vsuxseg8ei32_v_i8mf8 (int8_t *base, vuint32mf2_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, vint8mf8_t v6, vint8mf8_t v7, size_t vl) {
646   return vsuxseg8ei32_v_i8mf8(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
647 }
648 
649 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i8mf4(
650 // CHECK-RV64-NEXT:  entry:
651 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
652 // CHECK-RV64-NEXT:    ret void
653 //
test_vsuxseg2ei32_v_i8mf4(int8_t * base,vuint32m1_t bindex,vint8mf4_t v0,vint8mf4_t v1,size_t vl)654 void test_vsuxseg2ei32_v_i8mf4 (int8_t *base, vuint32m1_t bindex, vint8mf4_t v0, vint8mf4_t v1, size_t vl) {
655   return vsuxseg2ei32_v_i8mf4(base, bindex, v0, v1, vl);
656 }
657 
658 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i8mf4(
659 // CHECK-RV64-NEXT:  entry:
660 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
661 // CHECK-RV64-NEXT:    ret void
662 //
test_vsuxseg3ei32_v_i8mf4(int8_t * base,vuint32m1_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,size_t vl)663 void test_vsuxseg3ei32_v_i8mf4 (int8_t *base, vuint32m1_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, size_t vl) {
664   return vsuxseg3ei32_v_i8mf4(base, bindex, v0, v1, v2, vl);
665 }
666 
667 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i8mf4(
668 // CHECK-RV64-NEXT:  entry:
669 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
670 // CHECK-RV64-NEXT:    ret void
671 //
test_vsuxseg4ei32_v_i8mf4(int8_t * base,vuint32m1_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,size_t vl)672 void test_vsuxseg4ei32_v_i8mf4 (int8_t *base, vuint32m1_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, size_t vl) {
673   return vsuxseg4ei32_v_i8mf4(base, bindex, v0, v1, v2, v3, vl);
674 }
675 
676 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i8mf4(
677 // CHECK-RV64-NEXT:  entry:
678 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
679 // CHECK-RV64-NEXT:    ret void
680 //
test_vsuxseg5ei32_v_i8mf4(int8_t * base,vuint32m1_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,vint8mf4_t v4,size_t vl)681 void test_vsuxseg5ei32_v_i8mf4 (int8_t *base, vuint32m1_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, size_t vl) {
682   return vsuxseg5ei32_v_i8mf4(base, bindex, v0, v1, v2, v3, v4, vl);
683 }
684 
685 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i8mf4(
686 // CHECK-RV64-NEXT:  entry:
687 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
688 // CHECK-RV64-NEXT:    ret void
689 //
test_vsuxseg6ei32_v_i8mf4(int8_t * base,vuint32m1_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,vint8mf4_t v4,vint8mf4_t v5,size_t vl)690 void test_vsuxseg6ei32_v_i8mf4 (int8_t *base, vuint32m1_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, size_t vl) {
691   return vsuxseg6ei32_v_i8mf4(base, bindex, v0, v1, v2, v3, v4, v5, vl);
692 }
693 
694 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i8mf4(
695 // CHECK-RV64-NEXT:  entry:
696 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
697 // CHECK-RV64-NEXT:    ret void
698 //
test_vsuxseg7ei32_v_i8mf4(int8_t * base,vuint32m1_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,vint8mf4_t v4,vint8mf4_t v5,vint8mf4_t v6,size_t vl)699 void test_vsuxseg7ei32_v_i8mf4 (int8_t *base, vuint32m1_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, vint8mf4_t v6, size_t vl) {
700   return vsuxseg7ei32_v_i8mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
701 }
702 
703 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i8mf4(
704 // CHECK-RV64-NEXT:  entry:
705 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], <vscale x 2 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
706 // CHECK-RV64-NEXT:    ret void
707 //
test_vsuxseg8ei32_v_i8mf4(int8_t * base,vuint32m1_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,vint8mf4_t v4,vint8mf4_t v5,vint8mf4_t v6,vint8mf4_t v7,size_t vl)708 void test_vsuxseg8ei32_v_i8mf4 (int8_t *base, vuint32m1_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, vint8mf4_t v6, vint8mf4_t v7, size_t vl) {
709   return vsuxseg8ei32_v_i8mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
710 }
711 
712 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i8mf2(
713 // CHECK-RV64-NEXT:  entry:
714 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
715 // CHECK-RV64-NEXT:    ret void
716 //
test_vsuxseg2ei32_v_i8mf2(int8_t * base,vuint32m2_t bindex,vint8mf2_t v0,vint8mf2_t v1,size_t vl)717 void test_vsuxseg2ei32_v_i8mf2 (int8_t *base, vuint32m2_t bindex, vint8mf2_t v0, vint8mf2_t v1, size_t vl) {
718   return vsuxseg2ei32_v_i8mf2(base, bindex, v0, v1, vl);
719 }
720 
721 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i8mf2(
722 // CHECK-RV64-NEXT:  entry:
723 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
724 // CHECK-RV64-NEXT:    ret void
725 //
test_vsuxseg3ei32_v_i8mf2(int8_t * base,vuint32m2_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,size_t vl)726 void test_vsuxseg3ei32_v_i8mf2 (int8_t *base, vuint32m2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, size_t vl) {
727   return vsuxseg3ei32_v_i8mf2(base, bindex, v0, v1, v2, vl);
728 }
729 
730 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i8mf2(
731 // CHECK-RV64-NEXT:  entry:
732 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
733 // CHECK-RV64-NEXT:    ret void
734 //
test_vsuxseg4ei32_v_i8mf2(int8_t * base,vuint32m2_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,size_t vl)735 void test_vsuxseg4ei32_v_i8mf2 (int8_t *base, vuint32m2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, size_t vl) {
736   return vsuxseg4ei32_v_i8mf2(base, bindex, v0, v1, v2, v3, vl);
737 }
738 
739 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i8mf2(
740 // CHECK-RV64-NEXT:  entry:
741 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
742 // CHECK-RV64-NEXT:    ret void
743 //
test_vsuxseg5ei32_v_i8mf2(int8_t * base,vuint32m2_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,vint8mf2_t v4,size_t vl)744 void test_vsuxseg5ei32_v_i8mf2 (int8_t *base, vuint32m2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, size_t vl) {
745   return vsuxseg5ei32_v_i8mf2(base, bindex, v0, v1, v2, v3, v4, vl);
746 }
747 
748 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i8mf2(
749 // CHECK-RV64-NEXT:  entry:
750 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
751 // CHECK-RV64-NEXT:    ret void
752 //
test_vsuxseg6ei32_v_i8mf2(int8_t * base,vuint32m2_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,vint8mf2_t v4,vint8mf2_t v5,size_t vl)753 void test_vsuxseg6ei32_v_i8mf2 (int8_t *base, vuint32m2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, size_t vl) {
754   return vsuxseg6ei32_v_i8mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
755 }
756 
757 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i8mf2(
758 // CHECK-RV64-NEXT:  entry:
759 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
760 // CHECK-RV64-NEXT:    ret void
761 //
test_vsuxseg7ei32_v_i8mf2(int8_t * base,vuint32m2_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,vint8mf2_t v4,vint8mf2_t v5,vint8mf2_t v6,size_t vl)762 void test_vsuxseg7ei32_v_i8mf2 (int8_t *base, vuint32m2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, vint8mf2_t v6, size_t vl) {
763   return vsuxseg7ei32_v_i8mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
764 }
765 
766 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i8mf2(
767 // CHECK-RV64-NEXT:  entry:
768 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], <vscale x 4 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
769 // CHECK-RV64-NEXT:    ret void
770 //
test_vsuxseg8ei32_v_i8mf2(int8_t * base,vuint32m2_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,vint8mf2_t v4,vint8mf2_t v5,vint8mf2_t v6,vint8mf2_t v7,size_t vl)771 void test_vsuxseg8ei32_v_i8mf2 (int8_t *base, vuint32m2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, vint8mf2_t v6, vint8mf2_t v7, size_t vl) {
772   return vsuxseg8ei32_v_i8mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
773 }
774 
775 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i8m1(
776 // CHECK-RV64-NEXT:  entry:
777 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
778 // CHECK-RV64-NEXT:    ret void
779 //
test_vsuxseg2ei32_v_i8m1(int8_t * base,vuint32m4_t bindex,vint8m1_t v0,vint8m1_t v1,size_t vl)780 void test_vsuxseg2ei32_v_i8m1 (int8_t *base, vuint32m4_t bindex, vint8m1_t v0, vint8m1_t v1, size_t vl) {
781   return vsuxseg2ei32_v_i8m1(base, bindex, v0, v1, vl);
782 }
783 
784 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i8m1(
785 // CHECK-RV64-NEXT:  entry:
786 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
787 // CHECK-RV64-NEXT:    ret void
788 //
test_vsuxseg3ei32_v_i8m1(int8_t * base,vuint32m4_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,size_t vl)789 void test_vsuxseg3ei32_v_i8m1 (int8_t *base, vuint32m4_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, size_t vl) {
790   return vsuxseg3ei32_v_i8m1(base, bindex, v0, v1, v2, vl);
791 }
792 
793 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i8m1(
794 // CHECK-RV64-NEXT:  entry:
795 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
796 // CHECK-RV64-NEXT:    ret void
797 //
test_vsuxseg4ei32_v_i8m1(int8_t * base,vuint32m4_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,vint8m1_t v3,size_t vl)798 void test_vsuxseg4ei32_v_i8m1 (int8_t *base, vuint32m4_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, size_t vl) {
799   return vsuxseg4ei32_v_i8m1(base, bindex, v0, v1, v2, v3, vl);
800 }
801 
802 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i8m1(
803 // CHECK-RV64-NEXT:  entry:
804 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
805 // CHECK-RV64-NEXT:    ret void
806 //
test_vsuxseg5ei32_v_i8m1(int8_t * base,vuint32m4_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,vint8m1_t v3,vint8m1_t v4,size_t vl)807 void test_vsuxseg5ei32_v_i8m1 (int8_t *base, vuint32m4_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, size_t vl) {
808   return vsuxseg5ei32_v_i8m1(base, bindex, v0, v1, v2, v3, v4, vl);
809 }
810 
811 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i8m1(
812 // CHECK-RV64-NEXT:  entry:
813 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
814 // CHECK-RV64-NEXT:    ret void
815 //
test_vsuxseg6ei32_v_i8m1(int8_t * base,vuint32m4_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,vint8m1_t v3,vint8m1_t v4,vint8m1_t v5,size_t vl)816 void test_vsuxseg6ei32_v_i8m1 (int8_t *base, vuint32m4_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, size_t vl) {
817   return vsuxseg6ei32_v_i8m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
818 }
819 
820 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i8m1(
821 // CHECK-RV64-NEXT:  entry:
822 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
823 // CHECK-RV64-NEXT:    ret void
824 //
test_vsuxseg7ei32_v_i8m1(int8_t * base,vuint32m4_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,vint8m1_t v3,vint8m1_t v4,vint8m1_t v5,vint8m1_t v6,size_t vl)825 void test_vsuxseg7ei32_v_i8m1 (int8_t *base, vuint32m4_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, vint8m1_t v6, size_t vl) {
826   return vsuxseg7ei32_v_i8m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
827 }
828 
829 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i8m1(
830 // CHECK-RV64-NEXT:  entry:
831 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], <vscale x 8 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
832 // CHECK-RV64-NEXT:    ret void
833 //
test_vsuxseg8ei32_v_i8m1(int8_t * base,vuint32m4_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,vint8m1_t v3,vint8m1_t v4,vint8m1_t v5,vint8m1_t v6,vint8m1_t v7,size_t vl)834 void test_vsuxseg8ei32_v_i8m1 (int8_t *base, vuint32m4_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, vint8m1_t v6, vint8m1_t v7, size_t vl) {
835   return vsuxseg8ei32_v_i8m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
836 }
837 
838 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i8m2(
839 // CHECK-RV64-NEXT:  entry:
840 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
841 // CHECK-RV64-NEXT:    ret void
842 //
test_vsuxseg2ei32_v_i8m2(int8_t * base,vuint32m8_t bindex,vint8m2_t v0,vint8m2_t v1,size_t vl)843 void test_vsuxseg2ei32_v_i8m2 (int8_t *base, vuint32m8_t bindex, vint8m2_t v0, vint8m2_t v1, size_t vl) {
844   return vsuxseg2ei32_v_i8m2(base, bindex, v0, v1, vl);
845 }
846 
847 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i8m2(
848 // CHECK-RV64-NEXT:  entry:
849 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
850 // CHECK-RV64-NEXT:    ret void
851 //
test_vsuxseg3ei32_v_i8m2(int8_t * base,vuint32m8_t bindex,vint8m2_t v0,vint8m2_t v1,vint8m2_t v2,size_t vl)852 void test_vsuxseg3ei32_v_i8m2 (int8_t *base, vuint32m8_t bindex, vint8m2_t v0, vint8m2_t v1, vint8m2_t v2, size_t vl) {
853   return vsuxseg3ei32_v_i8m2(base, bindex, v0, v1, v2, vl);
854 }
855 
856 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i8m2(
857 // CHECK-RV64-NEXT:  entry:
858 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], <vscale x 16 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
859 // CHECK-RV64-NEXT:    ret void
860 //
test_vsuxseg4ei32_v_i8m2(int8_t * base,vuint32m8_t bindex,vint8m2_t v0,vint8m2_t v1,vint8m2_t v2,vint8m2_t v3,size_t vl)861 void test_vsuxseg4ei32_v_i8m2 (int8_t *base, vuint32m8_t bindex, vint8m2_t v0, vint8m2_t v1, vint8m2_t v2, vint8m2_t v3, size_t vl) {
862   return vsuxseg4ei32_v_i8m2(base, bindex, v0, v1, v2, v3, vl);
863 }
864 
865 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i8mf8(
866 // CHECK-RV64-NEXT:  entry:
867 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
868 // CHECK-RV64-NEXT:    ret void
869 //
test_vsuxseg2ei64_v_i8mf8(int8_t * base,vuint64m1_t bindex,vint8mf8_t v0,vint8mf8_t v1,size_t vl)870 void test_vsuxseg2ei64_v_i8mf8 (int8_t *base, vuint64m1_t bindex, vint8mf8_t v0, vint8mf8_t v1, size_t vl) {
871   return vsuxseg2ei64_v_i8mf8(base, bindex, v0, v1, vl);
872 }
873 
874 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i8mf8(
875 // CHECK-RV64-NEXT:  entry:
876 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
877 // CHECK-RV64-NEXT:    ret void
878 //
test_vsuxseg3ei64_v_i8mf8(int8_t * base,vuint64m1_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,size_t vl)879 void test_vsuxseg3ei64_v_i8mf8 (int8_t *base, vuint64m1_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, size_t vl) {
880   return vsuxseg3ei64_v_i8mf8(base, bindex, v0, v1, v2, vl);
881 }
882 
883 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i8mf8(
884 // CHECK-RV64-NEXT:  entry:
885 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
886 // CHECK-RV64-NEXT:    ret void
887 //
test_vsuxseg4ei64_v_i8mf8(int8_t * base,vuint64m1_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,size_t vl)888 void test_vsuxseg4ei64_v_i8mf8 (int8_t *base, vuint64m1_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, size_t vl) {
889   return vsuxseg4ei64_v_i8mf8(base, bindex, v0, v1, v2, v3, vl);
890 }
891 
892 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i8mf8(
893 // CHECK-RV64-NEXT:  entry:
894 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
895 // CHECK-RV64-NEXT:    ret void
896 //
test_vsuxseg5ei64_v_i8mf8(int8_t * base,vuint64m1_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,size_t vl)897 void test_vsuxseg5ei64_v_i8mf8 (int8_t *base, vuint64m1_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, size_t vl) {
898   return vsuxseg5ei64_v_i8mf8(base, bindex, v0, v1, v2, v3, v4, vl);
899 }
900 
901 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i8mf8(
902 // CHECK-RV64-NEXT:  entry:
903 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
904 // CHECK-RV64-NEXT:    ret void
905 //
test_vsuxseg6ei64_v_i8mf8(int8_t * base,vuint64m1_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,vint8mf8_t v5,size_t vl)906 void test_vsuxseg6ei64_v_i8mf8 (int8_t *base, vuint64m1_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, size_t vl) {
907   return vsuxseg6ei64_v_i8mf8(base, bindex, v0, v1, v2, v3, v4, v5, vl);
908 }
909 
910 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i8mf8(
911 // CHECK-RV64-NEXT:  entry:
912 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
913 // CHECK-RV64-NEXT:    ret void
914 //
test_vsuxseg7ei64_v_i8mf8(int8_t * base,vuint64m1_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,vint8mf8_t v5,vint8mf8_t v6,size_t vl)915 void test_vsuxseg7ei64_v_i8mf8 (int8_t *base, vuint64m1_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, vint8mf8_t v6, size_t vl) {
916   return vsuxseg7ei64_v_i8mf8(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
917 }
918 
919 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i8mf8(
920 // CHECK-RV64-NEXT:  entry:
921 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], <vscale x 1 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
922 // CHECK-RV64-NEXT:    ret void
923 //
test_vsuxseg8ei64_v_i8mf8(int8_t * base,vuint64m1_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,vint8mf8_t v5,vint8mf8_t v6,vint8mf8_t v7,size_t vl)924 void test_vsuxseg8ei64_v_i8mf8 (int8_t *base, vuint64m1_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, vint8mf8_t v6, vint8mf8_t v7, size_t vl) {
925   return vsuxseg8ei64_v_i8mf8(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
926 }
927 
928 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i8mf4(
929 // CHECK-RV64-NEXT:  entry:
930 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
931 // CHECK-RV64-NEXT:    ret void
932 //
test_vsuxseg2ei64_v_i8mf4(int8_t * base,vuint64m2_t bindex,vint8mf4_t v0,vint8mf4_t v1,size_t vl)933 void test_vsuxseg2ei64_v_i8mf4 (int8_t *base, vuint64m2_t bindex, vint8mf4_t v0, vint8mf4_t v1, size_t vl) {
934   return vsuxseg2ei64_v_i8mf4(base, bindex, v0, v1, vl);
935 }
936 
937 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i8mf4(
938 // CHECK-RV64-NEXT:  entry:
939 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
940 // CHECK-RV64-NEXT:    ret void
941 //
test_vsuxseg3ei64_v_i8mf4(int8_t * base,vuint64m2_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,size_t vl)942 void test_vsuxseg3ei64_v_i8mf4 (int8_t *base, vuint64m2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, size_t vl) {
943   return vsuxseg3ei64_v_i8mf4(base, bindex, v0, v1, v2, vl);
944 }
945 
946 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i8mf4(
947 // CHECK-RV64-NEXT:  entry:
948 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
949 // CHECK-RV64-NEXT:    ret void
950 //
test_vsuxseg4ei64_v_i8mf4(int8_t * base,vuint64m2_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,size_t vl)951 void test_vsuxseg4ei64_v_i8mf4 (int8_t *base, vuint64m2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, size_t vl) {
952   return vsuxseg4ei64_v_i8mf4(base, bindex, v0, v1, v2, v3, vl);
953 }
954 
955 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i8mf4(
956 // CHECK-RV64-NEXT:  entry:
957 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
958 // CHECK-RV64-NEXT:    ret void
959 //
test_vsuxseg5ei64_v_i8mf4(int8_t * base,vuint64m2_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,vint8mf4_t v4,size_t vl)960 void test_vsuxseg5ei64_v_i8mf4 (int8_t *base, vuint64m2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, size_t vl) {
961   return vsuxseg5ei64_v_i8mf4(base, bindex, v0, v1, v2, v3, v4, vl);
962 }
963 
964 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i8mf4(
965 // CHECK-RV64-NEXT:  entry:
966 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
967 // CHECK-RV64-NEXT:    ret void
968 //
test_vsuxseg6ei64_v_i8mf4(int8_t * base,vuint64m2_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,vint8mf4_t v4,vint8mf4_t v5,size_t vl)969 void test_vsuxseg6ei64_v_i8mf4 (int8_t *base, vuint64m2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, size_t vl) {
970   return vsuxseg6ei64_v_i8mf4(base, bindex, v0, v1, v2, v3, v4, v5, vl);
971 }
972 
973 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i8mf4(
974 // CHECK-RV64-NEXT:  entry:
975 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
976 // CHECK-RV64-NEXT:    ret void
977 //
test_vsuxseg7ei64_v_i8mf4(int8_t * base,vuint64m2_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,vint8mf4_t v4,vint8mf4_t v5,vint8mf4_t v6,size_t vl)978 void test_vsuxseg7ei64_v_i8mf4 (int8_t *base, vuint64m2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, vint8mf4_t v6, size_t vl) {
979   return vsuxseg7ei64_v_i8mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
980 }
981 
982 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i8mf4(
983 // CHECK-RV64-NEXT:  entry:
984 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], <vscale x 2 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
985 // CHECK-RV64-NEXT:    ret void
986 //
test_vsuxseg8ei64_v_i8mf4(int8_t * base,vuint64m2_t bindex,vint8mf4_t v0,vint8mf4_t v1,vint8mf4_t v2,vint8mf4_t v3,vint8mf4_t v4,vint8mf4_t v5,vint8mf4_t v6,vint8mf4_t v7,size_t vl)987 void test_vsuxseg8ei64_v_i8mf4 (int8_t *base, vuint64m2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, vint8mf4_t v6, vint8mf4_t v7, size_t vl) {
988   return vsuxseg8ei64_v_i8mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
989 }
990 
991 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i8mf2(
992 // CHECK-RV64-NEXT:  entry:
993 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
994 // CHECK-RV64-NEXT:    ret void
995 //
test_vsuxseg2ei64_v_i8mf2(int8_t * base,vuint64m4_t bindex,vint8mf2_t v0,vint8mf2_t v1,size_t vl)996 void test_vsuxseg2ei64_v_i8mf2 (int8_t *base, vuint64m4_t bindex, vint8mf2_t v0, vint8mf2_t v1, size_t vl) {
997   return vsuxseg2ei64_v_i8mf2(base, bindex, v0, v1, vl);
998 }
999 
1000 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i8mf2(
1001 // CHECK-RV64-NEXT:  entry:
1002 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1003 // CHECK-RV64-NEXT:    ret void
1004 //
test_vsuxseg3ei64_v_i8mf2(int8_t * base,vuint64m4_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,size_t vl)1005 void test_vsuxseg3ei64_v_i8mf2 (int8_t *base, vuint64m4_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, size_t vl) {
1006   return vsuxseg3ei64_v_i8mf2(base, bindex, v0, v1, v2, vl);
1007 }
1008 
1009 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i8mf2(
1010 // CHECK-RV64-NEXT:  entry:
1011 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1012 // CHECK-RV64-NEXT:    ret void
1013 //
test_vsuxseg4ei64_v_i8mf2(int8_t * base,vuint64m4_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,size_t vl)1014 void test_vsuxseg4ei64_v_i8mf2 (int8_t *base, vuint64m4_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, size_t vl) {
1015   return vsuxseg4ei64_v_i8mf2(base, bindex, v0, v1, v2, v3, vl);
1016 }
1017 
1018 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i8mf2(
1019 // CHECK-RV64-NEXT:  entry:
1020 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1021 // CHECK-RV64-NEXT:    ret void
1022 //
test_vsuxseg5ei64_v_i8mf2(int8_t * base,vuint64m4_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,vint8mf2_t v4,size_t vl)1023 void test_vsuxseg5ei64_v_i8mf2 (int8_t *base, vuint64m4_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, size_t vl) {
1024   return vsuxseg5ei64_v_i8mf2(base, bindex, v0, v1, v2, v3, v4, vl);
1025 }
1026 
1027 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i8mf2(
1028 // CHECK-RV64-NEXT:  entry:
1029 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1030 // CHECK-RV64-NEXT:    ret void
1031 //
test_vsuxseg6ei64_v_i8mf2(int8_t * base,vuint64m4_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,vint8mf2_t v4,vint8mf2_t v5,size_t vl)1032 void test_vsuxseg6ei64_v_i8mf2 (int8_t *base, vuint64m4_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, size_t vl) {
1033   return vsuxseg6ei64_v_i8mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
1034 }
1035 
1036 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i8mf2(
1037 // CHECK-RV64-NEXT:  entry:
1038 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1039 // CHECK-RV64-NEXT:    ret void
1040 //
test_vsuxseg7ei64_v_i8mf2(int8_t * base,vuint64m4_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,vint8mf2_t v4,vint8mf2_t v5,vint8mf2_t v6,size_t vl)1041 void test_vsuxseg7ei64_v_i8mf2 (int8_t *base, vuint64m4_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, vint8mf2_t v6, size_t vl) {
1042   return vsuxseg7ei64_v_i8mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
1043 }
1044 
1045 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i8mf2(
1046 // CHECK-RV64-NEXT:  entry:
1047 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], <vscale x 4 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1048 // CHECK-RV64-NEXT:    ret void
1049 //
test_vsuxseg8ei64_v_i8mf2(int8_t * base,vuint64m4_t bindex,vint8mf2_t v0,vint8mf2_t v1,vint8mf2_t v2,vint8mf2_t v3,vint8mf2_t v4,vint8mf2_t v5,vint8mf2_t v6,vint8mf2_t v7,size_t vl)1050 void test_vsuxseg8ei64_v_i8mf2 (int8_t *base, vuint64m4_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, vint8mf2_t v6, vint8mf2_t v7, size_t vl) {
1051   return vsuxseg8ei64_v_i8mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
1052 }
1053 
1054 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i8m1(
1055 // CHECK-RV64-NEXT:  entry:
1056 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1057 // CHECK-RV64-NEXT:    ret void
1058 //
test_vsuxseg2ei64_v_i8m1(int8_t * base,vuint64m8_t bindex,vint8m1_t v0,vint8m1_t v1,size_t vl)1059 void test_vsuxseg2ei64_v_i8m1 (int8_t *base, vuint64m8_t bindex, vint8m1_t v0, vint8m1_t v1, size_t vl) {
1060   return vsuxseg2ei64_v_i8m1(base, bindex, v0, v1, vl);
1061 }
1062 
1063 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i8m1(
1064 // CHECK-RV64-NEXT:  entry:
1065 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1066 // CHECK-RV64-NEXT:    ret void
1067 //
test_vsuxseg3ei64_v_i8m1(int8_t * base,vuint64m8_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,size_t vl)1068 void test_vsuxseg3ei64_v_i8m1 (int8_t *base, vuint64m8_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, size_t vl) {
1069   return vsuxseg3ei64_v_i8m1(base, bindex, v0, v1, v2, vl);
1070 }
1071 
1072 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i8m1(
1073 // CHECK-RV64-NEXT:  entry:
1074 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1075 // CHECK-RV64-NEXT:    ret void
1076 //
test_vsuxseg4ei64_v_i8m1(int8_t * base,vuint64m8_t bindex,vint8m1_t v0,vint8m1_t v1,vint8m1_t v2,vint8m1_t v3,size_t vl)1077 void test_vsuxseg4ei64_v_i8m1 (int8_t *base, vuint64m8_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, size_t vl) {
1078   return vsuxseg4ei64_v_i8m1(base, bindex, v0, v1, v2, v3, vl);
1079 }
1080 
1081 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i8m1(
1082 // CHECK-RV64-NEXT:  entry:
1083 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1084 // CHECK-RV64-NEXT:    ret void
1085 //
void test_vsuxseg5ei64_v_i8m1 (int8_t *base, vuint64m8_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, size_t vl) {
  return vsuxseg5ei64_v_i8m1(base, bindex, v0, v1, v2, v3, v4, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg6ei64_v_i8m1 (int8_t *base, vuint64m8_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, size_t vl) {
  return vsuxseg6ei64_v_i8m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg7ei64_v_i8m1 (int8_t *base, vuint64m8_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, vint8m1_t v6, size_t vl) {
  return vsuxseg7ei64_v_i8m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], <vscale x 8 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg8ei64_v_i8m1 (int8_t *base, vuint64m8_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, vint8m1_t v6, vint8m1_t v7, size_t vl) {
  return vsuxseg8ei64_v_i8m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
1116 
1117 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i16mf4(
1118 // CHECK-RV64-NEXT:  entry:
1119 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1120 // CHECK-RV64-NEXT:    ret void
1121 //
test_vsuxseg2ei8_v_i16mf4(int16_t * base,vuint8mf8_t bindex,vint16mf4_t v0,vint16mf4_t v1,size_t vl)1122 void test_vsuxseg2ei8_v_i16mf4 (int16_t *base, vuint8mf8_t bindex, vint16mf4_t v0, vint16mf4_t v1, size_t vl) {
1123   return vsuxseg2ei8_v_i16mf4(base, bindex, v0, v1, vl);
1124 }
1125 
1126 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i16mf4(
1127 // CHECK-RV64-NEXT:  entry:
1128 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1129 // CHECK-RV64-NEXT:    ret void
1130 //
test_vsuxseg3ei8_v_i16mf4(int16_t * base,vuint8mf8_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,size_t vl)1131 void test_vsuxseg3ei8_v_i16mf4 (int16_t *base, vuint8mf8_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, size_t vl) {
1132   return vsuxseg3ei8_v_i16mf4(base, bindex, v0, v1, v2, vl);
1133 }
1134 
1135 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i16mf4(
1136 // CHECK-RV64-NEXT:  entry:
1137 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1138 // CHECK-RV64-NEXT:    ret void
1139 //
test_vsuxseg4ei8_v_i16mf4(int16_t * base,vuint8mf8_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,size_t vl)1140 void test_vsuxseg4ei8_v_i16mf4 (int16_t *base, vuint8mf8_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, size_t vl) {
1141   return vsuxseg4ei8_v_i16mf4(base, bindex, v0, v1, v2, v3, vl);
1142 }
1143 
1144 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i16mf4(
1145 // CHECK-RV64-NEXT:  entry:
1146 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1147 // CHECK-RV64-NEXT:    ret void
1148 //
test_vsuxseg5ei8_v_i16mf4(int16_t * base,vuint8mf8_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,size_t vl)1149 void test_vsuxseg5ei8_v_i16mf4 (int16_t *base, vuint8mf8_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, size_t vl) {
1150   return vsuxseg5ei8_v_i16mf4(base, bindex, v0, v1, v2, v3, v4, vl);
1151 }
1152 
1153 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i16mf4(
1154 // CHECK-RV64-NEXT:  entry:
1155 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1156 // CHECK-RV64-NEXT:    ret void
1157 //
test_vsuxseg6ei8_v_i16mf4(int16_t * base,vuint8mf8_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,vint16mf4_t v5,size_t vl)1158 void test_vsuxseg6ei8_v_i16mf4 (int16_t *base, vuint8mf8_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, size_t vl) {
1159   return vsuxseg6ei8_v_i16mf4(base, bindex, v0, v1, v2, v3, v4, v5, vl);
1160 }
1161 
1162 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i16mf4(
1163 // CHECK-RV64-NEXT:  entry:
1164 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1165 // CHECK-RV64-NEXT:    ret void
1166 //
test_vsuxseg7ei8_v_i16mf4(int16_t * base,vuint8mf8_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,vint16mf4_t v5,vint16mf4_t v6,size_t vl)1167 void test_vsuxseg7ei8_v_i16mf4 (int16_t *base, vuint8mf8_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, vint16mf4_t v6, size_t vl) {
1168   return vsuxseg7ei8_v_i16mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
1169 }
1170 
1171 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i16mf4(
1172 // CHECK-RV64-NEXT:  entry:
1173 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], <vscale x 1 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1174 // CHECK-RV64-NEXT:    ret void
1175 //
test_vsuxseg8ei8_v_i16mf4(int16_t * base,vuint8mf8_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,vint16mf4_t v5,vint16mf4_t v6,vint16mf4_t v7,size_t vl)1176 void test_vsuxseg8ei8_v_i16mf4 (int16_t *base, vuint8mf8_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, vint16mf4_t v6, vint16mf4_t v7, size_t vl) {
1177   return vsuxseg8ei8_v_i16mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
1178 }
1179 
1180 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i16mf2(
1181 // CHECK-RV64-NEXT:  entry:
1182 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1183 // CHECK-RV64-NEXT:    ret void
1184 //
test_vsuxseg2ei8_v_i16mf2(int16_t * base,vuint8mf4_t bindex,vint16mf2_t v0,vint16mf2_t v1,size_t vl)1185 void test_vsuxseg2ei8_v_i16mf2 (int16_t *base, vuint8mf4_t bindex, vint16mf2_t v0, vint16mf2_t v1, size_t vl) {
1186   return vsuxseg2ei8_v_i16mf2(base, bindex, v0, v1, vl);
1187 }
1188 
1189 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i16mf2(
1190 // CHECK-RV64-NEXT:  entry:
1191 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1192 // CHECK-RV64-NEXT:    ret void
1193 //
test_vsuxseg3ei8_v_i16mf2(int16_t * base,vuint8mf4_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,size_t vl)1194 void test_vsuxseg3ei8_v_i16mf2 (int16_t *base, vuint8mf4_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, size_t vl) {
1195   return vsuxseg3ei8_v_i16mf2(base, bindex, v0, v1, v2, vl);
1196 }
1197 
1198 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i16mf2(
1199 // CHECK-RV64-NEXT:  entry:
1200 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1201 // CHECK-RV64-NEXT:    ret void
1202 //
test_vsuxseg4ei8_v_i16mf2(int16_t * base,vuint8mf4_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,size_t vl)1203 void test_vsuxseg4ei8_v_i16mf2 (int16_t *base, vuint8mf4_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, size_t vl) {
1204   return vsuxseg4ei8_v_i16mf2(base, bindex, v0, v1, v2, v3, vl);
1205 }
1206 
1207 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i16mf2(
1208 // CHECK-RV64-NEXT:  entry:
1209 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1210 // CHECK-RV64-NEXT:    ret void
1211 //
test_vsuxseg5ei8_v_i16mf2(int16_t * base,vuint8mf4_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,size_t vl)1212 void test_vsuxseg5ei8_v_i16mf2 (int16_t *base, vuint8mf4_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, size_t vl) {
1213   return vsuxseg5ei8_v_i16mf2(base, bindex, v0, v1, v2, v3, v4, vl);
1214 }
1215 
1216 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i16mf2(
1217 // CHECK-RV64-NEXT:  entry:
1218 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1219 // CHECK-RV64-NEXT:    ret void
1220 //
test_vsuxseg6ei8_v_i16mf2(int16_t * base,vuint8mf4_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,size_t vl)1221 void test_vsuxseg6ei8_v_i16mf2 (int16_t *base, vuint8mf4_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, size_t vl) {
1222   return vsuxseg6ei8_v_i16mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
1223 }
1224 
1225 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i16mf2(
1226 // CHECK-RV64-NEXT:  entry:
1227 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1228 // CHECK-RV64-NEXT:    ret void
1229 //
test_vsuxseg7ei8_v_i16mf2(int16_t * base,vuint8mf4_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,vint16mf2_t v6,size_t vl)1230 void test_vsuxseg7ei8_v_i16mf2 (int16_t *base, vuint8mf4_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, vint16mf2_t v6, size_t vl) {
1231   return vsuxseg7ei8_v_i16mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
1232 }
1233 
1234 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i16mf2(
1235 // CHECK-RV64-NEXT:  entry:
1236 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], <vscale x 2 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1237 // CHECK-RV64-NEXT:    ret void
1238 //
test_vsuxseg8ei8_v_i16mf2(int16_t * base,vuint8mf4_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,vint16mf2_t v6,vint16mf2_t v7,size_t vl)1239 void test_vsuxseg8ei8_v_i16mf2 (int16_t *base, vuint8mf4_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, vint16mf2_t v6, vint16mf2_t v7, size_t vl) {
1240   return vsuxseg8ei8_v_i16mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
1241 }
1242 
1243 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i16m1(
1244 // CHECK-RV64-NEXT:  entry:
1245 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1246 // CHECK-RV64-NEXT:    ret void
1247 //
test_vsuxseg2ei8_v_i16m1(int16_t * base,vuint8mf2_t bindex,vint16m1_t v0,vint16m1_t v1,size_t vl)1248 void test_vsuxseg2ei8_v_i16m1 (int16_t *base, vuint8mf2_t bindex, vint16m1_t v0, vint16m1_t v1, size_t vl) {
1249   return vsuxseg2ei8_v_i16m1(base, bindex, v0, v1, vl);
1250 }
1251 
1252 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i16m1(
1253 // CHECK-RV64-NEXT:  entry:
1254 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1255 // CHECK-RV64-NEXT:    ret void
1256 //
test_vsuxseg3ei8_v_i16m1(int16_t * base,vuint8mf2_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,size_t vl)1257 void test_vsuxseg3ei8_v_i16m1 (int16_t *base, vuint8mf2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, size_t vl) {
1258   return vsuxseg3ei8_v_i16m1(base, bindex, v0, v1, v2, vl);
1259 }
1260 
1261 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i16m1(
1262 // CHECK-RV64-NEXT:  entry:
1263 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1264 // CHECK-RV64-NEXT:    ret void
1265 //
test_vsuxseg4ei8_v_i16m1(int16_t * base,vuint8mf2_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,size_t vl)1266 void test_vsuxseg4ei8_v_i16m1 (int16_t *base, vuint8mf2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, size_t vl) {
1267   return vsuxseg4ei8_v_i16m1(base, bindex, v0, v1, v2, v3, vl);
1268 }
1269 
1270 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i16m1(
1271 // CHECK-RV64-NEXT:  entry:
1272 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1273 // CHECK-RV64-NEXT:    ret void
1274 //
test_vsuxseg5ei8_v_i16m1(int16_t * base,vuint8mf2_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,size_t vl)1275 void test_vsuxseg5ei8_v_i16m1 (int16_t *base, vuint8mf2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, size_t vl) {
1276   return vsuxseg5ei8_v_i16m1(base, bindex, v0, v1, v2, v3, v4, vl);
1277 }
1278 
1279 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i16m1(
1280 // CHECK-RV64-NEXT:  entry:
1281 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1282 // CHECK-RV64-NEXT:    ret void
1283 //
test_vsuxseg6ei8_v_i16m1(int16_t * base,vuint8mf2_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,vint16m1_t v5,size_t vl)1284 void test_vsuxseg6ei8_v_i16m1 (int16_t *base, vuint8mf2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, size_t vl) {
1285   return vsuxseg6ei8_v_i16m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
1286 }
1287 
1288 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i16m1(
1289 // CHECK-RV64-NEXT:  entry:
1290 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1291 // CHECK-RV64-NEXT:    ret void
1292 //
test_vsuxseg7ei8_v_i16m1(int16_t * base,vuint8mf2_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,vint16m1_t v5,vint16m1_t v6,size_t vl)1293 void test_vsuxseg7ei8_v_i16m1 (int16_t *base, vuint8mf2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, vint16m1_t v6, size_t vl) {
1294   return vsuxseg7ei8_v_i16m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
1295 }
1296 
1297 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i16m1(
1298 // CHECK-RV64-NEXT:  entry:
1299 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], <vscale x 4 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1300 // CHECK-RV64-NEXT:    ret void
1301 //
test_vsuxseg8ei8_v_i16m1(int16_t * base,vuint8mf2_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,vint16m1_t v5,vint16m1_t v6,vint16m1_t v7,size_t vl)1302 void test_vsuxseg8ei8_v_i16m1 (int16_t *base, vuint8mf2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, vint16m1_t v6, vint16m1_t v7, size_t vl) {
1303   return vsuxseg8ei8_v_i16m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
1304 }
1305 
1306 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i16m2(
1307 // CHECK-RV64-NEXT:  entry:
1308 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1309 // CHECK-RV64-NEXT:    ret void
1310 //
test_vsuxseg2ei8_v_i16m2(int16_t * base,vuint8m1_t bindex,vint16m2_t v0,vint16m2_t v1,size_t vl)1311 void test_vsuxseg2ei8_v_i16m2 (int16_t *base, vuint8m1_t bindex, vint16m2_t v0, vint16m2_t v1, size_t vl) {
1312   return vsuxseg2ei8_v_i16m2(base, bindex, v0, v1, vl);
1313 }
1314 
1315 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i16m2(
1316 // CHECK-RV64-NEXT:  entry:
1317 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1318 // CHECK-RV64-NEXT:    ret void
1319 //
test_vsuxseg3ei8_v_i16m2(int16_t * base,vuint8m1_t bindex,vint16m2_t v0,vint16m2_t v1,vint16m2_t v2,size_t vl)1320 void test_vsuxseg3ei8_v_i16m2 (int16_t *base, vuint8m1_t bindex, vint16m2_t v0, vint16m2_t v1, vint16m2_t v2, size_t vl) {
1321   return vsuxseg3ei8_v_i16m2(base, bindex, v0, v1, v2, vl);
1322 }
1323 
1324 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i16m2(
1325 // CHECK-RV64-NEXT:  entry:
1326 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], <vscale x 8 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1327 // CHECK-RV64-NEXT:    ret void
1328 //
test_vsuxseg4ei8_v_i16m2(int16_t * base,vuint8m1_t bindex,vint16m2_t v0,vint16m2_t v1,vint16m2_t v2,vint16m2_t v3,size_t vl)1329 void test_vsuxseg4ei8_v_i16m2 (int16_t *base, vuint8m1_t bindex, vint16m2_t v0, vint16m2_t v1, vint16m2_t v2, vint16m2_t v3, size_t vl) {
1330   return vsuxseg4ei8_v_i16m2(base, bindex, v0, v1, v2, v3, vl);
1331 }
1332 
1333 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i16m4(
1334 // CHECK-RV64-NEXT:  entry:
1335 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i8.i64(<vscale x 16 x i16> [[V0:%.*]], <vscale x 16 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
1336 // CHECK-RV64-NEXT:    ret void
1337 //
test_vsuxseg2ei8_v_i16m4(int16_t * base,vuint8m2_t bindex,vint16m4_t v0,vint16m4_t v1,size_t vl)1338 void test_vsuxseg2ei8_v_i16m4 (int16_t *base, vuint8m2_t bindex, vint16m4_t v0, vint16m4_t v1, size_t vl) {
1339   return vsuxseg2ei8_v_i16m4(base, bindex, v0, v1, vl);
1340 }
1341 
1342 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i16mf4(
1343 // CHECK-RV64-NEXT:  entry:
1344 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1345 // CHECK-RV64-NEXT:    ret void
1346 //
test_vsuxseg2ei16_v_i16mf4(int16_t * base,vuint16mf4_t bindex,vint16mf4_t v0,vint16mf4_t v1,size_t vl)1347 void test_vsuxseg2ei16_v_i16mf4 (int16_t *base, vuint16mf4_t bindex, vint16mf4_t v0, vint16mf4_t v1, size_t vl) {
1348   return vsuxseg2ei16_v_i16mf4(base, bindex, v0, v1, vl);
1349 }
1350 
1351 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i16mf4(
1352 // CHECK-RV64-NEXT:  entry:
1353 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1354 // CHECK-RV64-NEXT:    ret void
1355 //
test_vsuxseg3ei16_v_i16mf4(int16_t * base,vuint16mf4_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,size_t vl)1356 void test_vsuxseg3ei16_v_i16mf4 (int16_t *base, vuint16mf4_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, size_t vl) {
1357   return vsuxseg3ei16_v_i16mf4(base, bindex, v0, v1, v2, vl);
1358 }
1359 
1360 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i16mf4(
1361 // CHECK-RV64-NEXT:  entry:
1362 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1363 // CHECK-RV64-NEXT:    ret void
1364 //
test_vsuxseg4ei16_v_i16mf4(int16_t * base,vuint16mf4_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,size_t vl)1365 void test_vsuxseg4ei16_v_i16mf4 (int16_t *base, vuint16mf4_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, size_t vl) {
1366   return vsuxseg4ei16_v_i16mf4(base, bindex, v0, v1, v2, v3, vl);
1367 }
1368 
1369 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i16mf4(
1370 // CHECK-RV64-NEXT:  entry:
1371 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1372 // CHECK-RV64-NEXT:    ret void
1373 //
test_vsuxseg5ei16_v_i16mf4(int16_t * base,vuint16mf4_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,size_t vl)1374 void test_vsuxseg5ei16_v_i16mf4 (int16_t *base, vuint16mf4_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, size_t vl) {
1375   return vsuxseg5ei16_v_i16mf4(base, bindex, v0, v1, v2, v3, v4, vl);
1376 }
1377 
1378 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i16mf4(
1379 // CHECK-RV64-NEXT:  entry:
1380 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1381 // CHECK-RV64-NEXT:    ret void
1382 //
test_vsuxseg6ei16_v_i16mf4(int16_t * base,vuint16mf4_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,vint16mf4_t v5,size_t vl)1383 void test_vsuxseg6ei16_v_i16mf4 (int16_t *base, vuint16mf4_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, size_t vl) {
1384   return vsuxseg6ei16_v_i16mf4(base, bindex, v0, v1, v2, v3, v4, v5, vl);
1385 }
1386 
1387 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i16mf4(
1388 // CHECK-RV64-NEXT:  entry:
1389 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1390 // CHECK-RV64-NEXT:    ret void
1391 //
test_vsuxseg7ei16_v_i16mf4(int16_t * base,vuint16mf4_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,vint16mf4_t v5,vint16mf4_t v6,size_t vl)1392 void test_vsuxseg7ei16_v_i16mf4 (int16_t *base, vuint16mf4_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, vint16mf4_t v6, size_t vl) {
1393   return vsuxseg7ei16_v_i16mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
1394 }
1395 
1396 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i16mf4(
1397 // CHECK-RV64-NEXT:  entry:
1398 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], <vscale x 1 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1399 // CHECK-RV64-NEXT:    ret void
1400 //
test_vsuxseg8ei16_v_i16mf4(int16_t * base,vuint16mf4_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,vint16mf4_t v5,vint16mf4_t v6,vint16mf4_t v7,size_t vl)1401 void test_vsuxseg8ei16_v_i16mf4 (int16_t *base, vuint16mf4_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, vint16mf4_t v6, vint16mf4_t v7, size_t vl) {
1402   return vsuxseg8ei16_v_i16mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
1403 }
1404 
1405 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i16mf2(
1406 // CHECK-RV64-NEXT:  entry:
1407 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1408 // CHECK-RV64-NEXT:    ret void
1409 //
test_vsuxseg2ei16_v_i16mf2(int16_t * base,vuint16mf2_t bindex,vint16mf2_t v0,vint16mf2_t v1,size_t vl)1410 void test_vsuxseg2ei16_v_i16mf2 (int16_t *base, vuint16mf2_t bindex, vint16mf2_t v0, vint16mf2_t v1, size_t vl) {
1411   return vsuxseg2ei16_v_i16mf2(base, bindex, v0, v1, vl);
1412 }
1413 
1414 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i16mf2(
1415 // CHECK-RV64-NEXT:  entry:
1416 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1417 // CHECK-RV64-NEXT:    ret void
1418 //
test_vsuxseg3ei16_v_i16mf2(int16_t * base,vuint16mf2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,size_t vl)1419 void test_vsuxseg3ei16_v_i16mf2 (int16_t *base, vuint16mf2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, size_t vl) {
1420   return vsuxseg3ei16_v_i16mf2(base, bindex, v0, v1, v2, vl);
1421 }
1422 
1423 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i16mf2(
1424 // CHECK-RV64-NEXT:  entry:
1425 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1426 // CHECK-RV64-NEXT:    ret void
1427 //
test_vsuxseg4ei16_v_i16mf2(int16_t * base,vuint16mf2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,size_t vl)1428 void test_vsuxseg4ei16_v_i16mf2 (int16_t *base, vuint16mf2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, size_t vl) {
1429   return vsuxseg4ei16_v_i16mf2(base, bindex, v0, v1, v2, v3, vl);
1430 }
1431 
1432 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i16mf2(
1433 // CHECK-RV64-NEXT:  entry:
1434 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1435 // CHECK-RV64-NEXT:    ret void
1436 //
test_vsuxseg5ei16_v_i16mf2(int16_t * base,vuint16mf2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,size_t vl)1437 void test_vsuxseg5ei16_v_i16mf2 (int16_t *base, vuint16mf2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, size_t vl) {
1438   return vsuxseg5ei16_v_i16mf2(base, bindex, v0, v1, v2, v3, v4, vl);
1439 }
1440 
1441 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i16mf2(
1442 // CHECK-RV64-NEXT:  entry:
1443 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1444 // CHECK-RV64-NEXT:    ret void
1445 //
test_vsuxseg6ei16_v_i16mf2(int16_t * base,vuint16mf2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,size_t vl)1446 void test_vsuxseg6ei16_v_i16mf2 (int16_t *base, vuint16mf2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, size_t vl) {
1447   return vsuxseg6ei16_v_i16mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
1448 }
1449 
1450 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i16mf2(
1451 // CHECK-RV64-NEXT:  entry:
1452 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1453 // CHECK-RV64-NEXT:    ret void
1454 //
test_vsuxseg7ei16_v_i16mf2(int16_t * base,vuint16mf2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,vint16mf2_t v6,size_t vl)1455 void test_vsuxseg7ei16_v_i16mf2 (int16_t *base, vuint16mf2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, vint16mf2_t v6, size_t vl) {
1456   return vsuxseg7ei16_v_i16mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
1457 }
1458 
1459 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i16mf2(
1460 // CHECK-RV64-NEXT:  entry:
1461 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], <vscale x 2 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1462 // CHECK-RV64-NEXT:    ret void
1463 //
test_vsuxseg8ei16_v_i16mf2(int16_t * base,vuint16mf2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,vint16mf2_t v6,vint16mf2_t v7,size_t vl)1464 void test_vsuxseg8ei16_v_i16mf2 (int16_t *base, vuint16mf2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, vint16mf2_t v6, vint16mf2_t v7, size_t vl) {
1465   return vsuxseg8ei16_v_i16mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
1466 }
1467 
1468 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i16m1(
1469 // CHECK-RV64-NEXT:  entry:
1470 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1471 // CHECK-RV64-NEXT:    ret void
1472 //
test_vsuxseg2ei16_v_i16m1(int16_t * base,vuint16m1_t bindex,vint16m1_t v0,vint16m1_t v1,size_t vl)1473 void test_vsuxseg2ei16_v_i16m1 (int16_t *base, vuint16m1_t bindex, vint16m1_t v0, vint16m1_t v1, size_t vl) {
1474   return vsuxseg2ei16_v_i16m1(base, bindex, v0, v1, vl);
1475 }
1476 
1477 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i16m1(
1478 // CHECK-RV64-NEXT:  entry:
1479 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1480 // CHECK-RV64-NEXT:    ret void
1481 //
test_vsuxseg3ei16_v_i16m1(int16_t * base,vuint16m1_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,size_t vl)1482 void test_vsuxseg3ei16_v_i16m1 (int16_t *base, vuint16m1_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, size_t vl) {
1483   return vsuxseg3ei16_v_i16m1(base, bindex, v0, v1, v2, vl);
1484 }
1485 
1486 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i16m1(
1487 // CHECK-RV64-NEXT:  entry:
1488 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1489 // CHECK-RV64-NEXT:    ret void
1490 //
test_vsuxseg4ei16_v_i16m1(int16_t * base,vuint16m1_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,size_t vl)1491 void test_vsuxseg4ei16_v_i16m1 (int16_t *base, vuint16m1_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, size_t vl) {
1492   return vsuxseg4ei16_v_i16m1(base, bindex, v0, v1, v2, v3, vl);
1493 }
1494 
1495 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i16m1(
1496 // CHECK-RV64-NEXT:  entry:
1497 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1498 // CHECK-RV64-NEXT:    ret void
1499 //
test_vsuxseg5ei16_v_i16m1(int16_t * base,vuint16m1_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,size_t vl)1500 void test_vsuxseg5ei16_v_i16m1 (int16_t *base, vuint16m1_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, size_t vl) {
1501   return vsuxseg5ei16_v_i16m1(base, bindex, v0, v1, v2, v3, v4, vl);
1502 }
1503 
1504 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i16m1(
1505 // CHECK-RV64-NEXT:  entry:
1506 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1507 // CHECK-RV64-NEXT:    ret void
1508 //
test_vsuxseg6ei16_v_i16m1(int16_t * base,vuint16m1_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,vint16m1_t v5,size_t vl)1509 void test_vsuxseg6ei16_v_i16m1 (int16_t *base, vuint16m1_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, size_t vl) {
1510   return vsuxseg6ei16_v_i16m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
1511 }
1512 
1513 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i16m1(
1514 // CHECK-RV64-NEXT:  entry:
1515 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1516 // CHECK-RV64-NEXT:    ret void
1517 //
test_vsuxseg7ei16_v_i16m1(int16_t * base,vuint16m1_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,vint16m1_t v5,vint16m1_t v6,size_t vl)1518 void test_vsuxseg7ei16_v_i16m1 (int16_t *base, vuint16m1_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, vint16m1_t v6, size_t vl) {
1519   return vsuxseg7ei16_v_i16m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
1520 }
1521 
1522 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i16m1(
1523 // CHECK-RV64-NEXT:  entry:
1524 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], <vscale x 4 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1525 // CHECK-RV64-NEXT:    ret void
1526 //
test_vsuxseg8ei16_v_i16m1(int16_t * base,vuint16m1_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,vint16m1_t v5,vint16m1_t v6,vint16m1_t v7,size_t vl)1527 void test_vsuxseg8ei16_v_i16m1 (int16_t *base, vuint16m1_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, vint16m1_t v6, vint16m1_t v7, size_t vl) {
1528   return vsuxseg8ei16_v_i16m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
1529 }
1530 
1531 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i16m2(
1532 // CHECK-RV64-NEXT:  entry:
1533 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1534 // CHECK-RV64-NEXT:    ret void
1535 //
test_vsuxseg2ei16_v_i16m2(int16_t * base,vuint16m2_t bindex,vint16m2_t v0,vint16m2_t v1,size_t vl)1536 void test_vsuxseg2ei16_v_i16m2 (int16_t *base, vuint16m2_t bindex, vint16m2_t v0, vint16m2_t v1, size_t vl) {
1537   return vsuxseg2ei16_v_i16m2(base, bindex, v0, v1, vl);
1538 }
1539 
1540 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i16m2(
1541 // CHECK-RV64-NEXT:  entry:
1542 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1543 // CHECK-RV64-NEXT:    ret void
1544 //
test_vsuxseg3ei16_v_i16m2(int16_t * base,vuint16m2_t bindex,vint16m2_t v0,vint16m2_t v1,vint16m2_t v2,size_t vl)1545 void test_vsuxseg3ei16_v_i16m2 (int16_t *base, vuint16m2_t bindex, vint16m2_t v0, vint16m2_t v1, vint16m2_t v2, size_t vl) {
1546   return vsuxseg3ei16_v_i16m2(base, bindex, v0, v1, v2, vl);
1547 }
1548 
1549 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i16m2(
1550 // CHECK-RV64-NEXT:  entry:
1551 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], <vscale x 8 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1552 // CHECK-RV64-NEXT:    ret void
1553 //
test_vsuxseg4ei16_v_i16m2(int16_t * base,vuint16m2_t bindex,vint16m2_t v0,vint16m2_t v1,vint16m2_t v2,vint16m2_t v3,size_t vl)1554 void test_vsuxseg4ei16_v_i16m2 (int16_t *base, vuint16m2_t bindex, vint16m2_t v0, vint16m2_t v1, vint16m2_t v2, vint16m2_t v3, size_t vl) {
1555   return vsuxseg4ei16_v_i16m2(base, bindex, v0, v1, v2, v3, vl);
1556 }
1557 
1558 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i16m4(
1559 // CHECK-RV64-NEXT:  entry:
1560 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[V0:%.*]], <vscale x 16 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
1561 // CHECK-RV64-NEXT:    ret void
1562 //
test_vsuxseg2ei16_v_i16m4(int16_t * base,vuint16m4_t bindex,vint16m4_t v0,vint16m4_t v1,size_t vl)1563 void test_vsuxseg2ei16_v_i16m4 (int16_t *base, vuint16m4_t bindex, vint16m4_t v0, vint16m4_t v1, size_t vl) {
1564   return vsuxseg2ei16_v_i16m4(base, bindex, v0, v1, vl);
1565 }
1566 
1567 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i16mf4(
1568 // CHECK-RV64-NEXT:  entry:
1569 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1570 // CHECK-RV64-NEXT:    ret void
1571 //
test_vsuxseg2ei32_v_i16mf4(int16_t * base,vuint32mf2_t bindex,vint16mf4_t v0,vint16mf4_t v1,size_t vl)1572 void test_vsuxseg2ei32_v_i16mf4 (int16_t *base, vuint32mf2_t bindex, vint16mf4_t v0, vint16mf4_t v1, size_t vl) {
1573   return vsuxseg2ei32_v_i16mf4(base, bindex, v0, v1, vl);
1574 }
1575 
1576 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i16mf4(
1577 // CHECK-RV64-NEXT:  entry:
1578 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1579 // CHECK-RV64-NEXT:    ret void
1580 //
test_vsuxseg3ei32_v_i16mf4(int16_t * base,vuint32mf2_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,size_t vl)1581 void test_vsuxseg3ei32_v_i16mf4 (int16_t *base, vuint32mf2_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, size_t vl) {
1582   return vsuxseg3ei32_v_i16mf4(base, bindex, v0, v1, v2, vl);
1583 }
1584 
1585 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i16mf4(
1586 // CHECK-RV64-NEXT:  entry:
1587 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1588 // CHECK-RV64-NEXT:    ret void
1589 //
test_vsuxseg4ei32_v_i16mf4(int16_t * base,vuint32mf2_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,size_t vl)1590 void test_vsuxseg4ei32_v_i16mf4 (int16_t *base, vuint32mf2_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, size_t vl) {
1591   return vsuxseg4ei32_v_i16mf4(base, bindex, v0, v1, v2, v3, vl);
1592 }
1593 
1594 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i16mf4(
1595 // CHECK-RV64-NEXT:  entry:
1596 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1597 // CHECK-RV64-NEXT:    ret void
1598 //
test_vsuxseg5ei32_v_i16mf4(int16_t * base,vuint32mf2_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,size_t vl)1599 void test_vsuxseg5ei32_v_i16mf4 (int16_t *base, vuint32mf2_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, size_t vl) {
1600   return vsuxseg5ei32_v_i16mf4(base, bindex, v0, v1, v2, v3, v4, vl);
1601 }
1602 
1603 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i16mf4(
1604 // CHECK-RV64-NEXT:  entry:
1605 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1606 // CHECK-RV64-NEXT:    ret void
1607 //
test_vsuxseg6ei32_v_i16mf4(int16_t * base,vuint32mf2_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,vint16mf4_t v5,size_t vl)1608 void test_vsuxseg6ei32_v_i16mf4 (int16_t *base, vuint32mf2_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, size_t vl) {
1609   return vsuxseg6ei32_v_i16mf4(base, bindex, v0, v1, v2, v3, v4, v5, vl);
1610 }
1611 
1612 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i16mf4(
1613 // CHECK-RV64-NEXT:  entry:
1614 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1615 // CHECK-RV64-NEXT:    ret void
1616 //
test_vsuxseg7ei32_v_i16mf4(int16_t * base,vuint32mf2_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,vint16mf4_t v5,vint16mf4_t v6,size_t vl)1617 void test_vsuxseg7ei32_v_i16mf4 (int16_t *base, vuint32mf2_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, vint16mf4_t v6, size_t vl) {
1618   return vsuxseg7ei32_v_i16mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
1619 }
1620 
1621 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i16mf4(
1622 // CHECK-RV64-NEXT:  entry:
1623 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], <vscale x 1 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1624 // CHECK-RV64-NEXT:    ret void
1625 //
test_vsuxseg8ei32_v_i16mf4(int16_t * base,vuint32mf2_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,vint16mf4_t v5,vint16mf4_t v6,vint16mf4_t v7,size_t vl)1626 void test_vsuxseg8ei32_v_i16mf4 (int16_t *base, vuint32mf2_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, vint16mf4_t v6, vint16mf4_t v7, size_t vl) {
1627   return vsuxseg8ei32_v_i16mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
1628 }
1629 
1630 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i16mf2(
1631 // CHECK-RV64-NEXT:  entry:
1632 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1633 // CHECK-RV64-NEXT:    ret void
1634 //
test_vsuxseg2ei32_v_i16mf2(int16_t * base,vuint32m1_t bindex,vint16mf2_t v0,vint16mf2_t v1,size_t vl)1635 void test_vsuxseg2ei32_v_i16mf2 (int16_t *base, vuint32m1_t bindex, vint16mf2_t v0, vint16mf2_t v1, size_t vl) {
1636   return vsuxseg2ei32_v_i16mf2(base, bindex, v0, v1, vl);
1637 }
1638 
1639 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i16mf2(
1640 // CHECK-RV64-NEXT:  entry:
1641 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1642 // CHECK-RV64-NEXT:    ret void
1643 //
test_vsuxseg3ei32_v_i16mf2(int16_t * base,vuint32m1_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,size_t vl)1644 void test_vsuxseg3ei32_v_i16mf2 (int16_t *base, vuint32m1_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, size_t vl) {
1645   return vsuxseg3ei32_v_i16mf2(base, bindex, v0, v1, v2, vl);
1646 }
1647 
1648 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i16mf2(
1649 // CHECK-RV64-NEXT:  entry:
1650 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1651 // CHECK-RV64-NEXT:    ret void
1652 //
test_vsuxseg4ei32_v_i16mf2(int16_t * base,vuint32m1_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,size_t vl)1653 void test_vsuxseg4ei32_v_i16mf2 (int16_t *base, vuint32m1_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, size_t vl) {
1654   return vsuxseg4ei32_v_i16mf2(base, bindex, v0, v1, v2, v3, vl);
1655 }
1656 
1657 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i16mf2(
1658 // CHECK-RV64-NEXT:  entry:
1659 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1660 // CHECK-RV64-NEXT:    ret void
1661 //
test_vsuxseg5ei32_v_i16mf2(int16_t * base,vuint32m1_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,size_t vl)1662 void test_vsuxseg5ei32_v_i16mf2 (int16_t *base, vuint32m1_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, size_t vl) {
1663   return vsuxseg5ei32_v_i16mf2(base, bindex, v0, v1, v2, v3, v4, vl);
1664 }
1665 
1666 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i16mf2(
1667 // CHECK-RV64-NEXT:  entry:
1668 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1669 // CHECK-RV64-NEXT:    ret void
1670 //
test_vsuxseg6ei32_v_i16mf2(int16_t * base,vuint32m1_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,size_t vl)1671 void test_vsuxseg6ei32_v_i16mf2 (int16_t *base, vuint32m1_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, size_t vl) {
1672   return vsuxseg6ei32_v_i16mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
1673 }
1674 
1675 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i16mf2(
1676 // CHECK-RV64-NEXT:  entry:
1677 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1678 // CHECK-RV64-NEXT:    ret void
1679 //
test_vsuxseg7ei32_v_i16mf2(int16_t * base,vuint32m1_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,vint16mf2_t v6,size_t vl)1680 void test_vsuxseg7ei32_v_i16mf2 (int16_t *base, vuint32m1_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, vint16mf2_t v6, size_t vl) {
1681   return vsuxseg7ei32_v_i16mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
1682 }
1683 
1684 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i16mf2(
1685 // CHECK-RV64-NEXT:  entry:
1686 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], <vscale x 2 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1687 // CHECK-RV64-NEXT:    ret void
1688 //
test_vsuxseg8ei32_v_i16mf2(int16_t * base,vuint32m1_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,vint16mf2_t v6,vint16mf2_t v7,size_t vl)1689 void test_vsuxseg8ei32_v_i16mf2 (int16_t *base, vuint32m1_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, vint16mf2_t v6, vint16mf2_t v7, size_t vl) {
1690   return vsuxseg8ei32_v_i16mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
1691 }
1692 
1693 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i16m1(
1694 // CHECK-RV64-NEXT:  entry:
1695 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1696 // CHECK-RV64-NEXT:    ret void
1697 //
test_vsuxseg2ei32_v_i16m1(int16_t * base,vuint32m2_t bindex,vint16m1_t v0,vint16m1_t v1,size_t vl)1698 void test_vsuxseg2ei32_v_i16m1 (int16_t *base, vuint32m2_t bindex, vint16m1_t v0, vint16m1_t v1, size_t vl) {
1699   return vsuxseg2ei32_v_i16m1(base, bindex, v0, v1, vl);
1700 }
1701 
1702 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i16m1(
1703 // CHECK-RV64-NEXT:  entry:
1704 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1705 // CHECK-RV64-NEXT:    ret void
1706 //
test_vsuxseg3ei32_v_i16m1(int16_t * base,vuint32m2_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,size_t vl)1707 void test_vsuxseg3ei32_v_i16m1 (int16_t *base, vuint32m2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, size_t vl) {
1708   return vsuxseg3ei32_v_i16m1(base, bindex, v0, v1, v2, vl);
1709 }
1710 
1711 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i16m1(
1712 // CHECK-RV64-NEXT:  entry:
1713 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1714 // CHECK-RV64-NEXT:    ret void
1715 //
test_vsuxseg4ei32_v_i16m1(int16_t * base,vuint32m2_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,size_t vl)1716 void test_vsuxseg4ei32_v_i16m1 (int16_t *base, vuint32m2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, size_t vl) {
1717   return vsuxseg4ei32_v_i16m1(base, bindex, v0, v1, v2, v3, vl);
1718 }
1719 
1720 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i16m1(
1721 // CHECK-RV64-NEXT:  entry:
1722 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1723 // CHECK-RV64-NEXT:    ret void
1724 //
test_vsuxseg5ei32_v_i16m1(int16_t * base,vuint32m2_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,size_t vl)1725 void test_vsuxseg5ei32_v_i16m1 (int16_t *base, vuint32m2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, size_t vl) {
1726   return vsuxseg5ei32_v_i16m1(base, bindex, v0, v1, v2, v3, v4, vl);
1727 }
1728 
1729 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i16m1(
1730 // CHECK-RV64-NEXT:  entry:
1731 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1732 // CHECK-RV64-NEXT:    ret void
1733 //
test_vsuxseg6ei32_v_i16m1(int16_t * base,vuint32m2_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,vint16m1_t v5,size_t vl)1734 void test_vsuxseg6ei32_v_i16m1 (int16_t *base, vuint32m2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, size_t vl) {
1735   return vsuxseg6ei32_v_i16m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
1736 }
1737 
1738 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i16m1(
1739 // CHECK-RV64-NEXT:  entry:
1740 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1741 // CHECK-RV64-NEXT:    ret void
1742 //
test_vsuxseg7ei32_v_i16m1(int16_t * base,vuint32m2_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,vint16m1_t v5,vint16m1_t v6,size_t vl)1743 void test_vsuxseg7ei32_v_i16m1 (int16_t *base, vuint32m2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, vint16m1_t v6, size_t vl) {
1744   return vsuxseg7ei32_v_i16m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
1745 }
1746 
1747 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i16m1(
1748 // CHECK-RV64-NEXT:  entry:
1749 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], <vscale x 4 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1750 // CHECK-RV64-NEXT:    ret void
1751 //
test_vsuxseg8ei32_v_i16m1(int16_t * base,vuint32m2_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,vint16m1_t v5,vint16m1_t v6,vint16m1_t v7,size_t vl)1752 void test_vsuxseg8ei32_v_i16m1 (int16_t *base, vuint32m2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, vint16m1_t v6, vint16m1_t v7, size_t vl) {
1753   return vsuxseg8ei32_v_i16m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
1754 }
1755 
1756 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i16m2(
1757 // CHECK-RV64-NEXT:  entry:
1758 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1759 // CHECK-RV64-NEXT:    ret void
1760 //
test_vsuxseg2ei32_v_i16m2(int16_t * base,vuint32m4_t bindex,vint16m2_t v0,vint16m2_t v1,size_t vl)1761 void test_vsuxseg2ei32_v_i16m2 (int16_t *base, vuint32m4_t bindex, vint16m2_t v0, vint16m2_t v1, size_t vl) {
1762   return vsuxseg2ei32_v_i16m2(base, bindex, v0, v1, vl);
1763 }
1764 
1765 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i16m2(
1766 // CHECK-RV64-NEXT:  entry:
1767 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1768 // CHECK-RV64-NEXT:    ret void
1769 //
test_vsuxseg3ei32_v_i16m2(int16_t * base,vuint32m4_t bindex,vint16m2_t v0,vint16m2_t v1,vint16m2_t v2,size_t vl)1770 void test_vsuxseg3ei32_v_i16m2 (int16_t *base, vuint32m4_t bindex, vint16m2_t v0, vint16m2_t v1, vint16m2_t v2, size_t vl) {
1771   return vsuxseg3ei32_v_i16m2(base, bindex, v0, v1, v2, vl);
1772 }
1773 
1774 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i16m2(
1775 // CHECK-RV64-NEXT:  entry:
1776 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], <vscale x 8 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1777 // CHECK-RV64-NEXT:    ret void
1778 //
test_vsuxseg4ei32_v_i16m2(int16_t * base,vuint32m4_t bindex,vint16m2_t v0,vint16m2_t v1,vint16m2_t v2,vint16m2_t v3,size_t vl)1779 void test_vsuxseg4ei32_v_i16m2 (int16_t *base, vuint32m4_t bindex, vint16m2_t v0, vint16m2_t v1, vint16m2_t v2, vint16m2_t v3, size_t vl) {
1780   return vsuxseg4ei32_v_i16m2(base, bindex, v0, v1, v2, v3, vl);
1781 }
1782 
1783 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i16m4(
1784 // CHECK-RV64-NEXT:  entry:
1785 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i32.i64(<vscale x 16 x i16> [[V0:%.*]], <vscale x 16 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
1786 // CHECK-RV64-NEXT:    ret void
1787 //
test_vsuxseg2ei32_v_i16m4(int16_t * base,vuint32m8_t bindex,vint16m4_t v0,vint16m4_t v1,size_t vl)1788 void test_vsuxseg2ei32_v_i16m4 (int16_t *base, vuint32m8_t bindex, vint16m4_t v0, vint16m4_t v1, size_t vl) {
1789   return vsuxseg2ei32_v_i16m4(base, bindex, v0, v1, vl);
1790 }
1791 
1792 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i16mf4(
1793 // CHECK-RV64-NEXT:  entry:
1794 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1795 // CHECK-RV64-NEXT:    ret void
1796 //
test_vsuxseg2ei64_v_i16mf4(int16_t * base,vuint64m1_t bindex,vint16mf4_t v0,vint16mf4_t v1,size_t vl)1797 void test_vsuxseg2ei64_v_i16mf4 (int16_t *base, vuint64m1_t bindex, vint16mf4_t v0, vint16mf4_t v1, size_t vl) {
1798   return vsuxseg2ei64_v_i16mf4(base, bindex, v0, v1, vl);
1799 }
1800 
1801 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i16mf4(
1802 // CHECK-RV64-NEXT:  entry:
1803 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1804 // CHECK-RV64-NEXT:    ret void
1805 //
test_vsuxseg3ei64_v_i16mf4(int16_t * base,vuint64m1_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,size_t vl)1806 void test_vsuxseg3ei64_v_i16mf4 (int16_t *base, vuint64m1_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, size_t vl) {
1807   return vsuxseg3ei64_v_i16mf4(base, bindex, v0, v1, v2, vl);
1808 }
1809 
1810 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i16mf4(
1811 // CHECK-RV64-NEXT:  entry:
1812 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1813 // CHECK-RV64-NEXT:    ret void
1814 //
test_vsuxseg4ei64_v_i16mf4(int16_t * base,vuint64m1_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,size_t vl)1815 void test_vsuxseg4ei64_v_i16mf4 (int16_t *base, vuint64m1_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, size_t vl) {
1816   return vsuxseg4ei64_v_i16mf4(base, bindex, v0, v1, v2, v3, vl);
1817 }
1818 
1819 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i16mf4(
1820 // CHECK-RV64-NEXT:  entry:
1821 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1822 // CHECK-RV64-NEXT:    ret void
1823 //
test_vsuxseg5ei64_v_i16mf4(int16_t * base,vuint64m1_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,size_t vl)1824 void test_vsuxseg5ei64_v_i16mf4 (int16_t *base, vuint64m1_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, size_t vl) {
1825   return vsuxseg5ei64_v_i16mf4(base, bindex, v0, v1, v2, v3, v4, vl);
1826 }
1827 
1828 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i16mf4(
1829 // CHECK-RV64-NEXT:  entry:
1830 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1831 // CHECK-RV64-NEXT:    ret void
1832 //
test_vsuxseg6ei64_v_i16mf4(int16_t * base,vuint64m1_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,vint16mf4_t v5,size_t vl)1833 void test_vsuxseg6ei64_v_i16mf4 (int16_t *base, vuint64m1_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, size_t vl) {
1834   return vsuxseg6ei64_v_i16mf4(base, bindex, v0, v1, v2, v3, v4, v5, vl);
1835 }
1836 
1837 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i16mf4(
1838 // CHECK-RV64-NEXT:  entry:
1839 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1840 // CHECK-RV64-NEXT:    ret void
1841 //
test_vsuxseg7ei64_v_i16mf4(int16_t * base,vuint64m1_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,vint16mf4_t v5,vint16mf4_t v6,size_t vl)1842 void test_vsuxseg7ei64_v_i16mf4 (int16_t *base, vuint64m1_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, vint16mf4_t v6, size_t vl) {
1843   return vsuxseg7ei64_v_i16mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
1844 }
1845 
1846 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i16mf4(
1847 // CHECK-RV64-NEXT:  entry:
1848 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], <vscale x 1 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1849 // CHECK-RV64-NEXT:    ret void
1850 //
test_vsuxseg8ei64_v_i16mf4(int16_t * base,vuint64m1_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,vint16mf4_t v5,vint16mf4_t v6,vint16mf4_t v7,size_t vl)1851 void test_vsuxseg8ei64_v_i16mf4 (int16_t *base, vuint64m1_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, vint16mf4_t v6, vint16mf4_t v7, size_t vl) {
1852   return vsuxseg8ei64_v_i16mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
1853 }
1854 
1855 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i16mf2(
1856 // CHECK-RV64-NEXT:  entry:
1857 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1858 // CHECK-RV64-NEXT:    ret void
1859 //
test_vsuxseg2ei64_v_i16mf2(int16_t * base,vuint64m2_t bindex,vint16mf2_t v0,vint16mf2_t v1,size_t vl)1860 void test_vsuxseg2ei64_v_i16mf2 (int16_t *base, vuint64m2_t bindex, vint16mf2_t v0, vint16mf2_t v1, size_t vl) {
1861   return vsuxseg2ei64_v_i16mf2(base, bindex, v0, v1, vl);
1862 }
1863 
1864 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i16mf2(
1865 // CHECK-RV64-NEXT:  entry:
1866 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1867 // CHECK-RV64-NEXT:    ret void
1868 //
test_vsuxseg3ei64_v_i16mf2(int16_t * base,vuint64m2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,size_t vl)1869 void test_vsuxseg3ei64_v_i16mf2 (int16_t *base, vuint64m2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, size_t vl) {
1870   return vsuxseg3ei64_v_i16mf2(base, bindex, v0, v1, v2, vl);
1871 }
1872 
1873 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i16mf2(
1874 // CHECK-RV64-NEXT:  entry:
1875 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1876 // CHECK-RV64-NEXT:    ret void
1877 //
test_vsuxseg4ei64_v_i16mf2(int16_t * base,vuint64m2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,size_t vl)1878 void test_vsuxseg4ei64_v_i16mf2 (int16_t *base, vuint64m2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, size_t vl) {
1879   return vsuxseg4ei64_v_i16mf2(base, bindex, v0, v1, v2, v3, vl);
1880 }
1881 
1882 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i16mf2(
1883 // CHECK-RV64-NEXT:  entry:
1884 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1885 // CHECK-RV64-NEXT:    ret void
1886 //
test_vsuxseg5ei64_v_i16mf2(int16_t * base,vuint64m2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,size_t vl)1887 void test_vsuxseg5ei64_v_i16mf2 (int16_t *base, vuint64m2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, size_t vl) {
1888   return vsuxseg5ei64_v_i16mf2(base, bindex, v0, v1, v2, v3, v4, vl);
1889 }
1890 
1891 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i16mf2(
1892 // CHECK-RV64-NEXT:  entry:
1893 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1894 // CHECK-RV64-NEXT:    ret void
1895 //
test_vsuxseg6ei64_v_i16mf2(int16_t * base,vuint64m2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,size_t vl)1896 void test_vsuxseg6ei64_v_i16mf2 (int16_t *base, vuint64m2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, size_t vl) {
1897   return vsuxseg6ei64_v_i16mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
1898 }
1899 
1900 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i16mf2(
1901 // CHECK-RV64-NEXT:  entry:
1902 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1903 // CHECK-RV64-NEXT:    ret void
1904 //
test_vsuxseg7ei64_v_i16mf2(int16_t * base,vuint64m2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,vint16mf2_t v6,size_t vl)1905 void test_vsuxseg7ei64_v_i16mf2 (int16_t *base, vuint64m2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, vint16mf2_t v6, size_t vl) {
1906   return vsuxseg7ei64_v_i16mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
1907 }
1908 
1909 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i16mf2(
1910 // CHECK-RV64-NEXT:  entry:
1911 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], <vscale x 2 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1912 // CHECK-RV64-NEXT:    ret void
1913 //
test_vsuxseg8ei64_v_i16mf2(int16_t * base,vuint64m2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,vint16mf2_t v6,vint16mf2_t v7,size_t vl)1914 void test_vsuxseg8ei64_v_i16mf2 (int16_t *base, vuint64m2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, vint16mf2_t v6, vint16mf2_t v7, size_t vl) {
1915   return vsuxseg8ei64_v_i16mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
1916 }
1917 
1918 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i16m1(
1919 // CHECK-RV64-NEXT:  entry:
1920 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1921 // CHECK-RV64-NEXT:    ret void
1922 //
test_vsuxseg2ei64_v_i16m1(int16_t * base,vuint64m4_t bindex,vint16m1_t v0,vint16m1_t v1,size_t vl)1923 void test_vsuxseg2ei64_v_i16m1 (int16_t *base, vuint64m4_t bindex, vint16m1_t v0, vint16m1_t v1, size_t vl) {
1924   return vsuxseg2ei64_v_i16m1(base, bindex, v0, v1, vl);
1925 }
1926 
1927 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i16m1(
1928 // CHECK-RV64-NEXT:  entry:
1929 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1930 // CHECK-RV64-NEXT:    ret void
1931 //
test_vsuxseg3ei64_v_i16m1(int16_t * base,vuint64m4_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,size_t vl)1932 void test_vsuxseg3ei64_v_i16m1 (int16_t *base, vuint64m4_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, size_t vl) {
1933   return vsuxseg3ei64_v_i16m1(base, bindex, v0, v1, v2, vl);
1934 }
1935 
1936 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i16m1(
1937 // CHECK-RV64-NEXT:  entry:
1938 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1939 // CHECK-RV64-NEXT:    ret void
1940 //
test_vsuxseg4ei64_v_i16m1(int16_t * base,vuint64m4_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,size_t vl)1941 void test_vsuxseg4ei64_v_i16m1 (int16_t *base, vuint64m4_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, size_t vl) {
1942   return vsuxseg4ei64_v_i16m1(base, bindex, v0, v1, v2, v3, vl);
1943 }
1944 
1945 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i16m1(
1946 // CHECK-RV64-NEXT:  entry:
1947 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1948 // CHECK-RV64-NEXT:    ret void
1949 //
test_vsuxseg5ei64_v_i16m1(int16_t * base,vuint64m4_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,size_t vl)1950 void test_vsuxseg5ei64_v_i16m1 (int16_t *base, vuint64m4_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, size_t vl) {
1951   return vsuxseg5ei64_v_i16m1(base, bindex, v0, v1, v2, v3, v4, vl);
1952 }
1953 
1954 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i16m1(
1955 // CHECK-RV64-NEXT:  entry:
1956 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1957 // CHECK-RV64-NEXT:    ret void
1958 //
test_vsuxseg6ei64_v_i16m1(int16_t * base,vuint64m4_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,vint16m1_t v5,size_t vl)1959 void test_vsuxseg6ei64_v_i16m1 (int16_t *base, vuint64m4_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, size_t vl) {
1960   return vsuxseg6ei64_v_i16m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
1961 }
1962 
1963 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i16m1(
1964 // CHECK-RV64-NEXT:  entry:
1965 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1966 // CHECK-RV64-NEXT:    ret void
1967 //
test_vsuxseg7ei64_v_i16m1(int16_t * base,vuint64m4_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,vint16m1_t v5,vint16m1_t v6,size_t vl)1968 void test_vsuxseg7ei64_v_i16m1 (int16_t *base, vuint64m4_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, vint16m1_t v6, size_t vl) {
1969   return vsuxseg7ei64_v_i16m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
1970 }
1971 
1972 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i16m1(
1973 // CHECK-RV64-NEXT:  entry:
1974 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], <vscale x 4 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1975 // CHECK-RV64-NEXT:    ret void
1976 //
test_vsuxseg8ei64_v_i16m1(int16_t * base,vuint64m4_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,vint16m1_t v5,vint16m1_t v6,vint16m1_t v7,size_t vl)1977 void test_vsuxseg8ei64_v_i16m1 (int16_t *base, vuint64m4_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, vint16m1_t v6, vint16m1_t v7, size_t vl) {
1978   return vsuxseg8ei64_v_i16m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
1979 }
1980 
1981 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i16m2(
1982 // CHECK-RV64-NEXT:  entry:
1983 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1984 // CHECK-RV64-NEXT:    ret void
1985 //
test_vsuxseg2ei64_v_i16m2(int16_t * base,vuint64m8_t bindex,vint16m2_t v0,vint16m2_t v1,size_t vl)1986 void test_vsuxseg2ei64_v_i16m2 (int16_t *base, vuint64m8_t bindex, vint16m2_t v0, vint16m2_t v1, size_t vl) {
1987   return vsuxseg2ei64_v_i16m2(base, bindex, v0, v1, vl);
1988 }
1989 
1990 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i16m2(
1991 // CHECK-RV64-NEXT:  entry:
1992 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
1993 // CHECK-RV64-NEXT:    ret void
1994 //
test_vsuxseg3ei64_v_i16m2(int16_t * base,vuint64m8_t bindex,vint16m2_t v0,vint16m2_t v1,vint16m2_t v2,size_t vl)1995 void test_vsuxseg3ei64_v_i16m2 (int16_t *base, vuint64m8_t bindex, vint16m2_t v0, vint16m2_t v1, vint16m2_t v2, size_t vl) {
1996   return vsuxseg3ei64_v_i16m2(base, bindex, v0, v1, v2, vl);
1997 }
1998 
1999 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i16m2(
2000 // CHECK-RV64-NEXT:  entry:
2001 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], <vscale x 8 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2002 // CHECK-RV64-NEXT:    ret void
2003 //
test_vsuxseg4ei64_v_i16m2(int16_t * base,vuint64m8_t bindex,vint16m2_t v0,vint16m2_t v1,vint16m2_t v2,vint16m2_t v3,size_t vl)2004 void test_vsuxseg4ei64_v_i16m2 (int16_t *base, vuint64m8_t bindex, vint16m2_t v0, vint16m2_t v1, vint16m2_t v2, vint16m2_t v3, size_t vl) {
2005   return vsuxseg4ei64_v_i16m2(base, bindex, v0, v1, v2, v3, vl);
2006 }
2007 
2008 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i32mf2(
2009 // CHECK-RV64-NEXT:  entry:
2010 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2011 // CHECK-RV64-NEXT:    ret void
2012 //
test_vsuxseg2ei8_v_i32mf2(int32_t * base,vuint8mf8_t bindex,vint32mf2_t v0,vint32mf2_t v1,size_t vl)2013 void test_vsuxseg2ei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t v0, vint32mf2_t v1, size_t vl) {
2014   return vsuxseg2ei8_v_i32mf2(base, bindex, v0, v1, vl);
2015 }
2016 
2017 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i32mf2(
2018 // CHECK-RV64-NEXT:  entry:
2019 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2020 // CHECK-RV64-NEXT:    ret void
2021 //
test_vsuxseg3ei8_v_i32mf2(int32_t * base,vuint8mf8_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,size_t vl)2022 void test_vsuxseg3ei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, size_t vl) {
2023   return vsuxseg3ei8_v_i32mf2(base, bindex, v0, v1, v2, vl);
2024 }
2025 
2026 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i32mf2(
2027 // CHECK-RV64-NEXT:  entry:
2028 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2029 // CHECK-RV64-NEXT:    ret void
2030 //
test_vsuxseg4ei8_v_i32mf2(int32_t * base,vuint8mf8_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,size_t vl)2031 void test_vsuxseg4ei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, size_t vl) {
2032   return vsuxseg4ei8_v_i32mf2(base, bindex, v0, v1, v2, v3, vl);
2033 }
2034 
2035 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i32mf2(
2036 // CHECK-RV64-NEXT:  entry:
2037 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2038 // CHECK-RV64-NEXT:    ret void
2039 //
test_vsuxseg5ei8_v_i32mf2(int32_t * base,vuint8mf8_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,size_t vl)2040 void test_vsuxseg5ei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, size_t vl) {
2041   return vsuxseg5ei8_v_i32mf2(base, bindex, v0, v1, v2, v3, v4, vl);
2042 }
2043 
2044 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i32mf2(
2045 // CHECK-RV64-NEXT:  entry:
2046 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2047 // CHECK-RV64-NEXT:    ret void
2048 //
test_vsuxseg6ei8_v_i32mf2(int32_t * base,vuint8mf8_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,size_t vl)2049 void test_vsuxseg6ei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, size_t vl) {
2050   return vsuxseg6ei8_v_i32mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
2051 }
2052 
2053 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i32mf2(
2054 // CHECK-RV64-NEXT:  entry:
2055 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2056 // CHECK-RV64-NEXT:    ret void
2057 //
test_vsuxseg7ei8_v_i32mf2(int32_t * base,vuint8mf8_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,vint32mf2_t v6,size_t vl)2058 void test_vsuxseg7ei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, vint32mf2_t v6, size_t vl) {
2059   return vsuxseg7ei8_v_i32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
2060 }
2061 
2062 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i32mf2(
2063 // CHECK-RV64-NEXT:  entry:
2064 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], <vscale x 1 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2065 // CHECK-RV64-NEXT:    ret void
2066 //
test_vsuxseg8ei8_v_i32mf2(int32_t * base,vuint8mf8_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,vint32mf2_t v6,vint32mf2_t v7,size_t vl)2067 void test_vsuxseg8ei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, vint32mf2_t v6, vint32mf2_t v7, size_t vl) {
2068   return vsuxseg8ei8_v_i32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
2069 }
2070 
2071 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i32m1(
2072 // CHECK-RV64-NEXT:  entry:
2073 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2074 // CHECK-RV64-NEXT:    ret void
2075 //
test_vsuxseg2ei8_v_i32m1(int32_t * base,vuint8mf4_t bindex,vint32m1_t v0,vint32m1_t v1,size_t vl)2076 void test_vsuxseg2ei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t v0, vint32m1_t v1, size_t vl) {
2077   return vsuxseg2ei8_v_i32m1(base, bindex, v0, v1, vl);
2078 }
2079 
2080 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i32m1(
2081 // CHECK-RV64-NEXT:  entry:
2082 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2083 // CHECK-RV64-NEXT:    ret void
2084 //
test_vsuxseg3ei8_v_i32m1(int32_t * base,vuint8mf4_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,size_t vl)2085 void test_vsuxseg3ei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, size_t vl) {
2086   return vsuxseg3ei8_v_i32m1(base, bindex, v0, v1, v2, vl);
2087 }
2088 
2089 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i32m1(
2090 // CHECK-RV64-NEXT:  entry:
2091 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2092 // CHECK-RV64-NEXT:    ret void
2093 //
test_vsuxseg4ei8_v_i32m1(int32_t * base,vuint8mf4_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,size_t vl)2094 void test_vsuxseg4ei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, size_t vl) {
2095   return vsuxseg4ei8_v_i32m1(base, bindex, v0, v1, v2, v3, vl);
2096 }
2097 
2098 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i32m1(
2099 // CHECK-RV64-NEXT:  entry:
2100 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2101 // CHECK-RV64-NEXT:    ret void
2102 //
test_vsuxseg5ei8_v_i32m1(int32_t * base,vuint8mf4_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,size_t vl)2103 void test_vsuxseg5ei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, size_t vl) {
2104   return vsuxseg5ei8_v_i32m1(base, bindex, v0, v1, v2, v3, v4, vl);
2105 }
2106 
2107 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i32m1(
2108 // CHECK-RV64-NEXT:  entry:
2109 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2110 // CHECK-RV64-NEXT:    ret void
2111 //
test_vsuxseg6ei8_v_i32m1(int32_t * base,vuint8mf4_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,size_t vl)2112 void test_vsuxseg6ei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, size_t vl) {
2113   return vsuxseg6ei8_v_i32m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
2114 }
2115 
2116 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i32m1(
2117 // CHECK-RV64-NEXT:  entry:
2118 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2119 // CHECK-RV64-NEXT:    ret void
2120 //
test_vsuxseg7ei8_v_i32m1(int32_t * base,vuint8mf4_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,vint32m1_t v6,size_t vl)2121 void test_vsuxseg7ei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, vint32m1_t v6, size_t vl) {
2122   return vsuxseg7ei8_v_i32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
2123 }
2124 
2125 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i32m1(
2126 // CHECK-RV64-NEXT:  entry:
2127 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], <vscale x 2 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2128 // CHECK-RV64-NEXT:    ret void
2129 //
test_vsuxseg8ei8_v_i32m1(int32_t * base,vuint8mf4_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,vint32m1_t v6,vint32m1_t v7,size_t vl)2130 void test_vsuxseg8ei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, vint32m1_t v6, vint32m1_t v7, size_t vl) {
2131   return vsuxseg8ei8_v_i32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
2132 }
2133 
2134 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i32m2(
2135 // CHECK-RV64-NEXT:  entry:
2136 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2137 // CHECK-RV64-NEXT:    ret void
2138 //
test_vsuxseg2ei8_v_i32m2(int32_t * base,vuint8mf2_t bindex,vint32m2_t v0,vint32m2_t v1,size_t vl)2139 void test_vsuxseg2ei8_v_i32m2 (int32_t *base, vuint8mf2_t bindex, vint32m2_t v0, vint32m2_t v1, size_t vl) {
2140   return vsuxseg2ei8_v_i32m2(base, bindex, v0, v1, vl);
2141 }
2142 
2143 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i32m2(
2144 // CHECK-RV64-NEXT:  entry:
2145 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2146 // CHECK-RV64-NEXT:    ret void
2147 //
test_vsuxseg3ei8_v_i32m2(int32_t * base,vuint8mf2_t bindex,vint32m2_t v0,vint32m2_t v1,vint32m2_t v2,size_t vl)2148 void test_vsuxseg3ei8_v_i32m2 (int32_t *base, vuint8mf2_t bindex, vint32m2_t v0, vint32m2_t v1, vint32m2_t v2, size_t vl) {
2149   return vsuxseg3ei8_v_i32m2(base, bindex, v0, v1, v2, vl);
2150 }
2151 
2152 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i32m2(
2153 // CHECK-RV64-NEXT:  entry:
2154 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], <vscale x 4 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2155 // CHECK-RV64-NEXT:    ret void
2156 //
test_vsuxseg4ei8_v_i32m2(int32_t * base,vuint8mf2_t bindex,vint32m2_t v0,vint32m2_t v1,vint32m2_t v2,vint32m2_t v3,size_t vl)2157 void test_vsuxseg4ei8_v_i32m2 (int32_t *base, vuint8mf2_t bindex, vint32m2_t v0, vint32m2_t v1, vint32m2_t v2, vint32m2_t v3, size_t vl) {
2158   return vsuxseg4ei8_v_i32m2(base, bindex, v0, v1, v2, v3, vl);
2159 }
2160 
2161 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i32m4(
2162 // CHECK-RV64-NEXT:  entry:
2163 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i32.nxv8i8.i64(<vscale x 8 x i32> [[V0:%.*]], <vscale x 8 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2164 // CHECK-RV64-NEXT:    ret void
2165 //
test_vsuxseg2ei8_v_i32m4(int32_t * base,vuint8m1_t bindex,vint32m4_t v0,vint32m4_t v1,size_t vl)2166 void test_vsuxseg2ei8_v_i32m4 (int32_t *base, vuint8m1_t bindex, vint32m4_t v0, vint32m4_t v1, size_t vl) {
2167   return vsuxseg2ei8_v_i32m4(base, bindex, v0, v1, vl);
2168 }
2169 
2170 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i32mf2(
2171 // CHECK-RV64-NEXT:  entry:
2172 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2173 // CHECK-RV64-NEXT:    ret void
2174 //
test_vsuxseg2ei16_v_i32mf2(int32_t * base,vuint16mf4_t bindex,vint32mf2_t v0,vint32mf2_t v1,size_t vl)2175 void test_vsuxseg2ei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t v0, vint32mf2_t v1, size_t vl) {
2176   return vsuxseg2ei16_v_i32mf2(base, bindex, v0, v1, vl);
2177 }
2178 
2179 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i32mf2(
2180 // CHECK-RV64-NEXT:  entry:
2181 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2182 // CHECK-RV64-NEXT:    ret void
2183 //
test_vsuxseg3ei16_v_i32mf2(int32_t * base,vuint16mf4_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,size_t vl)2184 void test_vsuxseg3ei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, size_t vl) {
2185   return vsuxseg3ei16_v_i32mf2(base, bindex, v0, v1, v2, vl);
2186 }
2187 
2188 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i32mf2(
2189 // CHECK-RV64-NEXT:  entry:
2190 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2191 // CHECK-RV64-NEXT:    ret void
2192 //
test_vsuxseg4ei16_v_i32mf2(int32_t * base,vuint16mf4_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,size_t vl)2193 void test_vsuxseg4ei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, size_t vl) {
2194   return vsuxseg4ei16_v_i32mf2(base, bindex, v0, v1, v2, v3, vl);
2195 }
2196 
2197 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i32mf2(
2198 // CHECK-RV64-NEXT:  entry:
2199 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2200 // CHECK-RV64-NEXT:    ret void
2201 //
test_vsuxseg5ei16_v_i32mf2(int32_t * base,vuint16mf4_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,size_t vl)2202 void test_vsuxseg5ei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, size_t vl) {
2203   return vsuxseg5ei16_v_i32mf2(base, bindex, v0, v1, v2, v3, v4, vl);
2204 }
2205 
2206 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i32mf2(
2207 // CHECK-RV64-NEXT:  entry:
2208 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2209 // CHECK-RV64-NEXT:    ret void
2210 //
test_vsuxseg6ei16_v_i32mf2(int32_t * base,vuint16mf4_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,size_t vl)2211 void test_vsuxseg6ei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, size_t vl) {
2212   return vsuxseg6ei16_v_i32mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
2213 }
2214 
2215 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i32mf2(
2216 // CHECK-RV64-NEXT:  entry:
2217 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2218 // CHECK-RV64-NEXT:    ret void
2219 //
test_vsuxseg7ei16_v_i32mf2(int32_t * base,vuint16mf4_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,vint32mf2_t v6,size_t vl)2220 void test_vsuxseg7ei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, vint32mf2_t v6, size_t vl) {
2221   return vsuxseg7ei16_v_i32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
2222 }
2223 
2224 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i32mf2(
2225 // CHECK-RV64-NEXT:  entry:
2226 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], <vscale x 1 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2227 // CHECK-RV64-NEXT:    ret void
2228 //
test_vsuxseg8ei16_v_i32mf2(int32_t * base,vuint16mf4_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,vint32mf2_t v6,vint32mf2_t v7,size_t vl)2229 void test_vsuxseg8ei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, vint32mf2_t v6, vint32mf2_t v7, size_t vl) {
2230   return vsuxseg8ei16_v_i32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
2231 }
2232 
2233 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i32m1(
2234 // CHECK-RV64-NEXT:  entry:
2235 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2236 // CHECK-RV64-NEXT:    ret void
2237 //
test_vsuxseg2ei16_v_i32m1(int32_t * base,vuint16mf2_t bindex,vint32m1_t v0,vint32m1_t v1,size_t vl)2238 void test_vsuxseg2ei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t v0, vint32m1_t v1, size_t vl) {
2239   return vsuxseg2ei16_v_i32m1(base, bindex, v0, v1, vl);
2240 }
2241 
2242 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i32m1(
2243 // CHECK-RV64-NEXT:  entry:
2244 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2245 // CHECK-RV64-NEXT:    ret void
2246 //
test_vsuxseg3ei16_v_i32m1(int32_t * base,vuint16mf2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,size_t vl)2247 void test_vsuxseg3ei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, size_t vl) {
2248   return vsuxseg3ei16_v_i32m1(base, bindex, v0, v1, v2, vl);
2249 }
2250 
2251 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i32m1(
2252 // CHECK-RV64-NEXT:  entry:
2253 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2254 // CHECK-RV64-NEXT:    ret void
2255 //
test_vsuxseg4ei16_v_i32m1(int32_t * base,vuint16mf2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,size_t vl)2256 void test_vsuxseg4ei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, size_t vl) {
2257   return vsuxseg4ei16_v_i32m1(base, bindex, v0, v1, v2, v3, vl);
2258 }
2259 
2260 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i32m1(
2261 // CHECK-RV64-NEXT:  entry:
2262 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2263 // CHECK-RV64-NEXT:    ret void
2264 //
test_vsuxseg5ei16_v_i32m1(int32_t * base,vuint16mf2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,size_t vl)2265 void test_vsuxseg5ei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, size_t vl) {
2266   return vsuxseg5ei16_v_i32m1(base, bindex, v0, v1, v2, v3, v4, vl);
2267 }
2268 
2269 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i32m1(
2270 // CHECK-RV64-NEXT:  entry:
2271 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2272 // CHECK-RV64-NEXT:    ret void
2273 //
test_vsuxseg6ei16_v_i32m1(int32_t * base,vuint16mf2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,size_t vl)2274 void test_vsuxseg6ei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, size_t vl) {
2275   return vsuxseg6ei16_v_i32m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
2276 }
2277 
2278 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i32m1(
2279 // CHECK-RV64-NEXT:  entry:
2280 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2281 // CHECK-RV64-NEXT:    ret void
2282 //
test_vsuxseg7ei16_v_i32m1(int32_t * base,vuint16mf2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,vint32m1_t v6,size_t vl)2283 void test_vsuxseg7ei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, vint32m1_t v6, size_t vl) {
2284   return vsuxseg7ei16_v_i32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
2285 }
2286 
2287 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i32m1(
2288 // CHECK-RV64-NEXT:  entry:
2289 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], <vscale x 2 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2290 // CHECK-RV64-NEXT:    ret void
2291 //
test_vsuxseg8ei16_v_i32m1(int32_t * base,vuint16mf2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,vint32m1_t v6,vint32m1_t v7,size_t vl)2292 void test_vsuxseg8ei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, vint32m1_t v6, vint32m1_t v7, size_t vl) {
2293   return vsuxseg8ei16_v_i32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
2294 }
2295 
2296 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i32m2(
2297 // CHECK-RV64-NEXT:  entry:
2298 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2299 // CHECK-RV64-NEXT:    ret void
2300 //
test_vsuxseg2ei16_v_i32m2(int32_t * base,vuint16m1_t bindex,vint32m2_t v0,vint32m2_t v1,size_t vl)2301 void test_vsuxseg2ei16_v_i32m2 (int32_t *base, vuint16m1_t bindex, vint32m2_t v0, vint32m2_t v1, size_t vl) {
2302   return vsuxseg2ei16_v_i32m2(base, bindex, v0, v1, vl);
2303 }
2304 
2305 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i32m2(
2306 // CHECK-RV64-NEXT:  entry:
2307 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2308 // CHECK-RV64-NEXT:    ret void
2309 //
test_vsuxseg3ei16_v_i32m2(int32_t * base,vuint16m1_t bindex,vint32m2_t v0,vint32m2_t v1,vint32m2_t v2,size_t vl)2310 void test_vsuxseg3ei16_v_i32m2 (int32_t *base, vuint16m1_t bindex, vint32m2_t v0, vint32m2_t v1, vint32m2_t v2, size_t vl) {
2311   return vsuxseg3ei16_v_i32m2(base, bindex, v0, v1, v2, vl);
2312 }
2313 
2314 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i32m2(
2315 // CHECK-RV64-NEXT:  entry:
2316 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], <vscale x 4 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2317 // CHECK-RV64-NEXT:    ret void
2318 //
test_vsuxseg4ei16_v_i32m2(int32_t * base,vuint16m1_t bindex,vint32m2_t v0,vint32m2_t v1,vint32m2_t v2,vint32m2_t v3,size_t vl)2319 void test_vsuxseg4ei16_v_i32m2 (int32_t *base, vuint16m1_t bindex, vint32m2_t v0, vint32m2_t v1, vint32m2_t v2, vint32m2_t v3, size_t vl) {
2320   return vsuxseg4ei16_v_i32m2(base, bindex, v0, v1, v2, v3, vl);
2321 }
2322 
2323 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i32m4(
2324 // CHECK-RV64-NEXT:  entry:
2325 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[V0:%.*]], <vscale x 8 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2326 // CHECK-RV64-NEXT:    ret void
2327 //
test_vsuxseg2ei16_v_i32m4(int32_t * base,vuint16m2_t bindex,vint32m4_t v0,vint32m4_t v1,size_t vl)2328 void test_vsuxseg2ei16_v_i32m4 (int32_t *base, vuint16m2_t bindex, vint32m4_t v0, vint32m4_t v1, size_t vl) {
2329   return vsuxseg2ei16_v_i32m4(base, bindex, v0, v1, vl);
2330 }
2331 
2332 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i32mf2(
2333 // CHECK-RV64-NEXT:  entry:
2334 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2335 // CHECK-RV64-NEXT:    ret void
2336 //
test_vsuxseg2ei32_v_i32mf2(int32_t * base,vuint32mf2_t bindex,vint32mf2_t v0,vint32mf2_t v1,size_t vl)2337 void test_vsuxseg2ei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t v0, vint32mf2_t v1, size_t vl) {
2338   return vsuxseg2ei32_v_i32mf2(base, bindex, v0, v1, vl);
2339 }
2340 
2341 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i32mf2(
2342 // CHECK-RV64-NEXT:  entry:
2343 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2344 // CHECK-RV64-NEXT:    ret void
2345 //
test_vsuxseg3ei32_v_i32mf2(int32_t * base,vuint32mf2_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,size_t vl)2346 void test_vsuxseg3ei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, size_t vl) {
2347   return vsuxseg3ei32_v_i32mf2(base, bindex, v0, v1, v2, vl);
2348 }
2349 
2350 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i32mf2(
2351 // CHECK-RV64-NEXT:  entry:
2352 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2353 // CHECK-RV64-NEXT:    ret void
2354 //
test_vsuxseg4ei32_v_i32mf2(int32_t * base,vuint32mf2_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,size_t vl)2355 void test_vsuxseg4ei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, size_t vl) {
2356   return vsuxseg4ei32_v_i32mf2(base, bindex, v0, v1, v2, v3, vl);
2357 }
2358 
2359 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i32mf2(
2360 // CHECK-RV64-NEXT:  entry:
2361 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2362 // CHECK-RV64-NEXT:    ret void
2363 //
test_vsuxseg5ei32_v_i32mf2(int32_t * base,vuint32mf2_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,size_t vl)2364 void test_vsuxseg5ei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, size_t vl) {
2365   return vsuxseg5ei32_v_i32mf2(base, bindex, v0, v1, v2, v3, v4, vl);
2366 }
2367 
2368 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i32mf2(
2369 // CHECK-RV64-NEXT:  entry:
2370 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2371 // CHECK-RV64-NEXT:    ret void
2372 //
test_vsuxseg6ei32_v_i32mf2(int32_t * base,vuint32mf2_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,size_t vl)2373 void test_vsuxseg6ei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, size_t vl) {
2374   return vsuxseg6ei32_v_i32mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
2375 }
2376 
2377 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i32mf2(
2378 // CHECK-RV64-NEXT:  entry:
2379 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2380 // CHECK-RV64-NEXT:    ret void
2381 //
test_vsuxseg7ei32_v_i32mf2(int32_t * base,vuint32mf2_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,vint32mf2_t v6,size_t vl)2382 void test_vsuxseg7ei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, vint32mf2_t v6, size_t vl) {
2383   return vsuxseg7ei32_v_i32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
2384 }
2385 
2386 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i32mf2(
2387 // CHECK-RV64-NEXT:  entry:
2388 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], <vscale x 1 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2389 // CHECK-RV64-NEXT:    ret void
2390 //
test_vsuxseg8ei32_v_i32mf2(int32_t * base,vuint32mf2_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,vint32mf2_t v6,vint32mf2_t v7,size_t vl)2391 void test_vsuxseg8ei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, vint32mf2_t v6, vint32mf2_t v7, size_t vl) {
2392   return vsuxseg8ei32_v_i32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
2393 }
2394 
2395 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i32m1(
2396 // CHECK-RV64-NEXT:  entry:
2397 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2398 // CHECK-RV64-NEXT:    ret void
2399 //
test_vsuxseg2ei32_v_i32m1(int32_t * base,vuint32m1_t bindex,vint32m1_t v0,vint32m1_t v1,size_t vl)2400 void test_vsuxseg2ei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t v0, vint32m1_t v1, size_t vl) {
2401   return vsuxseg2ei32_v_i32m1(base, bindex, v0, v1, vl);
2402 }
2403 
2404 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i32m1(
2405 // CHECK-RV64-NEXT:  entry:
2406 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2407 // CHECK-RV64-NEXT:    ret void
2408 //
test_vsuxseg3ei32_v_i32m1(int32_t * base,vuint32m1_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,size_t vl)2409 void test_vsuxseg3ei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, size_t vl) {
2410   return vsuxseg3ei32_v_i32m1(base, bindex, v0, v1, v2, vl);
2411 }
2412 
2413 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i32m1(
2414 // CHECK-RV64-NEXT:  entry:
2415 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2416 // CHECK-RV64-NEXT:    ret void
2417 //
test_vsuxseg4ei32_v_i32m1(int32_t * base,vuint32m1_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,size_t vl)2418 void test_vsuxseg4ei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, size_t vl) {
2419   return vsuxseg4ei32_v_i32m1(base, bindex, v0, v1, v2, v3, vl);
2420 }
2421 
2422 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i32m1(
2423 // CHECK-RV64-NEXT:  entry:
2424 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2425 // CHECK-RV64-NEXT:    ret void
2426 //
test_vsuxseg5ei32_v_i32m1(int32_t * base,vuint32m1_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,size_t vl)2427 void test_vsuxseg5ei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, size_t vl) {
2428   return vsuxseg5ei32_v_i32m1(base, bindex, v0, v1, v2, v3, v4, vl);
2429 }
2430 
2431 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i32m1(
2432 // CHECK-RV64-NEXT:  entry:
2433 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2434 // CHECK-RV64-NEXT:    ret void
2435 //
test_vsuxseg6ei32_v_i32m1(int32_t * base,vuint32m1_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,size_t vl)2436 void test_vsuxseg6ei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, size_t vl) {
2437   return vsuxseg6ei32_v_i32m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
2438 }
2439 
2440 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i32m1(
2441 // CHECK-RV64-NEXT:  entry:
2442 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2443 // CHECK-RV64-NEXT:    ret void
2444 //
test_vsuxseg7ei32_v_i32m1(int32_t * base,vuint32m1_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,vint32m1_t v6,size_t vl)2445 void test_vsuxseg7ei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, vint32m1_t v6, size_t vl) {
2446   return vsuxseg7ei32_v_i32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
2447 }
2448 
2449 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i32m1(
2450 // CHECK-RV64-NEXT:  entry:
2451 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], <vscale x 2 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2452 // CHECK-RV64-NEXT:    ret void
2453 //
test_vsuxseg8ei32_v_i32m1(int32_t * base,vuint32m1_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,vint32m1_t v6,vint32m1_t v7,size_t vl)2454 void test_vsuxseg8ei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, vint32m1_t v6, vint32m1_t v7, size_t vl) {
2455   return vsuxseg8ei32_v_i32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
2456 }
2457 
2458 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i32m2(
2459 // CHECK-RV64-NEXT:  entry:
2460 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2461 // CHECK-RV64-NEXT:    ret void
2462 //
test_vsuxseg2ei32_v_i32m2(int32_t * base,vuint32m2_t bindex,vint32m2_t v0,vint32m2_t v1,size_t vl)2463 void test_vsuxseg2ei32_v_i32m2 (int32_t *base, vuint32m2_t bindex, vint32m2_t v0, vint32m2_t v1, size_t vl) {
2464   return vsuxseg2ei32_v_i32m2(base, bindex, v0, v1, vl);
2465 }
2466 
2467 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i32m2(
2468 // CHECK-RV64-NEXT:  entry:
2469 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2470 // CHECK-RV64-NEXT:    ret void
2471 //
test_vsuxseg3ei32_v_i32m2(int32_t * base,vuint32m2_t bindex,vint32m2_t v0,vint32m2_t v1,vint32m2_t v2,size_t vl)2472 void test_vsuxseg3ei32_v_i32m2 (int32_t *base, vuint32m2_t bindex, vint32m2_t v0, vint32m2_t v1, vint32m2_t v2, size_t vl) {
2473   return vsuxseg3ei32_v_i32m2(base, bindex, v0, v1, v2, vl);
2474 }
2475 
2476 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i32m2(
2477 // CHECK-RV64-NEXT:  entry:
2478 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], <vscale x 4 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2479 // CHECK-RV64-NEXT:    ret void
2480 //
test_vsuxseg4ei32_v_i32m2(int32_t * base,vuint32m2_t bindex,vint32m2_t v0,vint32m2_t v1,vint32m2_t v2,vint32m2_t v3,size_t vl)2481 void test_vsuxseg4ei32_v_i32m2 (int32_t *base, vuint32m2_t bindex, vint32m2_t v0, vint32m2_t v1, vint32m2_t v2, vint32m2_t v3, size_t vl) {
2482   return vsuxseg4ei32_v_i32m2(base, bindex, v0, v1, v2, v3, vl);
2483 }
2484 
2485 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i32m4(
2486 // CHECK-RV64-NEXT:  entry:
2487 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[V0:%.*]], <vscale x 8 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2488 // CHECK-RV64-NEXT:    ret void
2489 //
test_vsuxseg2ei32_v_i32m4(int32_t * base,vuint32m4_t bindex,vint32m4_t v0,vint32m4_t v1,size_t vl)2490 void test_vsuxseg2ei32_v_i32m4 (int32_t *base, vuint32m4_t bindex, vint32m4_t v0, vint32m4_t v1, size_t vl) {
2491   return vsuxseg2ei32_v_i32m4(base, bindex, v0, v1, vl);
2492 }
2493 
2494 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i32mf2(
2495 // CHECK-RV64-NEXT:  entry:
2496 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2497 // CHECK-RV64-NEXT:    ret void
2498 //
test_vsuxseg2ei64_v_i32mf2(int32_t * base,vuint64m1_t bindex,vint32mf2_t v0,vint32mf2_t v1,size_t vl)2499 void test_vsuxseg2ei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t v0, vint32mf2_t v1, size_t vl) {
2500   return vsuxseg2ei64_v_i32mf2(base, bindex, v0, v1, vl);
2501 }
2502 
2503 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i32mf2(
2504 // CHECK-RV64-NEXT:  entry:
2505 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2506 // CHECK-RV64-NEXT:    ret void
2507 //
test_vsuxseg3ei64_v_i32mf2(int32_t * base,vuint64m1_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,size_t vl)2508 void test_vsuxseg3ei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, size_t vl) {
2509   return vsuxseg3ei64_v_i32mf2(base, bindex, v0, v1, v2, vl);
2510 }
2511 
2512 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i32mf2(
2513 // CHECK-RV64-NEXT:  entry:
2514 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2515 // CHECK-RV64-NEXT:    ret void
2516 //
test_vsuxseg4ei64_v_i32mf2(int32_t * base,vuint64m1_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,size_t vl)2517 void test_vsuxseg4ei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, size_t vl) {
2518   return vsuxseg4ei64_v_i32mf2(base, bindex, v0, v1, v2, v3, vl);
2519 }
2520 
2521 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i32mf2(
2522 // CHECK-RV64-NEXT:  entry:
2523 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2524 // CHECK-RV64-NEXT:    ret void
2525 //
test_vsuxseg5ei64_v_i32mf2(int32_t * base,vuint64m1_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,size_t vl)2526 void test_vsuxseg5ei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, size_t vl) {
2527   return vsuxseg5ei64_v_i32mf2(base, bindex, v0, v1, v2, v3, v4, vl);
2528 }
2529 
2530 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i32mf2(
2531 // CHECK-RV64-NEXT:  entry:
2532 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2533 // CHECK-RV64-NEXT:    ret void
2534 //
test_vsuxseg6ei64_v_i32mf2(int32_t * base,vuint64m1_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,size_t vl)2535 void test_vsuxseg6ei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, size_t vl) {
2536   return vsuxseg6ei64_v_i32mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
2537 }
2538 
2539 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i32mf2(
2540 // CHECK-RV64-NEXT:  entry:
2541 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2542 // CHECK-RV64-NEXT:    ret void
2543 //
// Exercises the 7-field indexed segment store: vint32mf2_t data, vuint64m1_t (EEW=64) index vector.
test_vsuxseg7ei64_v_i32mf2(int32_t * base,vuint64m1_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,vint32mf2_t v6,size_t vl)2544 void test_vsuxseg7ei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, vint32mf2_t v6, size_t vl) {
2545   return vsuxseg7ei64_v_i32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
2546 }
2547 
2548 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i32mf2(
2549 // CHECK-RV64-NEXT:  entry:
2550 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], <vscale x 1 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2551 // CHECK-RV64-NEXT:    ret void
2552 //
// Exercises the 8-field indexed segment store: vint32mf2_t data, vuint64m1_t (EEW=64) index vector.
test_vsuxseg8ei64_v_i32mf2(int32_t * base,vuint64m1_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,vint32mf2_t v6,vint32mf2_t v7,size_t vl)2553 void test_vsuxseg8ei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, vint32mf2_t v6, vint32mf2_t v7, size_t vl) {
2554   return vsuxseg8ei64_v_i32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
2555 }
2556 
2557 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i32m1(
2558 // CHECK-RV64-NEXT:  entry:
2559 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2560 // CHECK-RV64-NEXT:    ret void
2561 //
// Exercises the 2-field indexed segment store: vint32m1_t data, vuint64m2_t (EEW=64) index vector.
test_vsuxseg2ei64_v_i32m1(int32_t * base,vuint64m2_t bindex,vint32m1_t v0,vint32m1_t v1,size_t vl)2562 void test_vsuxseg2ei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t v0, vint32m1_t v1, size_t vl) {
2563   return vsuxseg2ei64_v_i32m1(base, bindex, v0, v1, vl);
2564 }
2565 
2566 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i32m1(
2567 // CHECK-RV64-NEXT:  entry:
2568 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2569 // CHECK-RV64-NEXT:    ret void
2570 //
// Exercises the 3-field indexed segment store: vint32m1_t data, vuint64m2_t (EEW=64) index vector.
test_vsuxseg3ei64_v_i32m1(int32_t * base,vuint64m2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,size_t vl)2571 void test_vsuxseg3ei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, size_t vl) {
2572   return vsuxseg3ei64_v_i32m1(base, bindex, v0, v1, v2, vl);
2573 }
2574 
2575 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i32m1(
2576 // CHECK-RV64-NEXT:  entry:
2577 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2578 // CHECK-RV64-NEXT:    ret void
2579 //
// Exercises the 4-field indexed segment store: vint32m1_t data, vuint64m2_t (EEW=64) index vector.
test_vsuxseg4ei64_v_i32m1(int32_t * base,vuint64m2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,size_t vl)2580 void test_vsuxseg4ei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, size_t vl) {
2581   return vsuxseg4ei64_v_i32m1(base, bindex, v0, v1, v2, v3, vl);
2582 }
2583 
2584 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i32m1(
2585 // CHECK-RV64-NEXT:  entry:
2586 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2587 // CHECK-RV64-NEXT:    ret void
2588 //
// Exercises the 5-field indexed segment store: vint32m1_t data, vuint64m2_t (EEW=64) index vector.
test_vsuxseg5ei64_v_i32m1(int32_t * base,vuint64m2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,size_t vl)2589 void test_vsuxseg5ei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, size_t vl) {
2590   return vsuxseg5ei64_v_i32m1(base, bindex, v0, v1, v2, v3, v4, vl);
2591 }
2592 
2593 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i32m1(
2594 // CHECK-RV64-NEXT:  entry:
2595 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2596 // CHECK-RV64-NEXT:    ret void
2597 //
// Exercises the 6-field indexed segment store: vint32m1_t data, vuint64m2_t (EEW=64) index vector.
test_vsuxseg6ei64_v_i32m1(int32_t * base,vuint64m2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,size_t vl)2598 void test_vsuxseg6ei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, size_t vl) {
2599   return vsuxseg6ei64_v_i32m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
2600 }
2601 
2602 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i32m1(
2603 // CHECK-RV64-NEXT:  entry:
2604 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2605 // CHECK-RV64-NEXT:    ret void
2606 //
// Exercises the 7-field indexed segment store: vint32m1_t data, vuint64m2_t (EEW=64) index vector.
test_vsuxseg7ei64_v_i32m1(int32_t * base,vuint64m2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,vint32m1_t v6,size_t vl)2607 void test_vsuxseg7ei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, vint32m1_t v6, size_t vl) {
2608   return vsuxseg7ei64_v_i32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
2609 }
2610 
2611 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i32m1(
2612 // CHECK-RV64-NEXT:  entry:
2613 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], <vscale x 2 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2614 // CHECK-RV64-NEXT:    ret void
2615 //
// Exercises the 8-field indexed segment store: vint32m1_t data, vuint64m2_t (EEW=64) index vector.
test_vsuxseg8ei64_v_i32m1(int32_t * base,vuint64m2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,vint32m1_t v6,vint32m1_t v7,size_t vl)2616 void test_vsuxseg8ei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, vint32m1_t v6, vint32m1_t v7, size_t vl) {
2617   return vsuxseg8ei64_v_i32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
2618 }
2619 
2620 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i32m2(
2621 // CHECK-RV64-NEXT:  entry:
2622 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2623 // CHECK-RV64-NEXT:    ret void
2624 //
// Exercises the 2-field indexed segment store: vint32m2_t data, vuint64m4_t (EEW=64) index vector.
test_vsuxseg2ei64_v_i32m2(int32_t * base,vuint64m4_t bindex,vint32m2_t v0,vint32m2_t v1,size_t vl)2625 void test_vsuxseg2ei64_v_i32m2 (int32_t *base, vuint64m4_t bindex, vint32m2_t v0, vint32m2_t v1, size_t vl) {
2626   return vsuxseg2ei64_v_i32m2(base, bindex, v0, v1, vl);
2627 }
2628 
2629 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i32m2(
2630 // CHECK-RV64-NEXT:  entry:
2631 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2632 // CHECK-RV64-NEXT:    ret void
2633 //
// Exercises the 3-field indexed segment store: vint32m2_t data, vuint64m4_t (EEW=64) index vector.
test_vsuxseg3ei64_v_i32m2(int32_t * base,vuint64m4_t bindex,vint32m2_t v0,vint32m2_t v1,vint32m2_t v2,size_t vl)2634 void test_vsuxseg3ei64_v_i32m2 (int32_t *base, vuint64m4_t bindex, vint32m2_t v0, vint32m2_t v1, vint32m2_t v2, size_t vl) {
2635   return vsuxseg3ei64_v_i32m2(base, bindex, v0, v1, v2, vl);
2636 }
2637 
2638 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i32m2(
2639 // CHECK-RV64-NEXT:  entry:
2640 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], <vscale x 4 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2641 // CHECK-RV64-NEXT:    ret void
2642 //
// Exercises the 4-field indexed segment store: vint32m2_t data, vuint64m4_t (EEW=64) index vector.
test_vsuxseg4ei64_v_i32m2(int32_t * base,vuint64m4_t bindex,vint32m2_t v0,vint32m2_t v1,vint32m2_t v2,vint32m2_t v3,size_t vl)2643 void test_vsuxseg4ei64_v_i32m2 (int32_t *base, vuint64m4_t bindex, vint32m2_t v0, vint32m2_t v1, vint32m2_t v2, vint32m2_t v3, size_t vl) {
2644   return vsuxseg4ei64_v_i32m2(base, bindex, v0, v1, v2, v3, vl);
2645 }
2646 
2647 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i32m4(
2648 // CHECK-RV64-NEXT:  entry:
2649 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[V0:%.*]], <vscale x 8 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2650 // CHECK-RV64-NEXT:    ret void
2651 //
// Exercises the 2-field indexed segment store: vint32m4_t data, vuint64m8_t (EEW=64) index vector.
test_vsuxseg2ei64_v_i32m4(int32_t * base,vuint64m8_t bindex,vint32m4_t v0,vint32m4_t v1,size_t vl)2652 void test_vsuxseg2ei64_v_i32m4 (int32_t *base, vuint64m8_t bindex, vint32m4_t v0, vint32m4_t v1, size_t vl) {
2653   return vsuxseg2ei64_v_i32m4(base, bindex, v0, v1, vl);
2654 }
2655 
2656 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i64m1(
2657 // CHECK-RV64-NEXT:  entry:
2658 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2659 // CHECK-RV64-NEXT:    ret void
2660 //
// Exercises the 2-field indexed segment store: vint64m1_t data, vuint8mf8_t (EEW=8) index vector.
test_vsuxseg2ei8_v_i64m1(int64_t * base,vuint8mf8_t bindex,vint64m1_t v0,vint64m1_t v1,size_t vl)2661 void test_vsuxseg2ei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t v0, vint64m1_t v1, size_t vl) {
2662   return vsuxseg2ei8_v_i64m1(base, bindex, v0, v1, vl);
2663 }
2664 
2665 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i64m1(
2666 // CHECK-RV64-NEXT:  entry:
2667 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2668 // CHECK-RV64-NEXT:    ret void
2669 //
// Exercises the 3-field indexed segment store: vint64m1_t data, vuint8mf8_t (EEW=8) index vector.
test_vsuxseg3ei8_v_i64m1(int64_t * base,vuint8mf8_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,size_t vl)2670 void test_vsuxseg3ei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, size_t vl) {
2671   return vsuxseg3ei8_v_i64m1(base, bindex, v0, v1, v2, vl);
2672 }
2673 
2674 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i64m1(
2675 // CHECK-RV64-NEXT:  entry:
2676 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2677 // CHECK-RV64-NEXT:    ret void
2678 //
// Exercises the 4-field indexed segment store: vint64m1_t data, vuint8mf8_t (EEW=8) index vector.
test_vsuxseg4ei8_v_i64m1(int64_t * base,vuint8mf8_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,size_t vl)2679 void test_vsuxseg4ei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, size_t vl) {
2680   return vsuxseg4ei8_v_i64m1(base, bindex, v0, v1, v2, v3, vl);
2681 }
2682 
2683 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i64m1(
2684 // CHECK-RV64-NEXT:  entry:
2685 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2686 // CHECK-RV64-NEXT:    ret void
2687 //
// Exercises the 5-field indexed segment store: vint64m1_t data, vuint8mf8_t (EEW=8) index vector.
test_vsuxseg5ei8_v_i64m1(int64_t * base,vuint8mf8_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,vint64m1_t v4,size_t vl)2688 void test_vsuxseg5ei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, size_t vl) {
2689   return vsuxseg5ei8_v_i64m1(base, bindex, v0, v1, v2, v3, v4, vl);
2690 }
2691 
2692 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i64m1(
2693 // CHECK-RV64-NEXT:  entry:
2694 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2695 // CHECK-RV64-NEXT:    ret void
2696 //
// Exercises the 6-field indexed segment store: vint64m1_t data, vuint8mf8_t (EEW=8) index vector.
test_vsuxseg6ei8_v_i64m1(int64_t * base,vuint8mf8_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,vint64m1_t v4,vint64m1_t v5,size_t vl)2697 void test_vsuxseg6ei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, size_t vl) {
2698   return vsuxseg6ei8_v_i64m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
2699 }
2700 
2701 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i64m1(
2702 // CHECK-RV64-NEXT:  entry:
2703 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2704 // CHECK-RV64-NEXT:    ret void
2705 //
// Exercises the 7-field indexed segment store: vint64m1_t data, vuint8mf8_t (EEW=8) index vector.
test_vsuxseg7ei8_v_i64m1(int64_t * base,vuint8mf8_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,vint64m1_t v4,vint64m1_t v5,vint64m1_t v6,size_t vl)2706 void test_vsuxseg7ei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, vint64m1_t v6, size_t vl) {
2707   return vsuxseg7ei8_v_i64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
2708 }
2709 
2710 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i64m1(
2711 // CHECK-RV64-NEXT:  entry:
2712 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], <vscale x 1 x i64> [[V7:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2713 // CHECK-RV64-NEXT:    ret void
2714 //
// Exercises the 8-field indexed segment store: vint64m1_t data, vuint8mf8_t (EEW=8) index vector.
test_vsuxseg8ei8_v_i64m1(int64_t * base,vuint8mf8_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,vint64m1_t v4,vint64m1_t v5,vint64m1_t v6,vint64m1_t v7,size_t vl)2715 void test_vsuxseg8ei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, vint64m1_t v6, vint64m1_t v7, size_t vl) {
2716   return vsuxseg8ei8_v_i64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
2717 }
2718 
2719 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i64m2(
2720 // CHECK-RV64-NEXT:  entry:
2721 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2722 // CHECK-RV64-NEXT:    ret void
2723 //
// Exercises the 2-field indexed segment store: vint64m2_t data, vuint8mf4_t (EEW=8) index vector.
test_vsuxseg2ei8_v_i64m2(int64_t * base,vuint8mf4_t bindex,vint64m2_t v0,vint64m2_t v1,size_t vl)2724 void test_vsuxseg2ei8_v_i64m2 (int64_t *base, vuint8mf4_t bindex, vint64m2_t v0, vint64m2_t v1, size_t vl) {
2725   return vsuxseg2ei8_v_i64m2(base, bindex, v0, v1, vl);
2726 }
2727 
2728 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i64m2(
2729 // CHECK-RV64-NEXT:  entry:
2730 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2731 // CHECK-RV64-NEXT:    ret void
2732 //
// Exercises the 3-field indexed segment store: vint64m2_t data, vuint8mf4_t (EEW=8) index vector.
test_vsuxseg3ei8_v_i64m2(int64_t * base,vuint8mf4_t bindex,vint64m2_t v0,vint64m2_t v1,vint64m2_t v2,size_t vl)2733 void test_vsuxseg3ei8_v_i64m2 (int64_t *base, vuint8mf4_t bindex, vint64m2_t v0, vint64m2_t v1, vint64m2_t v2, size_t vl) {
2734   return vsuxseg3ei8_v_i64m2(base, bindex, v0, v1, v2, vl);
2735 }
2736 
2737 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i64m2(
2738 // CHECK-RV64-NEXT:  entry:
2739 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], <vscale x 2 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2740 // CHECK-RV64-NEXT:    ret void
2741 //
// Exercises the 4-field indexed segment store: vint64m2_t data, vuint8mf4_t (EEW=8) index vector.
test_vsuxseg4ei8_v_i64m2(int64_t * base,vuint8mf4_t bindex,vint64m2_t v0,vint64m2_t v1,vint64m2_t v2,vint64m2_t v3,size_t vl)2742 void test_vsuxseg4ei8_v_i64m2 (int64_t *base, vuint8mf4_t bindex, vint64m2_t v0, vint64m2_t v1, vint64m2_t v2, vint64m2_t v3, size_t vl) {
2743   return vsuxseg4ei8_v_i64m2(base, bindex, v0, v1, v2, v3, vl);
2744 }
2745 
2746 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i64m4(
2747 // CHECK-RV64-NEXT:  entry:
2748 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[V0:%.*]], <vscale x 4 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
2749 // CHECK-RV64-NEXT:    ret void
2750 //
// Exercises the 2-field indexed segment store: vint64m4_t data, vuint8mf2_t (EEW=8) index vector.
test_vsuxseg2ei8_v_i64m4(int64_t * base,vuint8mf2_t bindex,vint64m4_t v0,vint64m4_t v1,size_t vl)2751 void test_vsuxseg2ei8_v_i64m4 (int64_t *base, vuint8mf2_t bindex, vint64m4_t v0, vint64m4_t v1, size_t vl) {
2752   return vsuxseg2ei8_v_i64m4(base, bindex, v0, v1, vl);
2753 }
2754 
2755 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i64m1(
2756 // CHECK-RV64-NEXT:  entry:
2757 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2758 // CHECK-RV64-NEXT:    ret void
2759 //
// Exercises the 2-field indexed segment store: vint64m1_t data, vuint16mf4_t (EEW=16) index vector.
test_vsuxseg2ei16_v_i64m1(int64_t * base,vuint16mf4_t bindex,vint64m1_t v0,vint64m1_t v1,size_t vl)2760 void test_vsuxseg2ei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t v0, vint64m1_t v1, size_t vl) {
2761   return vsuxseg2ei16_v_i64m1(base, bindex, v0, v1, vl);
2762 }
2763 
2764 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i64m1(
2765 // CHECK-RV64-NEXT:  entry:
2766 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2767 // CHECK-RV64-NEXT:    ret void
2768 //
// Exercises the 3-field indexed segment store: vint64m1_t data, vuint16mf4_t (EEW=16) index vector.
test_vsuxseg3ei16_v_i64m1(int64_t * base,vuint16mf4_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,size_t vl)2769 void test_vsuxseg3ei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, size_t vl) {
2770   return vsuxseg3ei16_v_i64m1(base, bindex, v0, v1, v2, vl);
2771 }
2772 
2773 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i64m1(
2774 // CHECK-RV64-NEXT:  entry:
2775 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2776 // CHECK-RV64-NEXT:    ret void
2777 //
// Exercises the 4-field indexed segment store: vint64m1_t data, vuint16mf4_t (EEW=16) index vector.
test_vsuxseg4ei16_v_i64m1(int64_t * base,vuint16mf4_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,size_t vl)2778 void test_vsuxseg4ei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, size_t vl) {
2779   return vsuxseg4ei16_v_i64m1(base, bindex, v0, v1, v2, v3, vl);
2780 }
2781 
2782 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i64m1(
2783 // CHECK-RV64-NEXT:  entry:
2784 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2785 // CHECK-RV64-NEXT:    ret void
2786 //
// Exercises the 5-field indexed segment store: vint64m1_t data, vuint16mf4_t (EEW=16) index vector.
test_vsuxseg5ei16_v_i64m1(int64_t * base,vuint16mf4_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,vint64m1_t v4,size_t vl)2787 void test_vsuxseg5ei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, size_t vl) {
2788   return vsuxseg5ei16_v_i64m1(base, bindex, v0, v1, v2, v3, v4, vl);
2789 }
2790 
2791 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i64m1(
2792 // CHECK-RV64-NEXT:  entry:
2793 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2794 // CHECK-RV64-NEXT:    ret void
2795 //
// Exercises the 6-field indexed segment store: vint64m1_t data, vuint16mf4_t (EEW=16) index vector.
test_vsuxseg6ei16_v_i64m1(int64_t * base,vuint16mf4_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,vint64m1_t v4,vint64m1_t v5,size_t vl)2796 void test_vsuxseg6ei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, size_t vl) {
2797   return vsuxseg6ei16_v_i64m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
2798 }
2799 
2800 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i64m1(
2801 // CHECK-RV64-NEXT:  entry:
2802 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2803 // CHECK-RV64-NEXT:    ret void
2804 //
// Exercises the 7-field indexed segment store: vint64m1_t data, vuint16mf4_t (EEW=16) index vector.
test_vsuxseg7ei16_v_i64m1(int64_t * base,vuint16mf4_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,vint64m1_t v4,vint64m1_t v5,vint64m1_t v6,size_t vl)2805 void test_vsuxseg7ei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, vint64m1_t v6, size_t vl) {
2806   return vsuxseg7ei16_v_i64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
2807 }
2808 
2809 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i64m1(
2810 // CHECK-RV64-NEXT:  entry:
2811 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], <vscale x 1 x i64> [[V7:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2812 // CHECK-RV64-NEXT:    ret void
2813 //
// Exercises the 8-field indexed segment store: vint64m1_t data, vuint16mf4_t (EEW=16) index vector.
test_vsuxseg8ei16_v_i64m1(int64_t * base,vuint16mf4_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,vint64m1_t v4,vint64m1_t v5,vint64m1_t v6,vint64m1_t v7,size_t vl)2814 void test_vsuxseg8ei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, vint64m1_t v6, vint64m1_t v7, size_t vl) {
2815   return vsuxseg8ei16_v_i64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
2816 }
2817 
2818 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i64m2(
2819 // CHECK-RV64-NEXT:  entry:
2820 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2821 // CHECK-RV64-NEXT:    ret void
2822 //
// Exercises the 2-field indexed segment store: vint64m2_t data, vuint16mf2_t (EEW=16) index vector.
test_vsuxseg2ei16_v_i64m2(int64_t * base,vuint16mf2_t bindex,vint64m2_t v0,vint64m2_t v1,size_t vl)2823 void test_vsuxseg2ei16_v_i64m2 (int64_t *base, vuint16mf2_t bindex, vint64m2_t v0, vint64m2_t v1, size_t vl) {
2824   return vsuxseg2ei16_v_i64m2(base, bindex, v0, v1, vl);
2825 }
2826 
2827 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i64m2(
2828 // CHECK-RV64-NEXT:  entry:
2829 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2830 // CHECK-RV64-NEXT:    ret void
2831 //
// Exercises the 3-field indexed segment store: vint64m2_t data, vuint16mf2_t (EEW=16) index vector.
test_vsuxseg3ei16_v_i64m2(int64_t * base,vuint16mf2_t bindex,vint64m2_t v0,vint64m2_t v1,vint64m2_t v2,size_t vl)2832 void test_vsuxseg3ei16_v_i64m2 (int64_t *base, vuint16mf2_t bindex, vint64m2_t v0, vint64m2_t v1, vint64m2_t v2, size_t vl) {
2833   return vsuxseg3ei16_v_i64m2(base, bindex, v0, v1, v2, vl);
2834 }
2835 
2836 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i64m2(
2837 // CHECK-RV64-NEXT:  entry:
2838 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], <vscale x 2 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2839 // CHECK-RV64-NEXT:    ret void
2840 //
// Exercises the 4-field indexed segment store: vint64m2_t data, vuint16mf2_t (EEW=16) index vector.
test_vsuxseg4ei16_v_i64m2(int64_t * base,vuint16mf2_t bindex,vint64m2_t v0,vint64m2_t v1,vint64m2_t v2,vint64m2_t v3,size_t vl)2841 void test_vsuxseg4ei16_v_i64m2 (int64_t *base, vuint16mf2_t bindex, vint64m2_t v0, vint64m2_t v1, vint64m2_t v2, vint64m2_t v3, size_t vl) {
2842   return vsuxseg4ei16_v_i64m2(base, bindex, v0, v1, v2, v3, vl);
2843 }
2844 
2845 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i64m4(
2846 // CHECK-RV64-NEXT:  entry:
2847 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[V0:%.*]], <vscale x 4 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
2848 // CHECK-RV64-NEXT:    ret void
2849 //
// Exercises the 2-field indexed segment store: vint64m4_t data, vuint16m1_t (EEW=16) index vector.
test_vsuxseg2ei16_v_i64m4(int64_t * base,vuint16m1_t bindex,vint64m4_t v0,vint64m4_t v1,size_t vl)2850 void test_vsuxseg2ei16_v_i64m4 (int64_t *base, vuint16m1_t bindex, vint64m4_t v0, vint64m4_t v1, size_t vl) {
2851   return vsuxseg2ei16_v_i64m4(base, bindex, v0, v1, vl);
2852 }
2853 
2854 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i64m1(
2855 // CHECK-RV64-NEXT:  entry:
2856 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2857 // CHECK-RV64-NEXT:    ret void
2858 //
// Exercises the 2-field indexed segment store: vint64m1_t data, vuint32mf2_t (EEW=32) index vector.
test_vsuxseg2ei32_v_i64m1(int64_t * base,vuint32mf2_t bindex,vint64m1_t v0,vint64m1_t v1,size_t vl)2859 void test_vsuxseg2ei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t v0, vint64m1_t v1, size_t vl) {
2860   return vsuxseg2ei32_v_i64m1(base, bindex, v0, v1, vl);
2861 }
2862 
2863 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i64m1(
2864 // CHECK-RV64-NEXT:  entry:
2865 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2866 // CHECK-RV64-NEXT:    ret void
2867 //
// Exercises the 3-field indexed segment store: vint64m1_t data, vuint32mf2_t (EEW=32) index vector.
test_vsuxseg3ei32_v_i64m1(int64_t * base,vuint32mf2_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,size_t vl)2868 void test_vsuxseg3ei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, size_t vl) {
2869   return vsuxseg3ei32_v_i64m1(base, bindex, v0, v1, v2, vl);
2870 }
2871 
2872 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i64m1(
2873 // CHECK-RV64-NEXT:  entry:
2874 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2875 // CHECK-RV64-NEXT:    ret void
2876 //
// Exercises the 4-field indexed segment store: vint64m1_t data, vuint32mf2_t (EEW=32) index vector.
test_vsuxseg4ei32_v_i64m1(int64_t * base,vuint32mf2_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,size_t vl)2877 void test_vsuxseg4ei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, size_t vl) {
2878   return vsuxseg4ei32_v_i64m1(base, bindex, v0, v1, v2, v3, vl);
2879 }
2880 
2881 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i64m1(
2882 // CHECK-RV64-NEXT:  entry:
2883 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2884 // CHECK-RV64-NEXT:    ret void
2885 //
// Exercises the 5-field indexed segment store: vint64m1_t data, vuint32mf2_t (EEW=32) index vector.
test_vsuxseg5ei32_v_i64m1(int64_t * base,vuint32mf2_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,vint64m1_t v4,size_t vl)2886 void test_vsuxseg5ei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, size_t vl) {
2887   return vsuxseg5ei32_v_i64m1(base, bindex, v0, v1, v2, v3, v4, vl);
2888 }
2889 
2890 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i64m1(
2891 // CHECK-RV64-NEXT:  entry:
2892 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2893 // CHECK-RV64-NEXT:    ret void
2894 //
// Exercises the 6-field indexed segment store: vint64m1_t data, vuint32mf2_t (EEW=32) index vector.
test_vsuxseg6ei32_v_i64m1(int64_t * base,vuint32mf2_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,vint64m1_t v4,vint64m1_t v5,size_t vl)2895 void test_vsuxseg6ei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, size_t vl) {
2896   return vsuxseg6ei32_v_i64m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
2897 }
2898 
2899 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i64m1(
2900 // CHECK-RV64-NEXT:  entry:
2901 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2902 // CHECK-RV64-NEXT:    ret void
2903 //
// Exercises the 7-field indexed segment store: vint64m1_t data, vuint32mf2_t (EEW=32) index vector.
test_vsuxseg7ei32_v_i64m1(int64_t * base,vuint32mf2_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,vint64m1_t v4,vint64m1_t v5,vint64m1_t v6,size_t vl)2904 void test_vsuxseg7ei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, vint64m1_t v6, size_t vl) {
2905   return vsuxseg7ei32_v_i64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
2906 }
2907 
2908 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i64m1(
2909 // CHECK-RV64-NEXT:  entry:
2910 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], <vscale x 1 x i64> [[V7:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2911 // CHECK-RV64-NEXT:    ret void
2912 //
// Exercises the 8-field indexed segment store: vint64m1_t data, vuint32mf2_t (EEW=32) index vector.
test_vsuxseg8ei32_v_i64m1(int64_t * base,vuint32mf2_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,vint64m1_t v4,vint64m1_t v5,vint64m1_t v6,vint64m1_t v7,size_t vl)2913 void test_vsuxseg8ei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, vint64m1_t v6, vint64m1_t v7, size_t vl) {
2914   return vsuxseg8ei32_v_i64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
2915 }
2916 
2917 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i64m2(
2918 // CHECK-RV64-NEXT:  entry:
2919 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2920 // CHECK-RV64-NEXT:    ret void
2921 //
// Exercises the vsuxseg2ei32_v_i64m2 builtin: 2 vint64m2_t segment fields, vuint32m1_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg2.
test_vsuxseg2ei32_v_i64m2(int64_t * base,vuint32m1_t bindex,vint64m2_t v0,vint64m2_t v1,size_t vl)2922 void test_vsuxseg2ei32_v_i64m2 (int64_t *base, vuint32m1_t bindex, vint64m2_t v0, vint64m2_t v1, size_t vl) {
2923   return vsuxseg2ei32_v_i64m2(base, bindex, v0, v1, vl);
2924 }
2925 
2926 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i64m2(
2927 // CHECK-RV64-NEXT:  entry:
2928 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2929 // CHECK-RV64-NEXT:    ret void
2930 //
// Exercises the vsuxseg3ei32_v_i64m2 builtin: 3 vint64m2_t segment fields, vuint32m1_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg3.
test_vsuxseg3ei32_v_i64m2(int64_t * base,vuint32m1_t bindex,vint64m2_t v0,vint64m2_t v1,vint64m2_t v2,size_t vl)2931 void test_vsuxseg3ei32_v_i64m2 (int64_t *base, vuint32m1_t bindex, vint64m2_t v0, vint64m2_t v1, vint64m2_t v2, size_t vl) {
2932   return vsuxseg3ei32_v_i64m2(base, bindex, v0, v1, v2, vl);
2933 }
2934 
2935 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i64m2(
2936 // CHECK-RV64-NEXT:  entry:
2937 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], <vscale x 2 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2938 // CHECK-RV64-NEXT:    ret void
2939 //
// Exercises the vsuxseg4ei32_v_i64m2 builtin: 4 vint64m2_t segment fields, vuint32m1_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg4.
test_vsuxseg4ei32_v_i64m2(int64_t * base,vuint32m1_t bindex,vint64m2_t v0,vint64m2_t v1,vint64m2_t v2,vint64m2_t v3,size_t vl)2940 void test_vsuxseg4ei32_v_i64m2 (int64_t *base, vuint32m1_t bindex, vint64m2_t v0, vint64m2_t v1, vint64m2_t v2, vint64m2_t v3, size_t vl) {
2941   return vsuxseg4ei32_v_i64m2(base, bindex, v0, v1, v2, v3, vl);
2942 }
2943 
2944 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i64m4(
2945 // CHECK-RV64-NEXT:  entry:
2946 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[V0:%.*]], <vscale x 4 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
2947 // CHECK-RV64-NEXT:    ret void
2948 //
// Exercises the vsuxseg2ei32_v_i64m4 builtin: 2 vint64m4_t segment fields, vuint32m2_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg2.
test_vsuxseg2ei32_v_i64m4(int64_t * base,vuint32m2_t bindex,vint64m4_t v0,vint64m4_t v1,size_t vl)2949 void test_vsuxseg2ei32_v_i64m4 (int64_t *base, vuint32m2_t bindex, vint64m4_t v0, vint64m4_t v1, size_t vl) {
2950   return vsuxseg2ei32_v_i64m4(base, bindex, v0, v1, vl);
2951 }
2952 
2953 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i64m1(
2954 // CHECK-RV64-NEXT:  entry:
2955 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2956 // CHECK-RV64-NEXT:    ret void
2957 //
// Exercises the vsuxseg2ei64_v_i64m1 builtin: 2 vint64m1_t segment fields, vuint64m1_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg2.
test_vsuxseg2ei64_v_i64m1(int64_t * base,vuint64m1_t bindex,vint64m1_t v0,vint64m1_t v1,size_t vl)2958 void test_vsuxseg2ei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t v0, vint64m1_t v1, size_t vl) {
2959   return vsuxseg2ei64_v_i64m1(base, bindex, v0, v1, vl);
2960 }
2961 
2962 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i64m1(
2963 // CHECK-RV64-NEXT:  entry:
2964 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2965 // CHECK-RV64-NEXT:    ret void
2966 //
// Exercises the vsuxseg3ei64_v_i64m1 builtin: 3 vint64m1_t segment fields, vuint64m1_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg3.
test_vsuxseg3ei64_v_i64m1(int64_t * base,vuint64m1_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,size_t vl)2967 void test_vsuxseg3ei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, size_t vl) {
2968   return vsuxseg3ei64_v_i64m1(base, bindex, v0, v1, v2, vl);
2969 }
2970 
2971 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i64m1(
2972 // CHECK-RV64-NEXT:  entry:
2973 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2974 // CHECK-RV64-NEXT:    ret void
2975 //
// Exercises the vsuxseg4ei64_v_i64m1 builtin: 4 vint64m1_t segment fields, vuint64m1_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg4.
test_vsuxseg4ei64_v_i64m1(int64_t * base,vuint64m1_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,size_t vl)2976 void test_vsuxseg4ei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, size_t vl) {
2977   return vsuxseg4ei64_v_i64m1(base, bindex, v0, v1, v2, v3, vl);
2978 }
2979 
2980 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i64m1(
2981 // CHECK-RV64-NEXT:  entry:
2982 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2983 // CHECK-RV64-NEXT:    ret void
2984 //
// Exercises the vsuxseg5ei64_v_i64m1 builtin: 5 vint64m1_t segment fields, vuint64m1_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg5.
test_vsuxseg5ei64_v_i64m1(int64_t * base,vuint64m1_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,vint64m1_t v4,size_t vl)2985 void test_vsuxseg5ei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, size_t vl) {
2986   return vsuxseg5ei64_v_i64m1(base, bindex, v0, v1, v2, v3, v4, vl);
2987 }
2988 
2989 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i64m1(
2990 // CHECK-RV64-NEXT:  entry:
2991 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
2992 // CHECK-RV64-NEXT:    ret void
2993 //
// Exercises the vsuxseg6ei64_v_i64m1 builtin: 6 vint64m1_t segment fields, vuint64m1_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg6.
test_vsuxseg6ei64_v_i64m1(int64_t * base,vuint64m1_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,vint64m1_t v4,vint64m1_t v5,size_t vl)2994 void test_vsuxseg6ei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, size_t vl) {
2995   return vsuxseg6ei64_v_i64m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
2996 }
2997 
2998 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i64m1(
2999 // CHECK-RV64-NEXT:  entry:
3000 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
3001 // CHECK-RV64-NEXT:    ret void
3002 //
// Exercises the vsuxseg7ei64_v_i64m1 builtin: 7 vint64m1_t segment fields, vuint64m1_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg7.
test_vsuxseg7ei64_v_i64m1(int64_t * base,vuint64m1_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,vint64m1_t v4,vint64m1_t v5,vint64m1_t v6,size_t vl)3003 void test_vsuxseg7ei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, vint64m1_t v6, size_t vl) {
3004   return vsuxseg7ei64_v_i64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
3005 }
3006 
3007 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i64m1(
3008 // CHECK-RV64-NEXT:  entry:
3009 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], <vscale x 1 x i64> [[V7:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
3010 // CHECK-RV64-NEXT:    ret void
3011 //
// Exercises the vsuxseg8ei64_v_i64m1 builtin: 8 vint64m1_t segment fields, vuint64m1_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg8.
test_vsuxseg8ei64_v_i64m1(int64_t * base,vuint64m1_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,vint64m1_t v4,vint64m1_t v5,vint64m1_t v6,vint64m1_t v7,size_t vl)3012 void test_vsuxseg8ei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, vint64m1_t v6, vint64m1_t v7, size_t vl) {
3013   return vsuxseg8ei64_v_i64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
3014 }
3015 
3016 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i64m2(
3017 // CHECK-RV64-NEXT:  entry:
3018 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
3019 // CHECK-RV64-NEXT:    ret void
3020 //
// Exercises the vsuxseg2ei64_v_i64m2 builtin: 2 vint64m2_t segment fields, vuint64m2_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg2.
test_vsuxseg2ei64_v_i64m2(int64_t * base,vuint64m2_t bindex,vint64m2_t v0,vint64m2_t v1,size_t vl)3021 void test_vsuxseg2ei64_v_i64m2 (int64_t *base, vuint64m2_t bindex, vint64m2_t v0, vint64m2_t v1, size_t vl) {
3022   return vsuxseg2ei64_v_i64m2(base, bindex, v0, v1, vl);
3023 }
3024 
3025 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i64m2(
3026 // CHECK-RV64-NEXT:  entry:
3027 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
3028 // CHECK-RV64-NEXT:    ret void
3029 //
// Exercises the vsuxseg3ei64_v_i64m2 builtin: 3 vint64m2_t segment fields, vuint64m2_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg3.
test_vsuxseg3ei64_v_i64m2(int64_t * base,vuint64m2_t bindex,vint64m2_t v0,vint64m2_t v1,vint64m2_t v2,size_t vl)3030 void test_vsuxseg3ei64_v_i64m2 (int64_t *base, vuint64m2_t bindex, vint64m2_t v0, vint64m2_t v1, vint64m2_t v2, size_t vl) {
3031   return vsuxseg3ei64_v_i64m2(base, bindex, v0, v1, v2, vl);
3032 }
3033 
3034 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i64m2(
3035 // CHECK-RV64-NEXT:  entry:
3036 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], <vscale x 2 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
3037 // CHECK-RV64-NEXT:    ret void
3038 //
// Exercises the vsuxseg4ei64_v_i64m2 builtin: 4 vint64m2_t segment fields, vuint64m2_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg4.
test_vsuxseg4ei64_v_i64m2(int64_t * base,vuint64m2_t bindex,vint64m2_t v0,vint64m2_t v1,vint64m2_t v2,vint64m2_t v3,size_t vl)3039 void test_vsuxseg4ei64_v_i64m2 (int64_t *base, vuint64m2_t bindex, vint64m2_t v0, vint64m2_t v1, vint64m2_t v2, vint64m2_t v3, size_t vl) {
3040   return vsuxseg4ei64_v_i64m2(base, bindex, v0, v1, v2, v3, vl);
3041 }
3042 
3043 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i64m4(
3044 // CHECK-RV64-NEXT:  entry:
3045 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[V0:%.*]], <vscale x 4 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
3046 // CHECK-RV64-NEXT:    ret void
3047 //
// Exercises the vsuxseg2ei64_v_i64m4 builtin: 2 vint64m4_t segment fields, vuint64m4_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg2.
test_vsuxseg2ei64_v_i64m4(int64_t * base,vuint64m4_t bindex,vint64m4_t v0,vint64m4_t v1,size_t vl)3048 void test_vsuxseg2ei64_v_i64m4 (int64_t *base, vuint64m4_t bindex, vint64m4_t v0, vint64m4_t v1, size_t vl) {
3049   return vsuxseg2ei64_v_i64m4(base, bindex, v0, v1, vl);
3050 }
3051 
3052 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u8mf8(
3053 // CHECK-RV64-NEXT:  entry:
3054 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3055 // CHECK-RV64-NEXT:    ret void
3056 //
// Exercises the vsuxseg2ei8_v_u8mf8 builtin: 2 vuint8mf8_t segment fields, vuint8mf8_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg2.
test_vsuxseg2ei8_v_u8mf8(uint8_t * base,vuint8mf8_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,size_t vl)3057 void test_vsuxseg2ei8_v_u8mf8 (uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, size_t vl) {
3058   return vsuxseg2ei8_v_u8mf8(base, bindex, v0, v1, vl);
3059 }
3060 
3061 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u8mf8(
3062 // CHECK-RV64-NEXT:  entry:
3063 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3064 // CHECK-RV64-NEXT:    ret void
3065 //
// Exercises the vsuxseg3ei8_v_u8mf8 builtin: 3 vuint8mf8_t segment fields, vuint8mf8_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg3.
test_vsuxseg3ei8_v_u8mf8(uint8_t * base,vuint8mf8_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,size_t vl)3066 void test_vsuxseg3ei8_v_u8mf8 (uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, size_t vl) {
3067   return vsuxseg3ei8_v_u8mf8(base, bindex, v0, v1, v2, vl);
3068 }
3069 
3070 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u8mf8(
3071 // CHECK-RV64-NEXT:  entry:
3072 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3073 // CHECK-RV64-NEXT:    ret void
3074 //
// Exercises the vsuxseg4ei8_v_u8mf8 builtin: 4 vuint8mf8_t segment fields, vuint8mf8_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg4.
test_vsuxseg4ei8_v_u8mf8(uint8_t * base,vuint8mf8_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,size_t vl)3075 void test_vsuxseg4ei8_v_u8mf8 (uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, size_t vl) {
3076   return vsuxseg4ei8_v_u8mf8(base, bindex, v0, v1, v2, v3, vl);
3077 }
3078 
3079 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u8mf8(
3080 // CHECK-RV64-NEXT:  entry:
3081 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3082 // CHECK-RV64-NEXT:    ret void
3083 //
// Exercises the vsuxseg5ei8_v_u8mf8 builtin: 5 vuint8mf8_t segment fields, vuint8mf8_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg5.
test_vsuxseg5ei8_v_u8mf8(uint8_t * base,vuint8mf8_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,size_t vl)3084 void test_vsuxseg5ei8_v_u8mf8 (uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, size_t vl) {
3085   return vsuxseg5ei8_v_u8mf8(base, bindex, v0, v1, v2, v3, v4, vl);
3086 }
3087 
3088 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u8mf8(
3089 // CHECK-RV64-NEXT:  entry:
3090 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3091 // CHECK-RV64-NEXT:    ret void
3092 //
// Exercises the vsuxseg6ei8_v_u8mf8 builtin: 6 vuint8mf8_t segment fields, vuint8mf8_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg6.
test_vsuxseg6ei8_v_u8mf8(uint8_t * base,vuint8mf8_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,vuint8mf8_t v5,size_t vl)3093 void test_vsuxseg6ei8_v_u8mf8 (uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, size_t vl) {
3094   return vsuxseg6ei8_v_u8mf8(base, bindex, v0, v1, v2, v3, v4, v5, vl);
3095 }
3096 
3097 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u8mf8(
3098 // CHECK-RV64-NEXT:  entry:
3099 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3100 // CHECK-RV64-NEXT:    ret void
3101 //
// Exercises the vsuxseg7ei8_v_u8mf8 builtin: 7 vuint8mf8_t segment fields, vuint8mf8_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg7.
test_vsuxseg7ei8_v_u8mf8(uint8_t * base,vuint8mf8_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,vuint8mf8_t v5,vuint8mf8_t v6,size_t vl)3102 void test_vsuxseg7ei8_v_u8mf8 (uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, vuint8mf8_t v6, size_t vl) {
3103   return vsuxseg7ei8_v_u8mf8(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
3104 }
3105 
3106 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u8mf8(
3107 // CHECK-RV64-NEXT:  entry:
3108 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], <vscale x 1 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3109 // CHECK-RV64-NEXT:    ret void
3110 //
// Exercises the vsuxseg8ei8_v_u8mf8 builtin: 8 vuint8mf8_t segment fields, vuint8mf8_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg8.
test_vsuxseg8ei8_v_u8mf8(uint8_t * base,vuint8mf8_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,vuint8mf8_t v5,vuint8mf8_t v6,vuint8mf8_t v7,size_t vl)3111 void test_vsuxseg8ei8_v_u8mf8 (uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, vuint8mf8_t v6, vuint8mf8_t v7, size_t vl) {
3112   return vsuxseg8ei8_v_u8mf8(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
3113 }
3114 
3115 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u8mf4(
3116 // CHECK-RV64-NEXT:  entry:
3117 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3118 // CHECK-RV64-NEXT:    ret void
3119 //
// Exercises the vsuxseg2ei8_v_u8mf4 builtin: 2 vuint8mf4_t segment fields, vuint8mf4_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg2.
test_vsuxseg2ei8_v_u8mf4(uint8_t * base,vuint8mf4_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,size_t vl)3120 void test_vsuxseg2ei8_v_u8mf4 (uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, size_t vl) {
3121   return vsuxseg2ei8_v_u8mf4(base, bindex, v0, v1, vl);
3122 }
3123 
3124 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u8mf4(
3125 // CHECK-RV64-NEXT:  entry:
3126 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3127 // CHECK-RV64-NEXT:    ret void
3128 //
// Exercises the vsuxseg3ei8_v_u8mf4 builtin: 3 vuint8mf4_t segment fields, vuint8mf4_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg3.
test_vsuxseg3ei8_v_u8mf4(uint8_t * base,vuint8mf4_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,size_t vl)3129 void test_vsuxseg3ei8_v_u8mf4 (uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, size_t vl) {
3130   return vsuxseg3ei8_v_u8mf4(base, bindex, v0, v1, v2, vl);
3131 }
3132 
3133 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u8mf4(
3134 // CHECK-RV64-NEXT:  entry:
3135 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3136 // CHECK-RV64-NEXT:    ret void
3137 //
// Exercises the vsuxseg4ei8_v_u8mf4 builtin: 4 vuint8mf4_t segment fields, vuint8mf4_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg4.
test_vsuxseg4ei8_v_u8mf4(uint8_t * base,vuint8mf4_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,size_t vl)3138 void test_vsuxseg4ei8_v_u8mf4 (uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, size_t vl) {
3139   return vsuxseg4ei8_v_u8mf4(base, bindex, v0, v1, v2, v3, vl);
3140 }
3141 
3142 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u8mf4(
3143 // CHECK-RV64-NEXT:  entry:
3144 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3145 // CHECK-RV64-NEXT:    ret void
3146 //
// Exercises the vsuxseg5ei8_v_u8mf4 builtin: 5 vuint8mf4_t segment fields, vuint8mf4_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg5.
test_vsuxseg5ei8_v_u8mf4(uint8_t * base,vuint8mf4_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,size_t vl)3147 void test_vsuxseg5ei8_v_u8mf4 (uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, size_t vl) {
3148   return vsuxseg5ei8_v_u8mf4(base, bindex, v0, v1, v2, v3, v4, vl);
3149 }
3150 
3151 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u8mf4(
3152 // CHECK-RV64-NEXT:  entry:
3153 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3154 // CHECK-RV64-NEXT:    ret void
3155 //
// Exercises the vsuxseg6ei8_v_u8mf4 builtin: 6 vuint8mf4_t segment fields, vuint8mf4_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg6.
test_vsuxseg6ei8_v_u8mf4(uint8_t * base,vuint8mf4_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,vuint8mf4_t v5,size_t vl)3156 void test_vsuxseg6ei8_v_u8mf4 (uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, size_t vl) {
3157   return vsuxseg6ei8_v_u8mf4(base, bindex, v0, v1, v2, v3, v4, v5, vl);
3158 }
3159 
3160 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u8mf4(
3161 // CHECK-RV64-NEXT:  entry:
3162 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3163 // CHECK-RV64-NEXT:    ret void
3164 //
// Exercises the vsuxseg7ei8_v_u8mf4 builtin: 7 vuint8mf4_t segment fields, vuint8mf4_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg7.
test_vsuxseg7ei8_v_u8mf4(uint8_t * base,vuint8mf4_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,vuint8mf4_t v5,vuint8mf4_t v6,size_t vl)3165 void test_vsuxseg7ei8_v_u8mf4 (uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, vuint8mf4_t v6, size_t vl) {
3166   return vsuxseg7ei8_v_u8mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
3167 }
3168 
3169 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u8mf4(
3170 // CHECK-RV64-NEXT:  entry:
3171 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], <vscale x 2 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3172 // CHECK-RV64-NEXT:    ret void
3173 //
// Exercises the vsuxseg8ei8_v_u8mf4 builtin: 8 vuint8mf4_t segment fields, vuint8mf4_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg8.
test_vsuxseg8ei8_v_u8mf4(uint8_t * base,vuint8mf4_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,vuint8mf4_t v5,vuint8mf4_t v6,vuint8mf4_t v7,size_t vl)3174 void test_vsuxseg8ei8_v_u8mf4 (uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, vuint8mf4_t v6, vuint8mf4_t v7, size_t vl) {
3175   return vsuxseg8ei8_v_u8mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
3176 }
3177 
3178 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u8mf2(
3179 // CHECK-RV64-NEXT:  entry:
3180 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3181 // CHECK-RV64-NEXT:    ret void
3182 //
// Exercises the vsuxseg2ei8_v_u8mf2 builtin: 2 vuint8mf2_t segment fields, vuint8mf2_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg2.
test_vsuxseg2ei8_v_u8mf2(uint8_t * base,vuint8mf2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,size_t vl)3183 void test_vsuxseg2ei8_v_u8mf2 (uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, size_t vl) {
3184   return vsuxseg2ei8_v_u8mf2(base, bindex, v0, v1, vl);
3185 }
3186 
3187 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u8mf2(
3188 // CHECK-RV64-NEXT:  entry:
3189 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3190 // CHECK-RV64-NEXT:    ret void
3191 //
// Exercises the vsuxseg3ei8_v_u8mf2 builtin: 3 vuint8mf2_t segment fields, vuint8mf2_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg3.
test_vsuxseg3ei8_v_u8mf2(uint8_t * base,vuint8mf2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,size_t vl)3192 void test_vsuxseg3ei8_v_u8mf2 (uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, size_t vl) {
3193   return vsuxseg3ei8_v_u8mf2(base, bindex, v0, v1, v2, vl);
3194 }
3195 
3196 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u8mf2(
3197 // CHECK-RV64-NEXT:  entry:
3198 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3199 // CHECK-RV64-NEXT:    ret void
3200 //
// Exercises the vsuxseg4ei8_v_u8mf2 builtin: 4 vuint8mf2_t segment fields, vuint8mf2_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg4.
test_vsuxseg4ei8_v_u8mf2(uint8_t * base,vuint8mf2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,size_t vl)3201 void test_vsuxseg4ei8_v_u8mf2 (uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, size_t vl) {
3202   return vsuxseg4ei8_v_u8mf2(base, bindex, v0, v1, v2, v3, vl);
3203 }
3204 
3205 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u8mf2(
3206 // CHECK-RV64-NEXT:  entry:
3207 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3208 // CHECK-RV64-NEXT:    ret void
3209 //
// Exercises the vsuxseg5ei8_v_u8mf2 builtin: 5 vuint8mf2_t segment fields, vuint8mf2_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg5.
test_vsuxseg5ei8_v_u8mf2(uint8_t * base,vuint8mf2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,size_t vl)3210 void test_vsuxseg5ei8_v_u8mf2 (uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, size_t vl) {
3211   return vsuxseg5ei8_v_u8mf2(base, bindex, v0, v1, v2, v3, v4, vl);
3212 }
3213 
3214 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u8mf2(
3215 // CHECK-RV64-NEXT:  entry:
3216 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3217 // CHECK-RV64-NEXT:    ret void
3218 //
// Exercises the vsuxseg6ei8_v_u8mf2 builtin: 6 vuint8mf2_t segment fields, vuint8mf2_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg6.
test_vsuxseg6ei8_v_u8mf2(uint8_t * base,vuint8mf2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,vuint8mf2_t v5,size_t vl)3219 void test_vsuxseg6ei8_v_u8mf2 (uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, size_t vl) {
3220   return vsuxseg6ei8_v_u8mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
3221 }
3222 
3223 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u8mf2(
3224 // CHECK-RV64-NEXT:  entry:
3225 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3226 // CHECK-RV64-NEXT:    ret void
3227 //
// Exercises the vsuxseg7ei8_v_u8mf2 builtin: 7 vuint8mf2_t segment fields, vuint8mf2_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg7.
test_vsuxseg7ei8_v_u8mf2(uint8_t * base,vuint8mf2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,vuint8mf2_t v5,vuint8mf2_t v6,size_t vl)3228 void test_vsuxseg7ei8_v_u8mf2 (uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, vuint8mf2_t v6, size_t vl) {
3229   return vsuxseg7ei8_v_u8mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
3230 }
3231 
3232 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u8mf2(
3233 // CHECK-RV64-NEXT:  entry:
3234 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], <vscale x 4 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3235 // CHECK-RV64-NEXT:    ret void
3236 //
// Exercises the vsuxseg8ei8_v_u8mf2 builtin: 8 vuint8mf2_t segment fields, vuint8mf2_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg8.
test_vsuxseg8ei8_v_u8mf2(uint8_t * base,vuint8mf2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,vuint8mf2_t v5,vuint8mf2_t v6,vuint8mf2_t v7,size_t vl)3237 void test_vsuxseg8ei8_v_u8mf2 (uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, vuint8mf2_t v6, vuint8mf2_t v7, size_t vl) {
3238   return vsuxseg8ei8_v_u8mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
3239 }
3240 
3241 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u8m1(
3242 // CHECK-RV64-NEXT:  entry:
3243 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3244 // CHECK-RV64-NEXT:    ret void
3245 //
// Exercises the vsuxseg2ei8_v_u8m1 builtin: 2 vuint8m1_t segment fields, vuint8m1_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg2.
test_vsuxseg2ei8_v_u8m1(uint8_t * base,vuint8m1_t bindex,vuint8m1_t v0,vuint8m1_t v1,size_t vl)3246 void test_vsuxseg2ei8_v_u8m1 (uint8_t *base, vuint8m1_t bindex, vuint8m1_t v0, vuint8m1_t v1, size_t vl) {
3247   return vsuxseg2ei8_v_u8m1(base, bindex, v0, v1, vl);
3248 }
3249 
3250 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u8m1(
3251 // CHECK-RV64-NEXT:  entry:
3252 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3253 // CHECK-RV64-NEXT:    ret void
3254 //
// Exercises the vsuxseg3ei8_v_u8m1 builtin: 3 vuint8m1_t segment fields, vuint8m1_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg3.
test_vsuxseg3ei8_v_u8m1(uint8_t * base,vuint8m1_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,size_t vl)3255 void test_vsuxseg3ei8_v_u8m1 (uint8_t *base, vuint8m1_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, size_t vl) {
3256   return vsuxseg3ei8_v_u8m1(base, bindex, v0, v1, v2, vl);
3257 }
3258 
3259 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u8m1(
3260 // CHECK-RV64-NEXT:  entry:
3261 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3262 // CHECK-RV64-NEXT:    ret void
3263 //
// Exercises the vsuxseg4ei8_v_u8m1 builtin: 4 vuint8m1_t segment fields, vuint8m1_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg4.
test_vsuxseg4ei8_v_u8m1(uint8_t * base,vuint8m1_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,size_t vl)3264 void test_vsuxseg4ei8_v_u8m1 (uint8_t *base, vuint8m1_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, size_t vl) {
3265   return vsuxseg4ei8_v_u8m1(base, bindex, v0, v1, v2, v3, vl);
3266 }
3267 
3268 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u8m1(
3269 // CHECK-RV64-NEXT:  entry:
3270 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3271 // CHECK-RV64-NEXT:    ret void
3272 //
// Exercises the vsuxseg5ei8_v_u8m1 builtin: 5 vuint8m1_t segment fields, vuint8m1_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg5.
test_vsuxseg5ei8_v_u8m1(uint8_t * base,vuint8m1_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,size_t vl)3273 void test_vsuxseg5ei8_v_u8m1 (uint8_t *base, vuint8m1_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, size_t vl) {
3274   return vsuxseg5ei8_v_u8m1(base, bindex, v0, v1, v2, v3, v4, vl);
3275 }
3276 
3277 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u8m1(
3278 // CHECK-RV64-NEXT:  entry:
3279 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3280 // CHECK-RV64-NEXT:    ret void
3281 //
// Exercises the vsuxseg6ei8_v_u8m1 builtin: 6 vuint8m1_t segment fields, vuint8m1_t index vector; CHECK lines above verify lowering to @llvm.riscv.vsuxseg6.
test_vsuxseg6ei8_v_u8m1(uint8_t * base,vuint8m1_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,vuint8m1_t v5,size_t vl)3282 void test_vsuxseg6ei8_v_u8m1 (uint8_t *base, vuint8m1_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, size_t vl) {
3283   return vsuxseg6ei8_v_u8m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
3284 }
3285 
3286 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u8m1(
3287 // CHECK-RV64-NEXT:  entry:
3288 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3289 // CHECK-RV64-NEXT:    ret void
3290 //
test_vsuxseg7ei8_v_u8m1(uint8_t * base,vuint8m1_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,vuint8m1_t v5,vuint8m1_t v6,size_t vl)3291 void test_vsuxseg7ei8_v_u8m1 (uint8_t *base, vuint8m1_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, vuint8m1_t v6, size_t vl) {
3292   return vsuxseg7ei8_v_u8m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
3293 }
3294 
3295 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u8m1(
3296 // CHECK-RV64-NEXT:  entry:
3297 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], <vscale x 8 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3298 // CHECK-RV64-NEXT:    ret void
3299 //
test_vsuxseg8ei8_v_u8m1(uint8_t * base,vuint8m1_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,vuint8m1_t v5,vuint8m1_t v6,vuint8m1_t v7,size_t vl)3300 void test_vsuxseg8ei8_v_u8m1 (uint8_t *base, vuint8m1_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, vuint8m1_t v6, vuint8m1_t v7, size_t vl) {
3301   return vsuxseg8ei8_v_u8m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
3302 }
3303 
3304 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u8m2(
3305 // CHECK-RV64-NEXT:  entry:
3306 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3307 // CHECK-RV64-NEXT:    ret void
3308 //
test_vsuxseg2ei8_v_u8m2(uint8_t * base,vuint8m2_t bindex,vuint8m2_t v0,vuint8m2_t v1,size_t vl)3309 void test_vsuxseg2ei8_v_u8m2 (uint8_t *base, vuint8m2_t bindex, vuint8m2_t v0, vuint8m2_t v1, size_t vl) {
3310   return vsuxseg2ei8_v_u8m2(base, bindex, v0, v1, vl);
3311 }
3312 
3313 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u8m2(
3314 // CHECK-RV64-NEXT:  entry:
3315 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3316 // CHECK-RV64-NEXT:    ret void
3317 //
test_vsuxseg3ei8_v_u8m2(uint8_t * base,vuint8m2_t bindex,vuint8m2_t v0,vuint8m2_t v1,vuint8m2_t v2,size_t vl)3318 void test_vsuxseg3ei8_v_u8m2 (uint8_t *base, vuint8m2_t bindex, vuint8m2_t v0, vuint8m2_t v1, vuint8m2_t v2, size_t vl) {
3319   return vsuxseg3ei8_v_u8m2(base, bindex, v0, v1, v2, vl);
3320 }
3321 
3322 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u8m2(
3323 // CHECK-RV64-NEXT:  entry:
3324 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], <vscale x 16 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3325 // CHECK-RV64-NEXT:    ret void
3326 //
test_vsuxseg4ei8_v_u8m2(uint8_t * base,vuint8m2_t bindex,vuint8m2_t v0,vuint8m2_t v1,vuint8m2_t v2,vuint8m2_t v3,size_t vl)3327 void test_vsuxseg4ei8_v_u8m2 (uint8_t *base, vuint8m2_t bindex, vuint8m2_t v0, vuint8m2_t v1, vuint8m2_t v2, vuint8m2_t v3, size_t vl) {
3328   return vsuxseg4ei8_v_u8m2(base, bindex, v0, v1, v2, v3, vl);
3329 }
3330 
3331 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u8m4(
3332 // CHECK-RV64-NEXT:  entry:
3333 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[V0:%.*]], <vscale x 32 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
3334 // CHECK-RV64-NEXT:    ret void
3335 //
test_vsuxseg2ei8_v_u8m4(uint8_t * base,vuint8m4_t bindex,vuint8m4_t v0,vuint8m4_t v1,size_t vl)3336 void test_vsuxseg2ei8_v_u8m4 (uint8_t *base, vuint8m4_t bindex, vuint8m4_t v0, vuint8m4_t v1, size_t vl) {
3337   return vsuxseg2ei8_v_u8m4(base, bindex, v0, v1, vl);
3338 }
3339 
// NOTE(review): autogenerated tests for 16-bit-indexed segment stores of
// vuint8mf8_t data; CHECK lines generated by update_cc_test_checks.py —
// regenerate rather than hand-editing.
// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u8mf8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei16_v_u8mf8 (uint8_t *base, vuint16mf4_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, size_t vl) {
  return vsuxseg2ei16_v_u8mf8(base, bindex, v0, v1, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u8mf8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei16_v_u8mf8 (uint8_t *base, vuint16mf4_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, size_t vl) {
  return vsuxseg3ei16_v_u8mf8(base, bindex, v0, v1, v2, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u8mf8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei16_v_u8mf8 (uint8_t *base, vuint16mf4_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, size_t vl) {
  return vsuxseg4ei16_v_u8mf8(base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u8mf8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei16_v_u8mf8 (uint8_t *base, vuint16mf4_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, size_t vl) {
  return vsuxseg5ei16_v_u8mf8(base, bindex, v0, v1, v2, v3, v4, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u8mf8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg6ei16_v_u8mf8 (uint8_t *base, vuint16mf4_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, size_t vl) {
  return vsuxseg6ei16_v_u8mf8(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u8mf8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg7ei16_v_u8mf8 (uint8_t *base, vuint16mf4_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, vuint8mf8_t v6, size_t vl) {
  return vsuxseg7ei16_v_u8mf8(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u8mf8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], <vscale x 1 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg8ei16_v_u8mf8 (uint8_t *base, vuint16mf4_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, vuint8mf8_t v6, vuint8mf8_t v7, size_t vl) {
  return vsuxseg8ei16_v_u8mf8(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
3402 
// NOTE(review): autogenerated tests for 16-bit-indexed segment stores of
// vuint8mf4_t data; CHECK lines generated by update_cc_test_checks.py —
// regenerate rather than hand-editing.
// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u8mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei16_v_u8mf4 (uint8_t *base, vuint16mf2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, size_t vl) {
  return vsuxseg2ei16_v_u8mf4(base, bindex, v0, v1, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u8mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei16_v_u8mf4 (uint8_t *base, vuint16mf2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, size_t vl) {
  return vsuxseg3ei16_v_u8mf4(base, bindex, v0, v1, v2, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u8mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei16_v_u8mf4 (uint8_t *base, vuint16mf2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, size_t vl) {
  return vsuxseg4ei16_v_u8mf4(base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u8mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei16_v_u8mf4 (uint8_t *base, vuint16mf2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, size_t vl) {
  return vsuxseg5ei16_v_u8mf4(base, bindex, v0, v1, v2, v3, v4, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u8mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg6ei16_v_u8mf4 (uint8_t *base, vuint16mf2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, size_t vl) {
  return vsuxseg6ei16_v_u8mf4(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u8mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg7ei16_v_u8mf4 (uint8_t *base, vuint16mf2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, vuint8mf4_t v6, size_t vl) {
  return vsuxseg7ei16_v_u8mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u8mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], <vscale x 2 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg8ei16_v_u8mf4 (uint8_t *base, vuint16mf2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, vuint8mf4_t v6, vuint8mf4_t v7, size_t vl) {
  return vsuxseg8ei16_v_u8mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
3465 
// NOTE(review): autogenerated tests for 16-bit-indexed segment stores of
// vuint8mf2_t data; CHECK lines generated by update_cc_test_checks.py —
// regenerate rather than hand-editing.
// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei16_v_u8mf2 (uint8_t *base, vuint16m1_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, size_t vl) {
  return vsuxseg2ei16_v_u8mf2(base, bindex, v0, v1, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei16_v_u8mf2 (uint8_t *base, vuint16m1_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, size_t vl) {
  return vsuxseg3ei16_v_u8mf2(base, bindex, v0, v1, v2, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei16_v_u8mf2 (uint8_t *base, vuint16m1_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, size_t vl) {
  return vsuxseg4ei16_v_u8mf2(base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei16_v_u8mf2 (uint8_t *base, vuint16m1_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, size_t vl) {
  return vsuxseg5ei16_v_u8mf2(base, bindex, v0, v1, v2, v3, v4, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg6ei16_v_u8mf2 (uint8_t *base, vuint16m1_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, size_t vl) {
  return vsuxseg6ei16_v_u8mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg7ei16_v_u8mf2 (uint8_t *base, vuint16m1_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, vuint8mf2_t v6, size_t vl) {
  return vsuxseg7ei16_v_u8mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], <vscale x 4 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg8ei16_v_u8mf2 (uint8_t *base, vuint16m1_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, vuint8mf2_t v6, vuint8mf2_t v7, size_t vl) {
  return vsuxseg8ei16_v_u8mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
3528 
// NOTE(review): autogenerated tests for 16-bit-indexed segment stores of
// vuint8m1_t data; CHECK lines generated by update_cc_test_checks.py —
// regenerate rather than hand-editing.
// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei16_v_u8m1 (uint8_t *base, vuint16m2_t bindex, vuint8m1_t v0, vuint8m1_t v1, size_t vl) {
  return vsuxseg2ei16_v_u8m1(base, bindex, v0, v1, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei16_v_u8m1 (uint8_t *base, vuint16m2_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, size_t vl) {
  return vsuxseg3ei16_v_u8m1(base, bindex, v0, v1, v2, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei16_v_u8m1 (uint8_t *base, vuint16m2_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, size_t vl) {
  return vsuxseg4ei16_v_u8m1(base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei16_v_u8m1 (uint8_t *base, vuint16m2_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, size_t vl) {
  return vsuxseg5ei16_v_u8m1(base, bindex, v0, v1, v2, v3, v4, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg6ei16_v_u8m1 (uint8_t *base, vuint16m2_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, size_t vl) {
  return vsuxseg6ei16_v_u8m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg7ei16_v_u8m1 (uint8_t *base, vuint16m2_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, vuint8m1_t v6, size_t vl) {
  return vsuxseg7ei16_v_u8m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], <vscale x 8 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg8ei16_v_u8m1 (uint8_t *base, vuint16m2_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, vuint8m1_t v6, vuint8m1_t v7, size_t vl) {
  return vsuxseg8ei16_v_u8m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
3591 
// NOTE(review): autogenerated tests for 16-bit-indexed segment stores of
// vuint8m2_t / vuint8m4_t data; CHECK lines generated by
// update_cc_test_checks.py — regenerate rather than hand-editing.
// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u8m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei16_v_u8m2 (uint8_t *base, vuint16m4_t bindex, vuint8m2_t v0, vuint8m2_t v1, size_t vl) {
  return vsuxseg2ei16_v_u8m2(base, bindex, v0, v1, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u8m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei16_v_u8m2 (uint8_t *base, vuint16m4_t bindex, vuint8m2_t v0, vuint8m2_t v1, vuint8m2_t v2, size_t vl) {
  return vsuxseg3ei16_v_u8m2(base, bindex, v0, v1, v2, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u8m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], <vscale x 16 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei16_v_u8m2 (uint8_t *base, vuint16m4_t bindex, vuint8m2_t v0, vuint8m2_t v1, vuint8m2_t v2, vuint8m2_t v3, size_t vl) {
  return vsuxseg4ei16_v_u8m2(base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u8m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[V0:%.*]], <vscale x 32 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei16_v_u8m4 (uint8_t *base, vuint16m8_t bindex, vuint8m4_t v0, vuint8m4_t v1, size_t vl) {
  return vsuxseg2ei16_v_u8m4(base, bindex, v0, v1, vl);
}
3627 
// NOTE(review): autogenerated tests for 32-bit-indexed segment stores of
// vuint8mf8_t data; CHECK lines generated by update_cc_test_checks.py —
// regenerate rather than hand-editing.
// CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u8mf8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei32_v_u8mf8 (uint8_t *base, vuint32mf2_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, size_t vl) {
  return vsuxseg2ei32_v_u8mf8(base, bindex, v0, v1, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u8mf8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei32_v_u8mf8 (uint8_t *base, vuint32mf2_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, size_t vl) {
  return vsuxseg3ei32_v_u8mf8(base, bindex, v0, v1, v2, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u8mf8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei32_v_u8mf8 (uint8_t *base, vuint32mf2_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, size_t vl) {
  return vsuxseg4ei32_v_u8mf8(base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u8mf8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei32_v_u8mf8 (uint8_t *base, vuint32mf2_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, size_t vl) {
  return vsuxseg5ei32_v_u8mf8(base, bindex, v0, v1, v2, v3, v4, vl);
}
3663 
3664 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u8mf8(
3665 // CHECK-RV64-NEXT:  entry:
3666 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3667 // CHECK-RV64-NEXT:    ret void
3668 //
test_vsuxseg6ei32_v_u8mf8(uint8_t * base,vuint32mf2_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,vuint8mf8_t v5,size_t vl)3669 void test_vsuxseg6ei32_v_u8mf8 (uint8_t *base, vuint32mf2_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, size_t vl) {
3670   return vsuxseg6ei32_v_u8mf8(base, bindex, v0, v1, v2, v3, v4, v5, vl);
3671 }
3672 
3673 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u8mf8(
3674 // CHECK-RV64-NEXT:  entry:
3675 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3676 // CHECK-RV64-NEXT:    ret void
3677 //
test_vsuxseg7ei32_v_u8mf8(uint8_t * base,vuint32mf2_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,vuint8mf8_t v5,vuint8mf8_t v6,size_t vl)3678 void test_vsuxseg7ei32_v_u8mf8 (uint8_t *base, vuint32mf2_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, vuint8mf8_t v6, size_t vl) {
3679   return vsuxseg7ei32_v_u8mf8(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
3680 }
3681 
3682 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u8mf8(
3683 // CHECK-RV64-NEXT:  entry:
3684 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], <vscale x 1 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3685 // CHECK-RV64-NEXT:    ret void
3686 //
test_vsuxseg8ei32_v_u8mf8(uint8_t * base,vuint32mf2_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,vuint8mf8_t v5,vuint8mf8_t v6,vuint8mf8_t v7,size_t vl)3687 void test_vsuxseg8ei32_v_u8mf8 (uint8_t *base, vuint32mf2_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, vuint8mf8_t v6, vuint8mf8_t v7, size_t vl) {
3688   return vsuxseg8ei32_v_u8mf8(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
3689 }
3690 
3691 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u8mf4(
3692 // CHECK-RV64-NEXT:  entry:
3693 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3694 // CHECK-RV64-NEXT:    ret void
3695 //
test_vsuxseg2ei32_v_u8mf4(uint8_t * base,vuint32m1_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,size_t vl)3696 void test_vsuxseg2ei32_v_u8mf4 (uint8_t *base, vuint32m1_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, size_t vl) {
3697   return vsuxseg2ei32_v_u8mf4(base, bindex, v0, v1, vl);
3698 }
3699 
3700 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u8mf4(
3701 // CHECK-RV64-NEXT:  entry:
3702 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3703 // CHECK-RV64-NEXT:    ret void
3704 //
test_vsuxseg3ei32_v_u8mf4(uint8_t * base,vuint32m1_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,size_t vl)3705 void test_vsuxseg3ei32_v_u8mf4 (uint8_t *base, vuint32m1_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, size_t vl) {
3706   return vsuxseg3ei32_v_u8mf4(base, bindex, v0, v1, v2, vl);
3707 }
3708 
3709 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u8mf4(
3710 // CHECK-RV64-NEXT:  entry:
3711 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3712 // CHECK-RV64-NEXT:    ret void
3713 //
test_vsuxseg4ei32_v_u8mf4(uint8_t * base,vuint32m1_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,size_t vl)3714 void test_vsuxseg4ei32_v_u8mf4 (uint8_t *base, vuint32m1_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, size_t vl) {
3715   return vsuxseg4ei32_v_u8mf4(base, bindex, v0, v1, v2, v3, vl);
3716 }
3717 
3718 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u8mf4(
3719 // CHECK-RV64-NEXT:  entry:
3720 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3721 // CHECK-RV64-NEXT:    ret void
3722 //
test_vsuxseg5ei32_v_u8mf4(uint8_t * base,vuint32m1_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,size_t vl)3723 void test_vsuxseg5ei32_v_u8mf4 (uint8_t *base, vuint32m1_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, size_t vl) {
3724   return vsuxseg5ei32_v_u8mf4(base, bindex, v0, v1, v2, v3, v4, vl);
3725 }
3726 
3727 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u8mf4(
3728 // CHECK-RV64-NEXT:  entry:
3729 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3730 // CHECK-RV64-NEXT:    ret void
3731 //
test_vsuxseg6ei32_v_u8mf4(uint8_t * base,vuint32m1_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,vuint8mf4_t v5,size_t vl)3732 void test_vsuxseg6ei32_v_u8mf4 (uint8_t *base, vuint32m1_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, size_t vl) {
3733   return vsuxseg6ei32_v_u8mf4(base, bindex, v0, v1, v2, v3, v4, v5, vl);
3734 }
3735 
3736 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u8mf4(
3737 // CHECK-RV64-NEXT:  entry:
3738 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3739 // CHECK-RV64-NEXT:    ret void
3740 //
test_vsuxseg7ei32_v_u8mf4(uint8_t * base,vuint32m1_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,vuint8mf4_t v5,vuint8mf4_t v6,size_t vl)3741 void test_vsuxseg7ei32_v_u8mf4 (uint8_t *base, vuint32m1_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, vuint8mf4_t v6, size_t vl) {
3742   return vsuxseg7ei32_v_u8mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
3743 }
3744 
3745 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u8mf4(
3746 // CHECK-RV64-NEXT:  entry:
3747 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], <vscale x 2 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3748 // CHECK-RV64-NEXT:    ret void
3749 //
test_vsuxseg8ei32_v_u8mf4(uint8_t * base,vuint32m1_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,vuint8mf4_t v5,vuint8mf4_t v6,vuint8mf4_t v7,size_t vl)3750 void test_vsuxseg8ei32_v_u8mf4 (uint8_t *base, vuint32m1_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, vuint8mf4_t v6, vuint8mf4_t v7, size_t vl) {
3751   return vsuxseg8ei32_v_u8mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
3752 }
3753 
3754 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u8mf2(
3755 // CHECK-RV64-NEXT:  entry:
3756 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3757 // CHECK-RV64-NEXT:    ret void
3758 //
test_vsuxseg2ei32_v_u8mf2(uint8_t * base,vuint32m2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,size_t vl)3759 void test_vsuxseg2ei32_v_u8mf2 (uint8_t *base, vuint32m2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, size_t vl) {
3760   return vsuxseg2ei32_v_u8mf2(base, bindex, v0, v1, vl);
3761 }
3762 
3763 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u8mf2(
3764 // CHECK-RV64-NEXT:  entry:
3765 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3766 // CHECK-RV64-NEXT:    ret void
3767 //
test_vsuxseg3ei32_v_u8mf2(uint8_t * base,vuint32m2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,size_t vl)3768 void test_vsuxseg3ei32_v_u8mf2 (uint8_t *base, vuint32m2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, size_t vl) {
3769   return vsuxseg3ei32_v_u8mf2(base, bindex, v0, v1, v2, vl);
3770 }
3771 
3772 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u8mf2(
3773 // CHECK-RV64-NEXT:  entry:
3774 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3775 // CHECK-RV64-NEXT:    ret void
3776 //
test_vsuxseg4ei32_v_u8mf2(uint8_t * base,vuint32m2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,size_t vl)3777 void test_vsuxseg4ei32_v_u8mf2 (uint8_t *base, vuint32m2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, size_t vl) {
3778   return vsuxseg4ei32_v_u8mf2(base, bindex, v0, v1, v2, v3, vl);
3779 }
3780 
3781 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u8mf2(
3782 // CHECK-RV64-NEXT:  entry:
3783 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3784 // CHECK-RV64-NEXT:    ret void
3785 //
test_vsuxseg5ei32_v_u8mf2(uint8_t * base,vuint32m2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,size_t vl)3786 void test_vsuxseg5ei32_v_u8mf2 (uint8_t *base, vuint32m2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, size_t vl) {
3787   return vsuxseg5ei32_v_u8mf2(base, bindex, v0, v1, v2, v3, v4, vl);
3788 }
3789 
3790 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u8mf2(
3791 // CHECK-RV64-NEXT:  entry:
3792 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3793 // CHECK-RV64-NEXT:    ret void
3794 //
test_vsuxseg6ei32_v_u8mf2(uint8_t * base,vuint32m2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,vuint8mf2_t v5,size_t vl)3795 void test_vsuxseg6ei32_v_u8mf2 (uint8_t *base, vuint32m2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, size_t vl) {
3796   return vsuxseg6ei32_v_u8mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
3797 }
3798 
3799 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u8mf2(
3800 // CHECK-RV64-NEXT:  entry:
3801 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3802 // CHECK-RV64-NEXT:    ret void
3803 //
test_vsuxseg7ei32_v_u8mf2(uint8_t * base,vuint32m2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,vuint8mf2_t v5,vuint8mf2_t v6,size_t vl)3804 void test_vsuxseg7ei32_v_u8mf2 (uint8_t *base, vuint32m2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, vuint8mf2_t v6, size_t vl) {
3805   return vsuxseg7ei32_v_u8mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
3806 }
3807 
3808 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u8mf2(
3809 // CHECK-RV64-NEXT:  entry:
3810 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], <vscale x 4 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3811 // CHECK-RV64-NEXT:    ret void
3812 //
test_vsuxseg8ei32_v_u8mf2(uint8_t * base,vuint32m2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,vuint8mf2_t v5,vuint8mf2_t v6,vuint8mf2_t v7,size_t vl)3813 void test_vsuxseg8ei32_v_u8mf2 (uint8_t *base, vuint32m2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, vuint8mf2_t v6, vuint8mf2_t v7, size_t vl) {
3814   return vsuxseg8ei32_v_u8mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
3815 }
3816 
3817 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u8m1(
3818 // CHECK-RV64-NEXT:  entry:
3819 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3820 // CHECK-RV64-NEXT:    ret void
3821 //
test_vsuxseg2ei32_v_u8m1(uint8_t * base,vuint32m4_t bindex,vuint8m1_t v0,vuint8m1_t v1,size_t vl)3822 void test_vsuxseg2ei32_v_u8m1 (uint8_t *base, vuint32m4_t bindex, vuint8m1_t v0, vuint8m1_t v1, size_t vl) {
3823   return vsuxseg2ei32_v_u8m1(base, bindex, v0, v1, vl);
3824 }
3825 
3826 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u8m1(
3827 // CHECK-RV64-NEXT:  entry:
3828 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3829 // CHECK-RV64-NEXT:    ret void
3830 //
test_vsuxseg3ei32_v_u8m1(uint8_t * base,vuint32m4_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,size_t vl)3831 void test_vsuxseg3ei32_v_u8m1 (uint8_t *base, vuint32m4_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, size_t vl) {
3832   return vsuxseg3ei32_v_u8m1(base, bindex, v0, v1, v2, vl);
3833 }
3834 
3835 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u8m1(
3836 // CHECK-RV64-NEXT:  entry:
3837 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3838 // CHECK-RV64-NEXT:    ret void
3839 //
test_vsuxseg4ei32_v_u8m1(uint8_t * base,vuint32m4_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,size_t vl)3840 void test_vsuxseg4ei32_v_u8m1 (uint8_t *base, vuint32m4_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, size_t vl) {
3841   return vsuxseg4ei32_v_u8m1(base, bindex, v0, v1, v2, v3, vl);
3842 }
3843 
3844 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u8m1(
3845 // CHECK-RV64-NEXT:  entry:
3846 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3847 // CHECK-RV64-NEXT:    ret void
3848 //
test_vsuxseg5ei32_v_u8m1(uint8_t * base,vuint32m4_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,size_t vl)3849 void test_vsuxseg5ei32_v_u8m1 (uint8_t *base, vuint32m4_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, size_t vl) {
3850   return vsuxseg5ei32_v_u8m1(base, bindex, v0, v1, v2, v3, v4, vl);
3851 }
3852 
3853 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u8m1(
3854 // CHECK-RV64-NEXT:  entry:
3855 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3856 // CHECK-RV64-NEXT:    ret void
3857 //
test_vsuxseg6ei32_v_u8m1(uint8_t * base,vuint32m4_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,vuint8m1_t v5,size_t vl)3858 void test_vsuxseg6ei32_v_u8m1 (uint8_t *base, vuint32m4_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, size_t vl) {
3859   return vsuxseg6ei32_v_u8m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
3860 }
3861 
3862 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u8m1(
3863 // CHECK-RV64-NEXT:  entry:
3864 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3865 // CHECK-RV64-NEXT:    ret void
3866 //
test_vsuxseg7ei32_v_u8m1(uint8_t * base,vuint32m4_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,vuint8m1_t v5,vuint8m1_t v6,size_t vl)3867 void test_vsuxseg7ei32_v_u8m1 (uint8_t *base, vuint32m4_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, vuint8m1_t v6, size_t vl) {
3868   return vsuxseg7ei32_v_u8m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
3869 }
3870 
3871 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u8m1(
3872 // CHECK-RV64-NEXT:  entry:
3873 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], <vscale x 8 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3874 // CHECK-RV64-NEXT:    ret void
3875 //
test_vsuxseg8ei32_v_u8m1(uint8_t * base,vuint32m4_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,vuint8m1_t v5,vuint8m1_t v6,vuint8m1_t v7,size_t vl)3876 void test_vsuxseg8ei32_v_u8m1 (uint8_t *base, vuint32m4_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, vuint8m1_t v6, vuint8m1_t v7, size_t vl) {
3877   return vsuxseg8ei32_v_u8m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
3878 }
3879 
3880 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u8m2(
3881 // CHECK-RV64-NEXT:  entry:
3882 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3883 // CHECK-RV64-NEXT:    ret void
3884 //
test_vsuxseg2ei32_v_u8m2(uint8_t * base,vuint32m8_t bindex,vuint8m2_t v0,vuint8m2_t v1,size_t vl)3885 void test_vsuxseg2ei32_v_u8m2 (uint8_t *base, vuint32m8_t bindex, vuint8m2_t v0, vuint8m2_t v1, size_t vl) {
3886   return vsuxseg2ei32_v_u8m2(base, bindex, v0, v1, vl);
3887 }
3888 
3889 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u8m2(
3890 // CHECK-RV64-NEXT:  entry:
3891 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3892 // CHECK-RV64-NEXT:    ret void
3893 //
test_vsuxseg3ei32_v_u8m2(uint8_t * base,vuint32m8_t bindex,vuint8m2_t v0,vuint8m2_t v1,vuint8m2_t v2,size_t vl)3894 void test_vsuxseg3ei32_v_u8m2 (uint8_t *base, vuint32m8_t bindex, vuint8m2_t v0, vuint8m2_t v1, vuint8m2_t v2, size_t vl) {
3895   return vsuxseg3ei32_v_u8m2(base, bindex, v0, v1, v2, vl);
3896 }
3897 
3898 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u8m2(
3899 // CHECK-RV64-NEXT:  entry:
3900 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], <vscale x 16 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
3901 // CHECK-RV64-NEXT:    ret void
3902 //
test_vsuxseg4ei32_v_u8m2(uint8_t * base,vuint32m8_t bindex,vuint8m2_t v0,vuint8m2_t v1,vuint8m2_t v2,vuint8m2_t v3,size_t vl)3903 void test_vsuxseg4ei32_v_u8m2 (uint8_t *base, vuint32m8_t bindex, vuint8m2_t v0, vuint8m2_t v1, vuint8m2_t v2, vuint8m2_t v3, size_t vl) {
3904   return vsuxseg4ei32_v_u8m2(base, bindex, v0, v1, v2, v3, vl);
3905 }
3906 
3907 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u8mf8(
3908 // CHECK-RV64-NEXT:  entry:
3909 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
3910 // CHECK-RV64-NEXT:    ret void
3911 //
test_vsuxseg2ei64_v_u8mf8(uint8_t * base,vuint64m1_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,size_t vl)3912 void test_vsuxseg2ei64_v_u8mf8 (uint8_t *base, vuint64m1_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, size_t vl) {
3913   return vsuxseg2ei64_v_u8mf8(base, bindex, v0, v1, vl);
3914 }
3915 
3916 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u8mf8(
3917 // CHECK-RV64-NEXT:  entry:
3918 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
3919 // CHECK-RV64-NEXT:    ret void
3920 //
test_vsuxseg3ei64_v_u8mf8(uint8_t * base,vuint64m1_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,size_t vl)3921 void test_vsuxseg3ei64_v_u8mf8 (uint8_t *base, vuint64m1_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, size_t vl) {
3922   return vsuxseg3ei64_v_u8mf8(base, bindex, v0, v1, v2, vl);
3923 }
3924 
3925 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u8mf8(
3926 // CHECK-RV64-NEXT:  entry:
3927 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
3928 // CHECK-RV64-NEXT:    ret void
3929 //
test_vsuxseg4ei64_v_u8mf8(uint8_t * base,vuint64m1_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,size_t vl)3930 void test_vsuxseg4ei64_v_u8mf8 (uint8_t *base, vuint64m1_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, size_t vl) {
3931   return vsuxseg4ei64_v_u8mf8(base, bindex, v0, v1, v2, v3, vl);
3932 }
3933 
3934 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u8mf8(
3935 // CHECK-RV64-NEXT:  entry:
3936 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
3937 // CHECK-RV64-NEXT:    ret void
3938 //
test_vsuxseg5ei64_v_u8mf8(uint8_t * base,vuint64m1_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,size_t vl)3939 void test_vsuxseg5ei64_v_u8mf8 (uint8_t *base, vuint64m1_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, size_t vl) {
3940   return vsuxseg5ei64_v_u8mf8(base, bindex, v0, v1, v2, v3, v4, vl);
3941 }
3942 
3943 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u8mf8(
3944 // CHECK-RV64-NEXT:  entry:
3945 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
3946 // CHECK-RV64-NEXT:    ret void
3947 //
test_vsuxseg6ei64_v_u8mf8(uint8_t * base,vuint64m1_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,vuint8mf8_t v5,size_t vl)3948 void test_vsuxseg6ei64_v_u8mf8 (uint8_t *base, vuint64m1_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, size_t vl) {
3949   return vsuxseg6ei64_v_u8mf8(base, bindex, v0, v1, v2, v3, v4, v5, vl);
3950 }
3951 
3952 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u8mf8(
3953 // CHECK-RV64-NEXT:  entry:
3954 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
3955 // CHECK-RV64-NEXT:    ret void
3956 //
test_vsuxseg7ei64_v_u8mf8(uint8_t * base,vuint64m1_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,vuint8mf8_t v5,vuint8mf8_t v6,size_t vl)3957 void test_vsuxseg7ei64_v_u8mf8 (uint8_t *base, vuint64m1_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, vuint8mf8_t v6, size_t vl) {
3958   return vsuxseg7ei64_v_u8mf8(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
3959 }
3960 
3961 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u8mf8(
3962 // CHECK-RV64-NEXT:  entry:
3963 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], <vscale x 1 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
3964 // CHECK-RV64-NEXT:    ret void
3965 //
test_vsuxseg8ei64_v_u8mf8(uint8_t * base,vuint64m1_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,vuint8mf8_t v5,vuint8mf8_t v6,vuint8mf8_t v7,size_t vl)3966 void test_vsuxseg8ei64_v_u8mf8 (uint8_t *base, vuint64m1_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, vuint8mf8_t v6, vuint8mf8_t v7, size_t vl) {
3967   return vsuxseg8ei64_v_u8mf8(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
3968 }
3969 
3970 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u8mf4(
3971 // CHECK-RV64-NEXT:  entry:
3972 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
3973 // CHECK-RV64-NEXT:    ret void
3974 //
test_vsuxseg2ei64_v_u8mf4(uint8_t * base,vuint64m2_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,size_t vl)3975 void test_vsuxseg2ei64_v_u8mf4 (uint8_t *base, vuint64m2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, size_t vl) {
3976   return vsuxseg2ei64_v_u8mf4(base, bindex, v0, v1, vl);
3977 }
3978 
3979 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u8mf4(
3980 // CHECK-RV64-NEXT:  entry:
3981 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
3982 // CHECK-RV64-NEXT:    ret void
3983 //
test_vsuxseg3ei64_v_u8mf4(uint8_t * base,vuint64m2_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,size_t vl)3984 void test_vsuxseg3ei64_v_u8mf4 (uint8_t *base, vuint64m2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, size_t vl) {
3985   return vsuxseg3ei64_v_u8mf4(base, bindex, v0, v1, v2, vl);
3986 }
3987 
3988 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u8mf4(
3989 // CHECK-RV64-NEXT:  entry:
3990 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
3991 // CHECK-RV64-NEXT:    ret void
3992 //
test_vsuxseg4ei64_v_u8mf4(uint8_t * base,vuint64m2_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,size_t vl)3993 void test_vsuxseg4ei64_v_u8mf4 (uint8_t *base, vuint64m2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, size_t vl) {
3994   return vsuxseg4ei64_v_u8mf4(base, bindex, v0, v1, v2, v3, vl);
3995 }
3996 
// CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u8mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg5ei64_v_u8mf4 lowers to @llvm.riscv.vsuxseg5.nxv2i8.nxv2i64.i64.
void test_vsuxseg5ei64_v_u8mf4 (uint8_t *base, vuint64m2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, size_t vl) {
  return vsuxseg5ei64_v_u8mf4(base, bindex, v0, v1, v2, v3, v4, vl);
}
4005 
// CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u8mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg6ei64_v_u8mf4 lowers to @llvm.riscv.vsuxseg6.nxv2i8.nxv2i64.i64.
void test_vsuxseg6ei64_v_u8mf4 (uint8_t *base, vuint64m2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, size_t vl) {
  return vsuxseg6ei64_v_u8mf4(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
4014 
// CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u8mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg7ei64_v_u8mf4 lowers to @llvm.riscv.vsuxseg7.nxv2i8.nxv2i64.i64.
void test_vsuxseg7ei64_v_u8mf4 (uint8_t *base, vuint64m2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, vuint8mf4_t v6, size_t vl) {
  return vsuxseg7ei64_v_u8mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
4023 
// CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u8mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], <vscale x 2 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg8ei64_v_u8mf4 lowers to @llvm.riscv.vsuxseg8.nxv2i8.nxv2i64.i64.
void test_vsuxseg8ei64_v_u8mf4 (uint8_t *base, vuint64m2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, vuint8mf4_t v6, vuint8mf4_t v7, size_t vl) {
  return vsuxseg8ei64_v_u8mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
4032 
// CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg2ei64_v_u8mf2 lowers to @llvm.riscv.vsuxseg2.nxv4i8.nxv4i64.i64.
void test_vsuxseg2ei64_v_u8mf2 (uint8_t *base, vuint64m4_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, size_t vl) {
  return vsuxseg2ei64_v_u8mf2(base, bindex, v0, v1, vl);
}
4041 
// CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg3ei64_v_u8mf2 lowers to @llvm.riscv.vsuxseg3.nxv4i8.nxv4i64.i64.
void test_vsuxseg3ei64_v_u8mf2 (uint8_t *base, vuint64m4_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, size_t vl) {
  return vsuxseg3ei64_v_u8mf2(base, bindex, v0, v1, v2, vl);
}
4050 
// CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg4ei64_v_u8mf2 lowers to @llvm.riscv.vsuxseg4.nxv4i8.nxv4i64.i64.
void test_vsuxseg4ei64_v_u8mf2 (uint8_t *base, vuint64m4_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, size_t vl) {
  return vsuxseg4ei64_v_u8mf2(base, bindex, v0, v1, v2, v3, vl);
}
4059 
// CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg5ei64_v_u8mf2 lowers to @llvm.riscv.vsuxseg5.nxv4i8.nxv4i64.i64.
void test_vsuxseg5ei64_v_u8mf2 (uint8_t *base, vuint64m4_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, size_t vl) {
  return vsuxseg5ei64_v_u8mf2(base, bindex, v0, v1, v2, v3, v4, vl);
}
4068 
// CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg6ei64_v_u8mf2 lowers to @llvm.riscv.vsuxseg6.nxv4i8.nxv4i64.i64.
void test_vsuxseg6ei64_v_u8mf2 (uint8_t *base, vuint64m4_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, size_t vl) {
  return vsuxseg6ei64_v_u8mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
4077 
// CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg7ei64_v_u8mf2 lowers to @llvm.riscv.vsuxseg7.nxv4i8.nxv4i64.i64.
void test_vsuxseg7ei64_v_u8mf2 (uint8_t *base, vuint64m4_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, vuint8mf2_t v6, size_t vl) {
  return vsuxseg7ei64_v_u8mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
4086 
// CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u8mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], <vscale x 4 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg8ei64_v_u8mf2 lowers to @llvm.riscv.vsuxseg8.nxv4i8.nxv4i64.i64.
void test_vsuxseg8ei64_v_u8mf2 (uint8_t *base, vuint64m4_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, vuint8mf2_t v6, vuint8mf2_t v7, size_t vl) {
  return vsuxseg8ei64_v_u8mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
4095 
// CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg2ei64_v_u8m1 lowers to @llvm.riscv.vsuxseg2.nxv8i8.nxv8i64.i64.
void test_vsuxseg2ei64_v_u8m1 (uint8_t *base, vuint64m8_t bindex, vuint8m1_t v0, vuint8m1_t v1, size_t vl) {
  return vsuxseg2ei64_v_u8m1(base, bindex, v0, v1, vl);
}
4104 
// CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg3ei64_v_u8m1 lowers to @llvm.riscv.vsuxseg3.nxv8i8.nxv8i64.i64.
void test_vsuxseg3ei64_v_u8m1 (uint8_t *base, vuint64m8_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, size_t vl) {
  return vsuxseg3ei64_v_u8m1(base, bindex, v0, v1, v2, vl);
}
4113 
// CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg4ei64_v_u8m1 lowers to @llvm.riscv.vsuxseg4.nxv8i8.nxv8i64.i64.
void test_vsuxseg4ei64_v_u8m1 (uint8_t *base, vuint64m8_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, size_t vl) {
  return vsuxseg4ei64_v_u8m1(base, bindex, v0, v1, v2, v3, vl);
}
4122 
// CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg5ei64_v_u8m1 lowers to @llvm.riscv.vsuxseg5.nxv8i8.nxv8i64.i64.
void test_vsuxseg5ei64_v_u8m1 (uint8_t *base, vuint64m8_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, size_t vl) {
  return vsuxseg5ei64_v_u8m1(base, bindex, v0, v1, v2, v3, v4, vl);
}
4131 
// CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg6ei64_v_u8m1 lowers to @llvm.riscv.vsuxseg6.nxv8i8.nxv8i64.i64.
void test_vsuxseg6ei64_v_u8m1 (uint8_t *base, vuint64m8_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, size_t vl) {
  return vsuxseg6ei64_v_u8m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
4140 
// CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg7ei64_v_u8m1 lowers to @llvm.riscv.vsuxseg7.nxv8i8.nxv8i64.i64.
void test_vsuxseg7ei64_v_u8m1 (uint8_t *base, vuint64m8_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, vuint8m1_t v6, size_t vl) {
  return vsuxseg7ei64_v_u8m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
4149 
// CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], <vscale x 8 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg8ei64_v_u8m1 lowers to @llvm.riscv.vsuxseg8.nxv8i8.nxv8i64.i64.
void test_vsuxseg8ei64_v_u8m1 (uint8_t *base, vuint64m8_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, vuint8m1_t v6, vuint8m1_t v7, size_t vl) {
  return vsuxseg8ei64_v_u8m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
4158 
// CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg2ei8_v_u16mf4 lowers to @llvm.riscv.vsuxseg2.nxv1i16.nxv1i8.i64.
void test_vsuxseg2ei8_v_u16mf4 (uint16_t *base, vuint8mf8_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, size_t vl) {
  return vsuxseg2ei8_v_u16mf4(base, bindex, v0, v1, vl);
}
4167 
// CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg3ei8_v_u16mf4 lowers to @llvm.riscv.vsuxseg3.nxv1i16.nxv1i8.i64.
void test_vsuxseg3ei8_v_u16mf4 (uint16_t *base, vuint8mf8_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, size_t vl) {
  return vsuxseg3ei8_v_u16mf4(base, bindex, v0, v1, v2, vl);
}
4176 
// CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg4ei8_v_u16mf4 lowers to @llvm.riscv.vsuxseg4.nxv1i16.nxv1i8.i64.
void test_vsuxseg4ei8_v_u16mf4 (uint16_t *base, vuint8mf8_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, size_t vl) {
  return vsuxseg4ei8_v_u16mf4(base, bindex, v0, v1, v2, v3, vl);
}
4185 
// CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg5ei8_v_u16mf4 lowers to @llvm.riscv.vsuxseg5.nxv1i16.nxv1i8.i64.
void test_vsuxseg5ei8_v_u16mf4 (uint16_t *base, vuint8mf8_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, size_t vl) {
  return vsuxseg5ei8_v_u16mf4(base, bindex, v0, v1, v2, v3, v4, vl);
}
4194 
// CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg6ei8_v_u16mf4 lowers to @llvm.riscv.vsuxseg6.nxv1i16.nxv1i8.i64.
void test_vsuxseg6ei8_v_u16mf4 (uint16_t *base, vuint8mf8_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, size_t vl) {
  return vsuxseg6ei8_v_u16mf4(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
4203 
// CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg7ei8_v_u16mf4 lowers to @llvm.riscv.vsuxseg7.nxv1i16.nxv1i8.i64.
void test_vsuxseg7ei8_v_u16mf4 (uint16_t *base, vuint8mf8_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, vuint16mf4_t v6, size_t vl) {
  return vsuxseg7ei8_v_u16mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
4212 
// CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], <vscale x 1 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg8ei8_v_u16mf4 lowers to @llvm.riscv.vsuxseg8.nxv1i16.nxv1i8.i64.
void test_vsuxseg8ei8_v_u16mf4 (uint16_t *base, vuint8mf8_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, vuint16mf4_t v6, vuint16mf4_t v7, size_t vl) {
  return vsuxseg8ei8_v_u16mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
4221 
// CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg2ei8_v_u16mf2 lowers to @llvm.riscv.vsuxseg2.nxv2i16.nxv2i8.i64.
void test_vsuxseg2ei8_v_u16mf2 (uint16_t *base, vuint8mf4_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, size_t vl) {
  return vsuxseg2ei8_v_u16mf2(base, bindex, v0, v1, vl);
}
4230 
// CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg3ei8_v_u16mf2 lowers to @llvm.riscv.vsuxseg3.nxv2i16.nxv2i8.i64.
void test_vsuxseg3ei8_v_u16mf2 (uint16_t *base, vuint8mf4_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, size_t vl) {
  return vsuxseg3ei8_v_u16mf2(base, bindex, v0, v1, v2, vl);
}
4239 
// CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg4ei8_v_u16mf2 lowers to @llvm.riscv.vsuxseg4.nxv2i16.nxv2i8.i64.
void test_vsuxseg4ei8_v_u16mf2 (uint16_t *base, vuint8mf4_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, size_t vl) {
  return vsuxseg4ei8_v_u16mf2(base, bindex, v0, v1, v2, v3, vl);
}
4248 
// CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg5ei8_v_u16mf2 lowers to @llvm.riscv.vsuxseg5.nxv2i16.nxv2i8.i64.
void test_vsuxseg5ei8_v_u16mf2 (uint16_t *base, vuint8mf4_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, size_t vl) {
  return vsuxseg5ei8_v_u16mf2(base, bindex, v0, v1, v2, v3, v4, vl);
}
4257 
// CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg6ei8_v_u16mf2 lowers to @llvm.riscv.vsuxseg6.nxv2i16.nxv2i8.i64.
void test_vsuxseg6ei8_v_u16mf2 (uint16_t *base, vuint8mf4_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, size_t vl) {
  return vsuxseg6ei8_v_u16mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
4266 
// CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg7ei8_v_u16mf2 lowers to @llvm.riscv.vsuxseg7.nxv2i16.nxv2i8.i64.
void test_vsuxseg7ei8_v_u16mf2 (uint16_t *base, vuint8mf4_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, vuint16mf2_t v6, size_t vl) {
  return vsuxseg7ei8_v_u16mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
4275 
// CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], <vscale x 2 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg8ei8_v_u16mf2 lowers to @llvm.riscv.vsuxseg8.nxv2i16.nxv2i8.i64.
void test_vsuxseg8ei8_v_u16mf2 (uint16_t *base, vuint8mf4_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, vuint16mf2_t v6, vuint16mf2_t v7, size_t vl) {
  return vsuxseg8ei8_v_u16mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
4284 
// CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg2ei8_v_u16m1 lowers to @llvm.riscv.vsuxseg2.nxv4i16.nxv4i8.i64.
void test_vsuxseg2ei8_v_u16m1 (uint16_t *base, vuint8mf2_t bindex, vuint16m1_t v0, vuint16m1_t v1, size_t vl) {
  return vsuxseg2ei8_v_u16m1(base, bindex, v0, v1, vl);
}
4293 
// CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg3ei8_v_u16m1 lowers to @llvm.riscv.vsuxseg3.nxv4i16.nxv4i8.i64.
void test_vsuxseg3ei8_v_u16m1 (uint16_t *base, vuint8mf2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, size_t vl) {
  return vsuxseg3ei8_v_u16m1(base, bindex, v0, v1, v2, vl);
}
4302 
// CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg4ei8_v_u16m1 lowers to @llvm.riscv.vsuxseg4.nxv4i16.nxv4i8.i64.
void test_vsuxseg4ei8_v_u16m1 (uint16_t *base, vuint8mf2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, size_t vl) {
  return vsuxseg4ei8_v_u16m1(base, bindex, v0, v1, v2, v3, vl);
}
4311 
// CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg5ei8_v_u16m1 lowers to @llvm.riscv.vsuxseg5.nxv4i16.nxv4i8.i64.
void test_vsuxseg5ei8_v_u16m1 (uint16_t *base, vuint8mf2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, size_t vl) {
  return vsuxseg5ei8_v_u16m1(base, bindex, v0, v1, v2, v3, v4, vl);
}
4320 
// CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg6ei8_v_u16m1 lowers to @llvm.riscv.vsuxseg6.nxv4i16.nxv4i8.i64.
void test_vsuxseg6ei8_v_u16m1 (uint16_t *base, vuint8mf2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, size_t vl) {
  return vsuxseg6ei8_v_u16m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
4329 
// CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg7ei8_v_u16m1 lowers to @llvm.riscv.vsuxseg7.nxv4i16.nxv4i8.i64.
void test_vsuxseg7ei8_v_u16m1 (uint16_t *base, vuint8mf2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, vuint16m1_t v6, size_t vl) {
  return vsuxseg7ei8_v_u16m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
4338 
// CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], <vscale x 4 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Checks that vsuxseg8ei8_v_u16m1 lowers to @llvm.riscv.vsuxseg8.nxv4i16.nxv4i8.i64.
void test_vsuxseg8ei8_v_u16m1 (uint16_t *base, vuint8mf2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, vuint16m1_t v6, vuint16m1_t v7, size_t vl) {
  return vsuxseg8ei8_v_u16m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
4347 
// 2-field u16m2 segment store, 8-bit indices -> llvm.riscv.vsuxseg2.nxv8i16.nxv8i8 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u16m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei8_v_u16m2 (uint16_t *base, vuint8m1_t bindex, vuint16m2_t v0, vuint16m2_t v1, size_t vl) {
  return vsuxseg2ei8_v_u16m2(base, bindex, v0, v1, vl);
}
4356 
// 3-field u16m2 segment store, 8-bit indices -> llvm.riscv.vsuxseg3.nxv8i16.nxv8i8 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u16m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei8_v_u16m2 (uint16_t *base, vuint8m1_t bindex, vuint16m2_t v0, vuint16m2_t v1, vuint16m2_t v2, size_t vl) {
  return vsuxseg3ei8_v_u16m2(base, bindex, v0, v1, v2, vl);
}
4365 
// 4-field u16m2 segment store, 8-bit indices -> llvm.riscv.vsuxseg4.nxv8i16.nxv8i8 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u16m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], <vscale x 8 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei8_v_u16m2 (uint16_t *base, vuint8m1_t bindex, vuint16m2_t v0, vuint16m2_t v1, vuint16m2_t v2, vuint16m2_t v3, size_t vl) {
  return vsuxseg4ei8_v_u16m2(base, bindex, v0, v1, v2, v3, vl);
}
4374 
// 2-field u16m4 segment store, 8-bit indices -> llvm.riscv.vsuxseg2.nxv16i16.nxv16i8 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u16m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i8.i64(<vscale x 16 x i16> [[V0:%.*]], <vscale x 16 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei8_v_u16m4 (uint16_t *base, vuint8m2_t bindex, vuint16m4_t v0, vuint16m4_t v1, size_t vl) {
  return vsuxseg2ei8_v_u16m4(base, bindex, v0, v1, vl);
}
4383 
// 2-field u16mf4 segment store, 16-bit indices -> llvm.riscv.vsuxseg2.nxv1i16.nxv1i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei16_v_u16mf4 (uint16_t *base, vuint16mf4_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, size_t vl) {
  return vsuxseg2ei16_v_u16mf4(base, bindex, v0, v1, vl);
}
4392 
// 3-field u16mf4 segment store, 16-bit indices -> llvm.riscv.vsuxseg3.nxv1i16.nxv1i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei16_v_u16mf4 (uint16_t *base, vuint16mf4_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, size_t vl) {
  return vsuxseg3ei16_v_u16mf4(base, bindex, v0, v1, v2, vl);
}
4401 
// 4-field u16mf4 segment store, 16-bit indices -> llvm.riscv.vsuxseg4.nxv1i16.nxv1i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei16_v_u16mf4 (uint16_t *base, vuint16mf4_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, size_t vl) {
  return vsuxseg4ei16_v_u16mf4(base, bindex, v0, v1, v2, v3, vl);
}
4410 
// 5-field u16mf4 segment store, 16-bit indices -> llvm.riscv.vsuxseg5.nxv1i16.nxv1i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei16_v_u16mf4 (uint16_t *base, vuint16mf4_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, size_t vl) {
  return vsuxseg5ei16_v_u16mf4(base, bindex, v0, v1, v2, v3, v4, vl);
}
4419 
// 6-field u16mf4 segment store, 16-bit indices -> llvm.riscv.vsuxseg6.nxv1i16.nxv1i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg6ei16_v_u16mf4 (uint16_t *base, vuint16mf4_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, size_t vl) {
  return vsuxseg6ei16_v_u16mf4(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
4428 
// 7-field u16mf4 segment store, 16-bit indices -> llvm.riscv.vsuxseg7.nxv1i16.nxv1i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg7ei16_v_u16mf4 (uint16_t *base, vuint16mf4_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, vuint16mf4_t v6, size_t vl) {
  return vsuxseg7ei16_v_u16mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
4437 
// 8-field u16mf4 segment store, 16-bit indices -> llvm.riscv.vsuxseg8.nxv1i16.nxv1i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], <vscale x 1 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg8ei16_v_u16mf4 (uint16_t *base, vuint16mf4_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, vuint16mf4_t v6, vuint16mf4_t v7, size_t vl) {
  return vsuxseg8ei16_v_u16mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
4446 
// 2-field u16mf2 segment store, 16-bit indices -> llvm.riscv.vsuxseg2.nxv2i16.nxv2i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei16_v_u16mf2 (uint16_t *base, vuint16mf2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, size_t vl) {
  return vsuxseg2ei16_v_u16mf2(base, bindex, v0, v1, vl);
}
4455 
// 3-field u16mf2 segment store, 16-bit indices -> llvm.riscv.vsuxseg3.nxv2i16.nxv2i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei16_v_u16mf2 (uint16_t *base, vuint16mf2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, size_t vl) {
  return vsuxseg3ei16_v_u16mf2(base, bindex, v0, v1, v2, vl);
}
4464 
// 4-field u16mf2 segment store, 16-bit indices -> llvm.riscv.vsuxseg4.nxv2i16.nxv2i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei16_v_u16mf2 (uint16_t *base, vuint16mf2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, size_t vl) {
  return vsuxseg4ei16_v_u16mf2(base, bindex, v0, v1, v2, v3, vl);
}
4473 
// 5-field u16mf2 segment store, 16-bit indices -> llvm.riscv.vsuxseg5.nxv2i16.nxv2i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei16_v_u16mf2 (uint16_t *base, vuint16mf2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, size_t vl) {
  return vsuxseg5ei16_v_u16mf2(base, bindex, v0, v1, v2, v3, v4, vl);
}
4482 
// 6-field u16mf2 segment store, 16-bit indices -> llvm.riscv.vsuxseg6.nxv2i16.nxv2i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg6ei16_v_u16mf2 (uint16_t *base, vuint16mf2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, size_t vl) {
  return vsuxseg6ei16_v_u16mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
4491 
// 7-field u16mf2 segment store, 16-bit indices -> llvm.riscv.vsuxseg7.nxv2i16.nxv2i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg7ei16_v_u16mf2 (uint16_t *base, vuint16mf2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, vuint16mf2_t v6, size_t vl) {
  return vsuxseg7ei16_v_u16mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
4500 
// 8-field u16mf2 segment store, 16-bit indices -> llvm.riscv.vsuxseg8.nxv2i16.nxv2i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], <vscale x 2 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg8ei16_v_u16mf2 (uint16_t *base, vuint16mf2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, vuint16mf2_t v6, vuint16mf2_t v7, size_t vl) {
  return vsuxseg8ei16_v_u16mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
4509 
// 2-field u16m1 segment store, 16-bit indices -> llvm.riscv.vsuxseg2.nxv4i16.nxv4i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei16_v_u16m1 (uint16_t *base, vuint16m1_t bindex, vuint16m1_t v0, vuint16m1_t v1, size_t vl) {
  return vsuxseg2ei16_v_u16m1(base, bindex, v0, v1, vl);
}
4518 
// 3-field u16m1 segment store, 16-bit indices -> llvm.riscv.vsuxseg3.nxv4i16.nxv4i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei16_v_u16m1 (uint16_t *base, vuint16m1_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, size_t vl) {
  return vsuxseg3ei16_v_u16m1(base, bindex, v0, v1, v2, vl);
}
4527 
// 4-field u16m1 segment store, 16-bit indices -> llvm.riscv.vsuxseg4.nxv4i16.nxv4i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei16_v_u16m1 (uint16_t *base, vuint16m1_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, size_t vl) {
  return vsuxseg4ei16_v_u16m1(base, bindex, v0, v1, v2, v3, vl);
}
4536 
// 5-field u16m1 segment store, 16-bit indices -> llvm.riscv.vsuxseg5.nxv4i16.nxv4i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei16_v_u16m1 (uint16_t *base, vuint16m1_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, size_t vl) {
  return vsuxseg5ei16_v_u16m1(base, bindex, v0, v1, v2, v3, v4, vl);
}
4545 
// 6-field u16m1 segment store, 16-bit indices -> llvm.riscv.vsuxseg6.nxv4i16.nxv4i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg6ei16_v_u16m1 (uint16_t *base, vuint16m1_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, size_t vl) {
  return vsuxseg6ei16_v_u16m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
4554 
// 7-field u16m1 segment store, 16-bit indices -> llvm.riscv.vsuxseg7.nxv4i16.nxv4i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg7ei16_v_u16m1 (uint16_t *base, vuint16m1_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, vuint16m1_t v6, size_t vl) {
  return vsuxseg7ei16_v_u16m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
4563 
// 8-field u16m1 segment store, 16-bit indices -> llvm.riscv.vsuxseg8.nxv4i16.nxv4i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], <vscale x 4 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg8ei16_v_u16m1 (uint16_t *base, vuint16m1_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, vuint16m1_t v6, vuint16m1_t v7, size_t vl) {
  return vsuxseg8ei16_v_u16m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
4572 
// 2-field u16m2 segment store, 16-bit indices -> llvm.riscv.vsuxseg2.nxv8i16.nxv8i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u16m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei16_v_u16m2 (uint16_t *base, vuint16m2_t bindex, vuint16m2_t v0, vuint16m2_t v1, size_t vl) {
  return vsuxseg2ei16_v_u16m2(base, bindex, v0, v1, vl);
}
4581 
// 3-field u16m2 segment store, 16-bit indices -> llvm.riscv.vsuxseg3.nxv8i16.nxv8i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u16m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei16_v_u16m2 (uint16_t *base, vuint16m2_t bindex, vuint16m2_t v0, vuint16m2_t v1, vuint16m2_t v2, size_t vl) {
  return vsuxseg3ei16_v_u16m2(base, bindex, v0, v1, v2, vl);
}
4590 
// 4-field u16m2 segment store, 16-bit indices -> llvm.riscv.vsuxseg4.nxv8i16.nxv8i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u16m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], <vscale x 8 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei16_v_u16m2 (uint16_t *base, vuint16m2_t bindex, vuint16m2_t v0, vuint16m2_t v1, vuint16m2_t v2, vuint16m2_t v3, size_t vl) {
  return vsuxseg4ei16_v_u16m2(base, bindex, v0, v1, v2, v3, vl);
}
4599 
// 2-field u16m4 segment store, 16-bit indices -> llvm.riscv.vsuxseg2.nxv16i16.nxv16i16 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u16m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[V0:%.*]], <vscale x 16 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei16_v_u16m4 (uint16_t *base, vuint16m4_t bindex, vuint16m4_t v0, vuint16m4_t v1, size_t vl) {
  return vsuxseg2ei16_v_u16m4(base, bindex, v0, v1, vl);
}
4608 
// 2-field u16mf4 segment store, 32-bit indices -> llvm.riscv.vsuxseg2.nxv1i16.nxv1i32 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei32_v_u16mf4 (uint16_t *base, vuint32mf2_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, size_t vl) {
  return vsuxseg2ei32_v_u16mf4(base, bindex, v0, v1, vl);
}
4617 
// 3-field u16mf4 segment store, 32-bit indices -> llvm.riscv.vsuxseg3.nxv1i16.nxv1i32 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei32_v_u16mf4 (uint16_t *base, vuint32mf2_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, size_t vl) {
  return vsuxseg3ei32_v_u16mf4(base, bindex, v0, v1, v2, vl);
}
4626 
// 4-field u16mf4 segment store, 32-bit indices -> llvm.riscv.vsuxseg4.nxv1i16.nxv1i32 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei32_v_u16mf4 (uint16_t *base, vuint32mf2_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, size_t vl) {
  return vsuxseg4ei32_v_u16mf4(base, bindex, v0, v1, v2, v3, vl);
}
4635 
// 5-field u16mf4 segment store, 32-bit indices -> llvm.riscv.vsuxseg5.nxv1i16.nxv1i32 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei32_v_u16mf4 (uint16_t *base, vuint32mf2_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, size_t vl) {
  return vsuxseg5ei32_v_u16mf4(base, bindex, v0, v1, v2, v3, v4, vl);
}
4644 
// 6-field u16mf4 segment store, 32-bit indices -> llvm.riscv.vsuxseg6.nxv1i16.nxv1i32 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg6ei32_v_u16mf4 (uint16_t *base, vuint32mf2_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, size_t vl) {
  return vsuxseg6ei32_v_u16mf4(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
4653 
// 7-field u16mf4 segment store, 32-bit indices -> llvm.riscv.vsuxseg7.nxv1i16.nxv1i32 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg7ei32_v_u16mf4 (uint16_t *base, vuint32mf2_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, vuint16mf4_t v6, size_t vl) {
  return vsuxseg7ei32_v_u16mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
4662 
// 8-field u16mf4 segment store, 32-bit indices -> llvm.riscv.vsuxseg8.nxv1i16.nxv1i32 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u16mf4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], <vscale x 1 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg8ei32_v_u16mf4 (uint16_t *base, vuint32mf2_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, vuint16mf4_t v6, vuint16mf4_t v7, size_t vl) {
  return vsuxseg8ei32_v_u16mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
4671 
// 2-field u16mf2 segment store, 32-bit indices -> llvm.riscv.vsuxseg2.nxv2i16.nxv2i32 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei32_v_u16mf2 (uint16_t *base, vuint32m1_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, size_t vl) {
  return vsuxseg2ei32_v_u16mf2(base, bindex, v0, v1, vl);
}
4680 
// 3-field u16mf2 segment store, 32-bit indices -> llvm.riscv.vsuxseg3.nxv2i16.nxv2i32 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei32_v_u16mf2 (uint16_t *base, vuint32m1_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, size_t vl) {
  return vsuxseg3ei32_v_u16mf2(base, bindex, v0, v1, v2, vl);
}
4689 
// 4-field u16mf2 segment store, 32-bit indices -> llvm.riscv.vsuxseg4.nxv2i16.nxv2i32 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei32_v_u16mf2 (uint16_t *base, vuint32m1_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, size_t vl) {
  return vsuxseg4ei32_v_u16mf2(base, bindex, v0, v1, v2, v3, vl);
}
4698 
// 5-field u16mf2 segment store, 32-bit indices -> llvm.riscv.vsuxseg5.nxv2i16.nxv2i32 (CHECK lines autogenerated; do not hand-edit).
// CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u16mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei32_v_u16mf2 (uint16_t *base, vuint32m1_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, size_t vl) {
  return vsuxseg5ei32_v_u16mf2(base, bindex, v0, v1, v2, v3, v4, vl);
}
4707 
4708 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u16mf2(
4709 // CHECK-RV64-NEXT:  entry:
4710 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
4711 // CHECK-RV64-NEXT:    ret void
4712 //
// Wrapper exercising vsuxseg6ei32_v_u16mf2; must emit @llvm.riscv.vsuxseg6 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg6ei32_v_u16mf2(uint16_t * base,vuint32m1_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,vuint16mf2_t v5,size_t vl)4713 void test_vsuxseg6ei32_v_u16mf2 (uint16_t *base, vuint32m1_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, size_t vl) {
4714   return vsuxseg6ei32_v_u16mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
4715 }
4716 
4717 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u16mf2(
4718 // CHECK-RV64-NEXT:  entry:
4719 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
4720 // CHECK-RV64-NEXT:    ret void
4721 //
// Wrapper exercising vsuxseg7ei32_v_u16mf2; must emit @llvm.riscv.vsuxseg7 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg7ei32_v_u16mf2(uint16_t * base,vuint32m1_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,vuint16mf2_t v5,vuint16mf2_t v6,size_t vl)4722 void test_vsuxseg7ei32_v_u16mf2 (uint16_t *base, vuint32m1_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, vuint16mf2_t v6, size_t vl) {
4723   return vsuxseg7ei32_v_u16mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
4724 }
4725 
4726 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u16mf2(
4727 // CHECK-RV64-NEXT:  entry:
4728 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], <vscale x 2 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
4729 // CHECK-RV64-NEXT:    ret void
4730 //
// Wrapper exercising vsuxseg8ei32_v_u16mf2; must emit @llvm.riscv.vsuxseg8 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg8ei32_v_u16mf2(uint16_t * base,vuint32m1_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,vuint16mf2_t v5,vuint16mf2_t v6,vuint16mf2_t v7,size_t vl)4731 void test_vsuxseg8ei32_v_u16mf2 (uint16_t *base, vuint32m1_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, vuint16mf2_t v6, vuint16mf2_t v7, size_t vl) {
4732   return vsuxseg8ei32_v_u16mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
4733 }
4734 
4735 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u16m1(
4736 // CHECK-RV64-NEXT:  entry:
4737 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
4738 // CHECK-RV64-NEXT:    ret void
4739 //
// Wrapper exercising vsuxseg2ei32_v_u16m1; must emit @llvm.riscv.vsuxseg2 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg2ei32_v_u16m1(uint16_t * base,vuint32m2_t bindex,vuint16m1_t v0,vuint16m1_t v1,size_t vl)4740 void test_vsuxseg2ei32_v_u16m1 (uint16_t *base, vuint32m2_t bindex, vuint16m1_t v0, vuint16m1_t v1, size_t vl) {
4741   return vsuxseg2ei32_v_u16m1(base, bindex, v0, v1, vl);
4742 }
4743 
4744 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u16m1(
4745 // CHECK-RV64-NEXT:  entry:
4746 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
4747 // CHECK-RV64-NEXT:    ret void
4748 //
// Wrapper exercising vsuxseg3ei32_v_u16m1; must emit @llvm.riscv.vsuxseg3 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg3ei32_v_u16m1(uint16_t * base,vuint32m2_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,size_t vl)4749 void test_vsuxseg3ei32_v_u16m1 (uint16_t *base, vuint32m2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, size_t vl) {
4750   return vsuxseg3ei32_v_u16m1(base, bindex, v0, v1, v2, vl);
4751 }
4752 
4753 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u16m1(
4754 // CHECK-RV64-NEXT:  entry:
4755 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
4756 // CHECK-RV64-NEXT:    ret void
4757 //
// Wrapper exercising vsuxseg4ei32_v_u16m1; must emit @llvm.riscv.vsuxseg4 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg4ei32_v_u16m1(uint16_t * base,vuint32m2_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,size_t vl)4758 void test_vsuxseg4ei32_v_u16m1 (uint16_t *base, vuint32m2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, size_t vl) {
4759   return vsuxseg4ei32_v_u16m1(base, bindex, v0, v1, v2, v3, vl);
4760 }
4761 
4762 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u16m1(
4763 // CHECK-RV64-NEXT:  entry:
4764 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
4765 // CHECK-RV64-NEXT:    ret void
4766 //
// Wrapper exercising vsuxseg5ei32_v_u16m1; must emit @llvm.riscv.vsuxseg5 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg5ei32_v_u16m1(uint16_t * base,vuint32m2_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,vuint16m1_t v4,size_t vl)4767 void test_vsuxseg5ei32_v_u16m1 (uint16_t *base, vuint32m2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, size_t vl) {
4768   return vsuxseg5ei32_v_u16m1(base, bindex, v0, v1, v2, v3, v4, vl);
4769 }
4770 
4771 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u16m1(
4772 // CHECK-RV64-NEXT:  entry:
4773 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
4774 // CHECK-RV64-NEXT:    ret void
4775 //
// Wrapper exercising vsuxseg6ei32_v_u16m1; must emit @llvm.riscv.vsuxseg6 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg6ei32_v_u16m1(uint16_t * base,vuint32m2_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,vuint16m1_t v4,vuint16m1_t v5,size_t vl)4776 void test_vsuxseg6ei32_v_u16m1 (uint16_t *base, vuint32m2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, size_t vl) {
4777   return vsuxseg6ei32_v_u16m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
4778 }
4779 
4780 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u16m1(
4781 // CHECK-RV64-NEXT:  entry:
4782 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
4783 // CHECK-RV64-NEXT:    ret void
4784 //
// Wrapper exercising vsuxseg7ei32_v_u16m1; must emit @llvm.riscv.vsuxseg7 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg7ei32_v_u16m1(uint16_t * base,vuint32m2_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,vuint16m1_t v4,vuint16m1_t v5,vuint16m1_t v6,size_t vl)4785 void test_vsuxseg7ei32_v_u16m1 (uint16_t *base, vuint32m2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, vuint16m1_t v6, size_t vl) {
4786   return vsuxseg7ei32_v_u16m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
4787 }
4788 
4789 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u16m1(
4790 // CHECK-RV64-NEXT:  entry:
4791 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], <vscale x 4 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
4792 // CHECK-RV64-NEXT:    ret void
4793 //
// Wrapper exercising vsuxseg8ei32_v_u16m1; must emit @llvm.riscv.vsuxseg8 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg8ei32_v_u16m1(uint16_t * base,vuint32m2_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,vuint16m1_t v4,vuint16m1_t v5,vuint16m1_t v6,vuint16m1_t v7,size_t vl)4794 void test_vsuxseg8ei32_v_u16m1 (uint16_t *base, vuint32m2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, vuint16m1_t v6, vuint16m1_t v7, size_t vl) {
4795   return vsuxseg8ei32_v_u16m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
4796 }
4797 
4798 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u16m2(
4799 // CHECK-RV64-NEXT:  entry:
4800 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
4801 // CHECK-RV64-NEXT:    ret void
4802 //
// Wrapper exercising vsuxseg2ei32_v_u16m2; must emit @llvm.riscv.vsuxseg2 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg2ei32_v_u16m2(uint16_t * base,vuint32m4_t bindex,vuint16m2_t v0,vuint16m2_t v1,size_t vl)4803 void test_vsuxseg2ei32_v_u16m2 (uint16_t *base, vuint32m4_t bindex, vuint16m2_t v0, vuint16m2_t v1, size_t vl) {
4804   return vsuxseg2ei32_v_u16m2(base, bindex, v0, v1, vl);
4805 }
4806 
4807 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u16m2(
4808 // CHECK-RV64-NEXT:  entry:
4809 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
4810 // CHECK-RV64-NEXT:    ret void
4811 //
// Wrapper exercising vsuxseg3ei32_v_u16m2; must emit @llvm.riscv.vsuxseg3 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg3ei32_v_u16m2(uint16_t * base,vuint32m4_t bindex,vuint16m2_t v0,vuint16m2_t v1,vuint16m2_t v2,size_t vl)4812 void test_vsuxseg3ei32_v_u16m2 (uint16_t *base, vuint32m4_t bindex, vuint16m2_t v0, vuint16m2_t v1, vuint16m2_t v2, size_t vl) {
4813   return vsuxseg3ei32_v_u16m2(base, bindex, v0, v1, v2, vl);
4814 }
4815 
4816 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u16m2(
4817 // CHECK-RV64-NEXT:  entry:
4818 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], <vscale x 8 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
4819 // CHECK-RV64-NEXT:    ret void
4820 //
// Wrapper exercising vsuxseg4ei32_v_u16m2; must emit @llvm.riscv.vsuxseg4 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg4ei32_v_u16m2(uint16_t * base,vuint32m4_t bindex,vuint16m2_t v0,vuint16m2_t v1,vuint16m2_t v2,vuint16m2_t v3,size_t vl)4821 void test_vsuxseg4ei32_v_u16m2 (uint16_t *base, vuint32m4_t bindex, vuint16m2_t v0, vuint16m2_t v1, vuint16m2_t v2, vuint16m2_t v3, size_t vl) {
4822   return vsuxseg4ei32_v_u16m2(base, bindex, v0, v1, v2, v3, vl);
4823 }
4824 
4825 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u16m4(
4826 // CHECK-RV64-NEXT:  entry:
4827 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i32.i64(<vscale x 16 x i16> [[V0:%.*]], <vscale x 16 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
4828 // CHECK-RV64-NEXT:    ret void
4829 //
// Wrapper exercising vsuxseg2ei32_v_u16m4; must emit @llvm.riscv.vsuxseg2 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg2ei32_v_u16m4(uint16_t * base,vuint32m8_t bindex,vuint16m4_t v0,vuint16m4_t v1,size_t vl)4830 void test_vsuxseg2ei32_v_u16m4 (uint16_t *base, vuint32m8_t bindex, vuint16m4_t v0, vuint16m4_t v1, size_t vl) {
4831   return vsuxseg2ei32_v_u16m4(base, bindex, v0, v1, vl);
4832 }
4833 
4834 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u16mf4(
4835 // CHECK-RV64-NEXT:  entry:
4836 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4837 // CHECK-RV64-NEXT:    ret void
4838 //
// Wrapper exercising vsuxseg2ei64_v_u16mf4; must emit @llvm.riscv.vsuxseg2 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg2ei64_v_u16mf4(uint16_t * base,vuint64m1_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,size_t vl)4839 void test_vsuxseg2ei64_v_u16mf4 (uint16_t *base, vuint64m1_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, size_t vl) {
4840   return vsuxseg2ei64_v_u16mf4(base, bindex, v0, v1, vl);
4841 }
4842 
4843 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u16mf4(
4844 // CHECK-RV64-NEXT:  entry:
4845 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4846 // CHECK-RV64-NEXT:    ret void
4847 //
// Wrapper exercising vsuxseg3ei64_v_u16mf4; must emit @llvm.riscv.vsuxseg3 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg3ei64_v_u16mf4(uint16_t * base,vuint64m1_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,size_t vl)4848 void test_vsuxseg3ei64_v_u16mf4 (uint16_t *base, vuint64m1_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, size_t vl) {
4849   return vsuxseg3ei64_v_u16mf4(base, bindex, v0, v1, v2, vl);
4850 }
4851 
4852 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u16mf4(
4853 // CHECK-RV64-NEXT:  entry:
4854 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4855 // CHECK-RV64-NEXT:    ret void
4856 //
// Wrapper exercising vsuxseg4ei64_v_u16mf4; must emit @llvm.riscv.vsuxseg4 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg4ei64_v_u16mf4(uint16_t * base,vuint64m1_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,size_t vl)4857 void test_vsuxseg4ei64_v_u16mf4 (uint16_t *base, vuint64m1_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, size_t vl) {
4858   return vsuxseg4ei64_v_u16mf4(base, bindex, v0, v1, v2, v3, vl);
4859 }
4860 
4861 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u16mf4(
4862 // CHECK-RV64-NEXT:  entry:
4863 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4864 // CHECK-RV64-NEXT:    ret void
4865 //
// Wrapper exercising vsuxseg5ei64_v_u16mf4; must emit @llvm.riscv.vsuxseg5 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg5ei64_v_u16mf4(uint16_t * base,vuint64m1_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,vuint16mf4_t v4,size_t vl)4866 void test_vsuxseg5ei64_v_u16mf4 (uint16_t *base, vuint64m1_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, size_t vl) {
4867   return vsuxseg5ei64_v_u16mf4(base, bindex, v0, v1, v2, v3, v4, vl);
4868 }
4869 
4870 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u16mf4(
4871 // CHECK-RV64-NEXT:  entry:
4872 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4873 // CHECK-RV64-NEXT:    ret void
4874 //
// Wrapper exercising vsuxseg6ei64_v_u16mf4; must emit @llvm.riscv.vsuxseg6 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg6ei64_v_u16mf4(uint16_t * base,vuint64m1_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,vuint16mf4_t v4,vuint16mf4_t v5,size_t vl)4875 void test_vsuxseg6ei64_v_u16mf4 (uint16_t *base, vuint64m1_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, size_t vl) {
4876   return vsuxseg6ei64_v_u16mf4(base, bindex, v0, v1, v2, v3, v4, v5, vl);
4877 }
4878 
4879 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u16mf4(
4880 // CHECK-RV64-NEXT:  entry:
4881 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4882 // CHECK-RV64-NEXT:    ret void
4883 //
// Wrapper exercising vsuxseg7ei64_v_u16mf4; must emit @llvm.riscv.vsuxseg7 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg7ei64_v_u16mf4(uint16_t * base,vuint64m1_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,vuint16mf4_t v4,vuint16mf4_t v5,vuint16mf4_t v6,size_t vl)4884 void test_vsuxseg7ei64_v_u16mf4 (uint16_t *base, vuint64m1_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, vuint16mf4_t v6, size_t vl) {
4885   return vsuxseg7ei64_v_u16mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
4886 }
4887 
4888 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u16mf4(
4889 // CHECK-RV64-NEXT:  entry:
4890 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], <vscale x 1 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4891 // CHECK-RV64-NEXT:    ret void
4892 //
// Wrapper exercising vsuxseg8ei64_v_u16mf4; must emit @llvm.riscv.vsuxseg8 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg8ei64_v_u16mf4(uint16_t * base,vuint64m1_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,vuint16mf4_t v4,vuint16mf4_t v5,vuint16mf4_t v6,vuint16mf4_t v7,size_t vl)4893 void test_vsuxseg8ei64_v_u16mf4 (uint16_t *base, vuint64m1_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, vuint16mf4_t v6, vuint16mf4_t v7, size_t vl) {
4894   return vsuxseg8ei64_v_u16mf4(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
4895 }
4896 
4897 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u16mf2(
4898 // CHECK-RV64-NEXT:  entry:
4899 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4900 // CHECK-RV64-NEXT:    ret void
4901 //
// Wrapper exercising vsuxseg2ei64_v_u16mf2; must emit @llvm.riscv.vsuxseg2 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg2ei64_v_u16mf2(uint16_t * base,vuint64m2_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,size_t vl)4902 void test_vsuxseg2ei64_v_u16mf2 (uint16_t *base, vuint64m2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, size_t vl) {
4903   return vsuxseg2ei64_v_u16mf2(base, bindex, v0, v1, vl);
4904 }
4905 
4906 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u16mf2(
4907 // CHECK-RV64-NEXT:  entry:
4908 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4909 // CHECK-RV64-NEXT:    ret void
4910 //
// Wrapper exercising vsuxseg3ei64_v_u16mf2; must emit @llvm.riscv.vsuxseg3 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg3ei64_v_u16mf2(uint16_t * base,vuint64m2_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,size_t vl)4911 void test_vsuxseg3ei64_v_u16mf2 (uint16_t *base, vuint64m2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, size_t vl) {
4912   return vsuxseg3ei64_v_u16mf2(base, bindex, v0, v1, v2, vl);
4913 }
4914 
4915 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u16mf2(
4916 // CHECK-RV64-NEXT:  entry:
4917 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4918 // CHECK-RV64-NEXT:    ret void
4919 //
// Wrapper exercising vsuxseg4ei64_v_u16mf2; must emit @llvm.riscv.vsuxseg4 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg4ei64_v_u16mf2(uint16_t * base,vuint64m2_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,size_t vl)4920 void test_vsuxseg4ei64_v_u16mf2 (uint16_t *base, vuint64m2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, size_t vl) {
4921   return vsuxseg4ei64_v_u16mf2(base, bindex, v0, v1, v2, v3, vl);
4922 }
4923 
4924 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u16mf2(
4925 // CHECK-RV64-NEXT:  entry:
4926 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4927 // CHECK-RV64-NEXT:    ret void
4928 //
// Wrapper exercising vsuxseg5ei64_v_u16mf2; must emit @llvm.riscv.vsuxseg5 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg5ei64_v_u16mf2(uint16_t * base,vuint64m2_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,size_t vl)4929 void test_vsuxseg5ei64_v_u16mf2 (uint16_t *base, vuint64m2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, size_t vl) {
4930   return vsuxseg5ei64_v_u16mf2(base, bindex, v0, v1, v2, v3, v4, vl);
4931 }
4932 
4933 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u16mf2(
4934 // CHECK-RV64-NEXT:  entry:
4935 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4936 // CHECK-RV64-NEXT:    ret void
4937 //
// Wrapper exercising vsuxseg6ei64_v_u16mf2; must emit @llvm.riscv.vsuxseg6 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg6ei64_v_u16mf2(uint16_t * base,vuint64m2_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,vuint16mf2_t v5,size_t vl)4938 void test_vsuxseg6ei64_v_u16mf2 (uint16_t *base, vuint64m2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, size_t vl) {
4939   return vsuxseg6ei64_v_u16mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
4940 }
4941 
4942 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u16mf2(
4943 // CHECK-RV64-NEXT:  entry:
4944 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4945 // CHECK-RV64-NEXT:    ret void
4946 //
// Wrapper exercising vsuxseg7ei64_v_u16mf2; must emit @llvm.riscv.vsuxseg7 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg7ei64_v_u16mf2(uint16_t * base,vuint64m2_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,vuint16mf2_t v5,vuint16mf2_t v6,size_t vl)4947 void test_vsuxseg7ei64_v_u16mf2 (uint16_t *base, vuint64m2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, vuint16mf2_t v6, size_t vl) {
4948   return vsuxseg7ei64_v_u16mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
4949 }
4950 
4951 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u16mf2(
4952 // CHECK-RV64-NEXT:  entry:
4953 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], <vscale x 2 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4954 // CHECK-RV64-NEXT:    ret void
4955 //
// Wrapper exercising vsuxseg8ei64_v_u16mf2; must emit @llvm.riscv.vsuxseg8 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg8ei64_v_u16mf2(uint16_t * base,vuint64m2_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,vuint16mf2_t v5,vuint16mf2_t v6,vuint16mf2_t v7,size_t vl)4956 void test_vsuxseg8ei64_v_u16mf2 (uint16_t *base, vuint64m2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, vuint16mf2_t v6, vuint16mf2_t v7, size_t vl) {
4957   return vsuxseg8ei64_v_u16mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
4958 }
4959 
4960 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u16m1(
4961 // CHECK-RV64-NEXT:  entry:
4962 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4963 // CHECK-RV64-NEXT:    ret void
4964 //
// Wrapper exercising vsuxseg2ei64_v_u16m1; must emit @llvm.riscv.vsuxseg2 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg2ei64_v_u16m1(uint16_t * base,vuint64m4_t bindex,vuint16m1_t v0,vuint16m1_t v1,size_t vl)4965 void test_vsuxseg2ei64_v_u16m1 (uint16_t *base, vuint64m4_t bindex, vuint16m1_t v0, vuint16m1_t v1, size_t vl) {
4966   return vsuxseg2ei64_v_u16m1(base, bindex, v0, v1, vl);
4967 }
4968 
4969 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u16m1(
4970 // CHECK-RV64-NEXT:  entry:
4971 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4972 // CHECK-RV64-NEXT:    ret void
4973 //
// Wrapper exercising vsuxseg3ei64_v_u16m1; must emit @llvm.riscv.vsuxseg3 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg3ei64_v_u16m1(uint16_t * base,vuint64m4_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,size_t vl)4974 void test_vsuxseg3ei64_v_u16m1 (uint16_t *base, vuint64m4_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, size_t vl) {
4975   return vsuxseg3ei64_v_u16m1(base, bindex, v0, v1, v2, vl);
4976 }
4977 
4978 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u16m1(
4979 // CHECK-RV64-NEXT:  entry:
4980 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4981 // CHECK-RV64-NEXT:    ret void
4982 //
// Wrapper exercising vsuxseg4ei64_v_u16m1; must emit @llvm.riscv.vsuxseg4 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg4ei64_v_u16m1(uint16_t * base,vuint64m4_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,size_t vl)4983 void test_vsuxseg4ei64_v_u16m1 (uint16_t *base, vuint64m4_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, size_t vl) {
4984   return vsuxseg4ei64_v_u16m1(base, bindex, v0, v1, v2, v3, vl);
4985 }
4986 
4987 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u16m1(
4988 // CHECK-RV64-NEXT:  entry:
4989 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4990 // CHECK-RV64-NEXT:    ret void
4991 //
// Wrapper exercising vsuxseg5ei64_v_u16m1; must emit @llvm.riscv.vsuxseg5 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg5ei64_v_u16m1(uint16_t * base,vuint64m4_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,vuint16m1_t v4,size_t vl)4992 void test_vsuxseg5ei64_v_u16m1 (uint16_t *base, vuint64m4_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, size_t vl) {
4993   return vsuxseg5ei64_v_u16m1(base, bindex, v0, v1, v2, v3, v4, vl);
4994 }
4995 
4996 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u16m1(
4997 // CHECK-RV64-NEXT:  entry:
4998 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
4999 // CHECK-RV64-NEXT:    ret void
5000 //
// Wrapper exercising vsuxseg6ei64_v_u16m1; must emit @llvm.riscv.vsuxseg6 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg6ei64_v_u16m1(uint16_t * base,vuint64m4_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,vuint16m1_t v4,vuint16m1_t v5,size_t vl)5001 void test_vsuxseg6ei64_v_u16m1 (uint16_t *base, vuint64m4_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, size_t vl) {
5002   return vsuxseg6ei64_v_u16m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
5003 }
5004 
5005 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u16m1(
5006 // CHECK-RV64-NEXT:  entry:
5007 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
5008 // CHECK-RV64-NEXT:    ret void
5009 //
// Wrapper exercising vsuxseg7ei64_v_u16m1; must emit @llvm.riscv.vsuxseg7 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg7ei64_v_u16m1(uint16_t * base,vuint64m4_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,vuint16m1_t v4,vuint16m1_t v5,vuint16m1_t v6,size_t vl)5010 void test_vsuxseg7ei64_v_u16m1 (uint16_t *base, vuint64m4_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, vuint16m1_t v6, size_t vl) {
5011   return vsuxseg7ei64_v_u16m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
5012 }
5013 
5014 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u16m1(
5015 // CHECK-RV64-NEXT:  entry:
5016 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], <vscale x 4 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
5017 // CHECK-RV64-NEXT:    ret void
5018 //
// Wrapper exercising vsuxseg8ei64_v_u16m1; must emit @llvm.riscv.vsuxseg8 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg8ei64_v_u16m1(uint16_t * base,vuint64m4_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,vuint16m1_t v4,vuint16m1_t v5,vuint16m1_t v6,vuint16m1_t v7,size_t vl)5019 void test_vsuxseg8ei64_v_u16m1 (uint16_t *base, vuint64m4_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, vuint16m1_t v6, vuint16m1_t v7, size_t vl) {
5020   return vsuxseg8ei64_v_u16m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
5021 }
5022 
5023 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u16m2(
5024 // CHECK-RV64-NEXT:  entry:
5025 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
5026 // CHECK-RV64-NEXT:    ret void
5027 //
// Wrapper exercising vsuxseg2ei64_v_u16m2; must emit @llvm.riscv.vsuxseg2 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg2ei64_v_u16m2(uint16_t * base,vuint64m8_t bindex,vuint16m2_t v0,vuint16m2_t v1,size_t vl)5028 void test_vsuxseg2ei64_v_u16m2 (uint16_t *base, vuint64m8_t bindex, vuint16m2_t v0, vuint16m2_t v1, size_t vl) {
5029   return vsuxseg2ei64_v_u16m2(base, bindex, v0, v1, vl);
5030 }
5031 
5032 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u16m2(
5033 // CHECK-RV64-NEXT:  entry:
5034 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
5035 // CHECK-RV64-NEXT:    ret void
5036 //
// Wrapper exercising vsuxseg3ei64_v_u16m2; must emit @llvm.riscv.vsuxseg3 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg3ei64_v_u16m2(uint16_t * base,vuint64m8_t bindex,vuint16m2_t v0,vuint16m2_t v1,vuint16m2_t v2,size_t vl)5037 void test_vsuxseg3ei64_v_u16m2 (uint16_t *base, vuint64m8_t bindex, vuint16m2_t v0, vuint16m2_t v1, vuint16m2_t v2, size_t vl) {
5038   return vsuxseg3ei64_v_u16m2(base, bindex, v0, v1, v2, vl);
5039 }
5040 
5041 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u16m2(
5042 // CHECK-RV64-NEXT:  entry:
5043 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], <vscale x 8 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
5044 // CHECK-RV64-NEXT:    ret void
5045 //
// Wrapper exercising vsuxseg4ei64_v_u16m2; must emit @llvm.riscv.vsuxseg4 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg4ei64_v_u16m2(uint16_t * base,vuint64m8_t bindex,vuint16m2_t v0,vuint16m2_t v1,vuint16m2_t v2,vuint16m2_t v3,size_t vl)5046 void test_vsuxseg4ei64_v_u16m2 (uint16_t *base, vuint64m8_t bindex, vuint16m2_t v0, vuint16m2_t v1, vuint16m2_t v2, vuint16m2_t v3, size_t vl) {
5047   return vsuxseg4ei64_v_u16m2(base, bindex, v0, v1, v2, v3, vl);
5048 }
5049 
5050 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u32mf2(
5051 // CHECK-RV64-NEXT:  entry:
5052 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5053 // CHECK-RV64-NEXT:    ret void
5054 //
// Wrapper exercising vsuxseg2ei8_v_u32mf2; must emit @llvm.riscv.vsuxseg2 as pinned by the autogenerated CHECK lines above (regenerate, do not hand-edit).
test_vsuxseg2ei8_v_u32mf2(uint32_t * base,vuint8mf8_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,size_t vl)5055 void test_vsuxseg2ei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, size_t vl) {
5056   return vsuxseg2ei8_v_u32mf2(base, bindex, v0, v1, vl);
5057 }
5058 
5059 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u32mf2(
5060 // CHECK-RV64-NEXT:  entry:
5061 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5062 // CHECK-RV64-NEXT:    ret void
5063 //
test_vsuxseg3ei8_v_u32mf2(uint32_t * base,vuint8mf8_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,size_t vl)5064 void test_vsuxseg3ei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, size_t vl) {
5065   return vsuxseg3ei8_v_u32mf2(base, bindex, v0, v1, v2, vl);
5066 }
5067 
5068 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u32mf2(
5069 // CHECK-RV64-NEXT:  entry:
5070 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5071 // CHECK-RV64-NEXT:    ret void
5072 //
test_vsuxseg4ei8_v_u32mf2(uint32_t * base,vuint8mf8_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,size_t vl)5073 void test_vsuxseg4ei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, size_t vl) {
5074   return vsuxseg4ei8_v_u32mf2(base, bindex, v0, v1, v2, v3, vl);
5075 }
5076 
5077 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u32mf2(
5078 // CHECK-RV64-NEXT:  entry:
5079 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5080 // CHECK-RV64-NEXT:    ret void
5081 //
test_vsuxseg5ei8_v_u32mf2(uint32_t * base,vuint8mf8_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,size_t vl)5082 void test_vsuxseg5ei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, size_t vl) {
5083   return vsuxseg5ei8_v_u32mf2(base, bindex, v0, v1, v2, v3, v4, vl);
5084 }
5085 
5086 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u32mf2(
5087 // CHECK-RV64-NEXT:  entry:
5088 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5089 // CHECK-RV64-NEXT:    ret void
5090 //
test_vsuxseg6ei8_v_u32mf2(uint32_t * base,vuint8mf8_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,size_t vl)5091 void test_vsuxseg6ei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, size_t vl) {
5092   return vsuxseg6ei8_v_u32mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
5093 }
5094 
5095 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u32mf2(
5096 // CHECK-RV64-NEXT:  entry:
5097 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5098 // CHECK-RV64-NEXT:    ret void
5099 //
test_vsuxseg7ei8_v_u32mf2(uint32_t * base,vuint8mf8_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,vuint32mf2_t v6,size_t vl)5100 void test_vsuxseg7ei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, vuint32mf2_t v6, size_t vl) {
5101   return vsuxseg7ei8_v_u32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
5102 }
5103 
5104 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u32mf2(
5105 // CHECK-RV64-NEXT:  entry:
5106 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], <vscale x 1 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5107 // CHECK-RV64-NEXT:    ret void
5108 //
test_vsuxseg8ei8_v_u32mf2(uint32_t * base,vuint8mf8_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,vuint32mf2_t v6,vuint32mf2_t v7,size_t vl)5109 void test_vsuxseg8ei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, vuint32mf2_t v6, vuint32mf2_t v7, size_t vl) {
5110   return vsuxseg8ei8_v_u32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
5111 }
5112 
5113 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u32m1(
5114 // CHECK-RV64-NEXT:  entry:
5115 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5116 // CHECK-RV64-NEXT:    ret void
5117 //
test_vsuxseg2ei8_v_u32m1(uint32_t * base,vuint8mf4_t bindex,vuint32m1_t v0,vuint32m1_t v1,size_t vl)5118 void test_vsuxseg2ei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t v0, vuint32m1_t v1, size_t vl) {
5119   return vsuxseg2ei8_v_u32m1(base, bindex, v0, v1, vl);
5120 }
5121 
5122 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u32m1(
5123 // CHECK-RV64-NEXT:  entry:
5124 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5125 // CHECK-RV64-NEXT:    ret void
5126 //
test_vsuxseg3ei8_v_u32m1(uint32_t * base,vuint8mf4_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,size_t vl)5127 void test_vsuxseg3ei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, size_t vl) {
5128   return vsuxseg3ei8_v_u32m1(base, bindex, v0, v1, v2, vl);
5129 }
5130 
5131 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u32m1(
5132 // CHECK-RV64-NEXT:  entry:
5133 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5134 // CHECK-RV64-NEXT:    ret void
5135 //
test_vsuxseg4ei8_v_u32m1(uint32_t * base,vuint8mf4_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,size_t vl)5136 void test_vsuxseg4ei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, size_t vl) {
5137   return vsuxseg4ei8_v_u32m1(base, bindex, v0, v1, v2, v3, vl);
5138 }
5139 
5140 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u32m1(
5141 // CHECK-RV64-NEXT:  entry:
5142 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5143 // CHECK-RV64-NEXT:    ret void
5144 //
test_vsuxseg5ei8_v_u32m1(uint32_t * base,vuint8mf4_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,size_t vl)5145 void test_vsuxseg5ei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, size_t vl) {
5146   return vsuxseg5ei8_v_u32m1(base, bindex, v0, v1, v2, v3, v4, vl);
5147 }
5148 
5149 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u32m1(
5150 // CHECK-RV64-NEXT:  entry:
5151 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5152 // CHECK-RV64-NEXT:    ret void
5153 //
test_vsuxseg6ei8_v_u32m1(uint32_t * base,vuint8mf4_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,size_t vl)5154 void test_vsuxseg6ei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, size_t vl) {
5155   return vsuxseg6ei8_v_u32m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
5156 }
5157 
5158 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u32m1(
5159 // CHECK-RV64-NEXT:  entry:
5160 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5161 // CHECK-RV64-NEXT:    ret void
5162 //
test_vsuxseg7ei8_v_u32m1(uint32_t * base,vuint8mf4_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,vuint32m1_t v6,size_t vl)5163 void test_vsuxseg7ei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, vuint32m1_t v6, size_t vl) {
5164   return vsuxseg7ei8_v_u32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
5165 }
5166 
5167 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u32m1(
5168 // CHECK-RV64-NEXT:  entry:
5169 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], <vscale x 2 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5170 // CHECK-RV64-NEXT:    ret void
5171 //
test_vsuxseg8ei8_v_u32m1(uint32_t * base,vuint8mf4_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,vuint32m1_t v6,vuint32m1_t v7,size_t vl)5172 void test_vsuxseg8ei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, vuint32m1_t v6, vuint32m1_t v7, size_t vl) {
5173   return vsuxseg8ei8_v_u32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
5174 }
5175 
5176 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u32m2(
5177 // CHECK-RV64-NEXT:  entry:
5178 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5179 // CHECK-RV64-NEXT:    ret void
5180 //
test_vsuxseg2ei8_v_u32m2(uint32_t * base,vuint8mf2_t bindex,vuint32m2_t v0,vuint32m2_t v1,size_t vl)5181 void test_vsuxseg2ei8_v_u32m2 (uint32_t *base, vuint8mf2_t bindex, vuint32m2_t v0, vuint32m2_t v1, size_t vl) {
5182   return vsuxseg2ei8_v_u32m2(base, bindex, v0, v1, vl);
5183 }
5184 
5185 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u32m2(
5186 // CHECK-RV64-NEXT:  entry:
5187 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5188 // CHECK-RV64-NEXT:    ret void
5189 //
test_vsuxseg3ei8_v_u32m2(uint32_t * base,vuint8mf2_t bindex,vuint32m2_t v0,vuint32m2_t v1,vuint32m2_t v2,size_t vl)5190 void test_vsuxseg3ei8_v_u32m2 (uint32_t *base, vuint8mf2_t bindex, vuint32m2_t v0, vuint32m2_t v1, vuint32m2_t v2, size_t vl) {
5191   return vsuxseg3ei8_v_u32m2(base, bindex, v0, v1, v2, vl);
5192 }
5193 
5194 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u32m2(
5195 // CHECK-RV64-NEXT:  entry:
5196 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], <vscale x 4 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5197 // CHECK-RV64-NEXT:    ret void
5198 //
test_vsuxseg4ei8_v_u32m2(uint32_t * base,vuint8mf2_t bindex,vuint32m2_t v0,vuint32m2_t v1,vuint32m2_t v2,vuint32m2_t v3,size_t vl)5199 void test_vsuxseg4ei8_v_u32m2 (uint32_t *base, vuint8mf2_t bindex, vuint32m2_t v0, vuint32m2_t v1, vuint32m2_t v2, vuint32m2_t v3, size_t vl) {
5200   return vsuxseg4ei8_v_u32m2(base, bindex, v0, v1, v2, v3, vl);
5201 }
5202 
5203 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u32m4(
5204 // CHECK-RV64-NEXT:  entry:
5205 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i32.nxv8i8.i64(<vscale x 8 x i32> [[V0:%.*]], <vscale x 8 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5206 // CHECK-RV64-NEXT:    ret void
5207 //
test_vsuxseg2ei8_v_u32m4(uint32_t * base,vuint8m1_t bindex,vuint32m4_t v0,vuint32m4_t v1,size_t vl)5208 void test_vsuxseg2ei8_v_u32m4 (uint32_t *base, vuint8m1_t bindex, vuint32m4_t v0, vuint32m4_t v1, size_t vl) {
5209   return vsuxseg2ei8_v_u32m4(base, bindex, v0, v1, vl);
5210 }
5211 
5212 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u32mf2(
5213 // CHECK-RV64-NEXT:  entry:
5214 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5215 // CHECK-RV64-NEXT:    ret void
5216 //
test_vsuxseg2ei16_v_u32mf2(uint32_t * base,vuint16mf4_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,size_t vl)5217 void test_vsuxseg2ei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, size_t vl) {
5218   return vsuxseg2ei16_v_u32mf2(base, bindex, v0, v1, vl);
5219 }
5220 
5221 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u32mf2(
5222 // CHECK-RV64-NEXT:  entry:
5223 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5224 // CHECK-RV64-NEXT:    ret void
5225 //
test_vsuxseg3ei16_v_u32mf2(uint32_t * base,vuint16mf4_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,size_t vl)5226 void test_vsuxseg3ei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, size_t vl) {
5227   return vsuxseg3ei16_v_u32mf2(base, bindex, v0, v1, v2, vl);
5228 }
5229 
5230 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u32mf2(
5231 // CHECK-RV64-NEXT:  entry:
5232 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5233 // CHECK-RV64-NEXT:    ret void
5234 //
test_vsuxseg4ei16_v_u32mf2(uint32_t * base,vuint16mf4_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,size_t vl)5235 void test_vsuxseg4ei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, size_t vl) {
5236   return vsuxseg4ei16_v_u32mf2(base, bindex, v0, v1, v2, v3, vl);
5237 }
5238 
5239 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u32mf2(
5240 // CHECK-RV64-NEXT:  entry:
5241 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5242 // CHECK-RV64-NEXT:    ret void
5243 //
test_vsuxseg5ei16_v_u32mf2(uint32_t * base,vuint16mf4_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,size_t vl)5244 void test_vsuxseg5ei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, size_t vl) {
5245   return vsuxseg5ei16_v_u32mf2(base, bindex, v0, v1, v2, v3, v4, vl);
5246 }
5247 
5248 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u32mf2(
5249 // CHECK-RV64-NEXT:  entry:
5250 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5251 // CHECK-RV64-NEXT:    ret void
5252 //
test_vsuxseg6ei16_v_u32mf2(uint32_t * base,vuint16mf4_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,size_t vl)5253 void test_vsuxseg6ei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, size_t vl) {
5254   return vsuxseg6ei16_v_u32mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
5255 }
5256 
5257 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u32mf2(
5258 // CHECK-RV64-NEXT:  entry:
5259 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5260 // CHECK-RV64-NEXT:    ret void
5261 //
test_vsuxseg7ei16_v_u32mf2(uint32_t * base,vuint16mf4_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,vuint32mf2_t v6,size_t vl)5262 void test_vsuxseg7ei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, vuint32mf2_t v6, size_t vl) {
5263   return vsuxseg7ei16_v_u32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
5264 }
5265 
5266 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u32mf2(
5267 // CHECK-RV64-NEXT:  entry:
5268 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], <vscale x 1 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5269 // CHECK-RV64-NEXT:    ret void
5270 //
test_vsuxseg8ei16_v_u32mf2(uint32_t * base,vuint16mf4_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,vuint32mf2_t v6,vuint32mf2_t v7,size_t vl)5271 void test_vsuxseg8ei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, vuint32mf2_t v6, vuint32mf2_t v7, size_t vl) {
5272   return vsuxseg8ei16_v_u32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
5273 }
5274 
5275 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u32m1(
5276 // CHECK-RV64-NEXT:  entry:
5277 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5278 // CHECK-RV64-NEXT:    ret void
5279 //
test_vsuxseg2ei16_v_u32m1(uint32_t * base,vuint16mf2_t bindex,vuint32m1_t v0,vuint32m1_t v1,size_t vl)5280 void test_vsuxseg2ei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t v0, vuint32m1_t v1, size_t vl) {
5281   return vsuxseg2ei16_v_u32m1(base, bindex, v0, v1, vl);
5282 }
5283 
5284 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u32m1(
5285 // CHECK-RV64-NEXT:  entry:
5286 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5287 // CHECK-RV64-NEXT:    ret void
5288 //
test_vsuxseg3ei16_v_u32m1(uint32_t * base,vuint16mf2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,size_t vl)5289 void test_vsuxseg3ei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, size_t vl) {
5290   return vsuxseg3ei16_v_u32m1(base, bindex, v0, v1, v2, vl);
5291 }
5292 
5293 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u32m1(
5294 // CHECK-RV64-NEXT:  entry:
5295 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5296 // CHECK-RV64-NEXT:    ret void
5297 //
test_vsuxseg4ei16_v_u32m1(uint32_t * base,vuint16mf2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,size_t vl)5298 void test_vsuxseg4ei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, size_t vl) {
5299   return vsuxseg4ei16_v_u32m1(base, bindex, v0, v1, v2, v3, vl);
5300 }
5301 
5302 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u32m1(
5303 // CHECK-RV64-NEXT:  entry:
5304 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5305 // CHECK-RV64-NEXT:    ret void
5306 //
test_vsuxseg5ei16_v_u32m1(uint32_t * base,vuint16mf2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,size_t vl)5307 void test_vsuxseg5ei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, size_t vl) {
5308   return vsuxseg5ei16_v_u32m1(base, bindex, v0, v1, v2, v3, v4, vl);
5309 }
5310 
5311 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u32m1(
5312 // CHECK-RV64-NEXT:  entry:
5313 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5314 // CHECK-RV64-NEXT:    ret void
5315 //
test_vsuxseg6ei16_v_u32m1(uint32_t * base,vuint16mf2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,size_t vl)5316 void test_vsuxseg6ei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, size_t vl) {
5317   return vsuxseg6ei16_v_u32m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
5318 }
5319 
5320 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u32m1(
5321 // CHECK-RV64-NEXT:  entry:
5322 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5323 // CHECK-RV64-NEXT:    ret void
5324 //
test_vsuxseg7ei16_v_u32m1(uint32_t * base,vuint16mf2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,vuint32m1_t v6,size_t vl)5325 void test_vsuxseg7ei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, vuint32m1_t v6, size_t vl) {
5326   return vsuxseg7ei16_v_u32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
5327 }
5328 
5329 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u32m1(
5330 // CHECK-RV64-NEXT:  entry:
5331 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], <vscale x 2 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5332 // CHECK-RV64-NEXT:    ret void
5333 //
test_vsuxseg8ei16_v_u32m1(uint32_t * base,vuint16mf2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,vuint32m1_t v6,vuint32m1_t v7,size_t vl)5334 void test_vsuxseg8ei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, vuint32m1_t v6, vuint32m1_t v7, size_t vl) {
5335   return vsuxseg8ei16_v_u32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
5336 }
5337 
5338 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u32m2(
5339 // CHECK-RV64-NEXT:  entry:
5340 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5341 // CHECK-RV64-NEXT:    ret void
5342 //
test_vsuxseg2ei16_v_u32m2(uint32_t * base,vuint16m1_t bindex,vuint32m2_t v0,vuint32m2_t v1,size_t vl)5343 void test_vsuxseg2ei16_v_u32m2 (uint32_t *base, vuint16m1_t bindex, vuint32m2_t v0, vuint32m2_t v1, size_t vl) {
5344   return vsuxseg2ei16_v_u32m2(base, bindex, v0, v1, vl);
5345 }
5346 
5347 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u32m2(
5348 // CHECK-RV64-NEXT:  entry:
5349 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5350 // CHECK-RV64-NEXT:    ret void
5351 //
test_vsuxseg3ei16_v_u32m2(uint32_t * base,vuint16m1_t bindex,vuint32m2_t v0,vuint32m2_t v1,vuint32m2_t v2,size_t vl)5352 void test_vsuxseg3ei16_v_u32m2 (uint32_t *base, vuint16m1_t bindex, vuint32m2_t v0, vuint32m2_t v1, vuint32m2_t v2, size_t vl) {
5353   return vsuxseg3ei16_v_u32m2(base, bindex, v0, v1, v2, vl);
5354 }
5355 
5356 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u32m2(
5357 // CHECK-RV64-NEXT:  entry:
5358 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], <vscale x 4 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5359 // CHECK-RV64-NEXT:    ret void
5360 //
test_vsuxseg4ei16_v_u32m2(uint32_t * base,vuint16m1_t bindex,vuint32m2_t v0,vuint32m2_t v1,vuint32m2_t v2,vuint32m2_t v3,size_t vl)5361 void test_vsuxseg4ei16_v_u32m2 (uint32_t *base, vuint16m1_t bindex, vuint32m2_t v0, vuint32m2_t v1, vuint32m2_t v2, vuint32m2_t v3, size_t vl) {
5362   return vsuxseg4ei16_v_u32m2(base, bindex, v0, v1, v2, v3, vl);
5363 }
5364 
5365 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u32m4(
5366 // CHECK-RV64-NEXT:  entry:
5367 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[V0:%.*]], <vscale x 8 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
5368 // CHECK-RV64-NEXT:    ret void
5369 //
test_vsuxseg2ei16_v_u32m4(uint32_t * base,vuint16m2_t bindex,vuint32m4_t v0,vuint32m4_t v1,size_t vl)5370 void test_vsuxseg2ei16_v_u32m4 (uint32_t *base, vuint16m2_t bindex, vuint32m4_t v0, vuint32m4_t v1, size_t vl) {
5371   return vsuxseg2ei16_v_u32m4(base, bindex, v0, v1, vl);
5372 }
5373 
5374 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u32mf2(
5375 // CHECK-RV64-NEXT:  entry:
5376 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
5377 // CHECK-RV64-NEXT:    ret void
5378 //
test_vsuxseg2ei32_v_u32mf2(uint32_t * base,vuint32mf2_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,size_t vl)5379 void test_vsuxseg2ei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, size_t vl) {
5380   return vsuxseg2ei32_v_u32mf2(base, bindex, v0, v1, vl);
5381 }
5382 
5383 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u32mf2(
5384 // CHECK-RV64-NEXT:  entry:
5385 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
5386 // CHECK-RV64-NEXT:    ret void
5387 //
test_vsuxseg3ei32_v_u32mf2(uint32_t * base,vuint32mf2_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,size_t vl)5388 void test_vsuxseg3ei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, size_t vl) {
5389   return vsuxseg3ei32_v_u32mf2(base, bindex, v0, v1, v2, vl);
5390 }
5391 
5392 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u32mf2(
5393 // CHECK-RV64-NEXT:  entry:
5394 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
5395 // CHECK-RV64-NEXT:    ret void
5396 //
// NOTE(review): Autogenerated test block (see update_cc_test_checks.py header).
// Each test_vsuxseg<N>ei<W>_v_<T> wrapper forwards its arguments unchanged to
// the matching vsuxseg<N>ei<W>_v_<T> intrinsic from <riscv_vector.h>; the
// // CHECK-RV64 comments are FileCheck directives pinning the exact LLVM IR
// call emitted. Do not hand-edit CHECK lines -- regenerate with the script.
void test_vsuxseg4ei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, size_t vl) {
  return vsuxseg4ei32_v_u32mf2(base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, size_t vl) {
  return vsuxseg5ei32_v_u32mf2(base, bindex, v0, v1, v2, v3, v4, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg6ei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, size_t vl) {
  return vsuxseg6ei32_v_u32mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg7ei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, vuint32mf2_t v6, size_t vl) {
  return vsuxseg7ei32_v_u32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], <vscale x 1 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg8ei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, vuint32mf2_t v6, vuint32mf2_t v7, size_t vl) {
  return vsuxseg8ei32_v_u32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
5436 
// NOTE(review): Autogenerated tests for vsuxseg (vector store, unordered
// indexed, segmented) with 32-bit indices and u32m1/u32m2/u32m4 data.
// Each wrapper forwards its arguments unchanged to the intrinsic of the same
// name; the CHECK-RV64 lines are FileCheck directives -- regenerate, don't edit.
// CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t v0, vuint32m1_t v1, size_t vl) {
  return vsuxseg2ei32_v_u32m1(base, bindex, v0, v1, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, size_t vl) {
  return vsuxseg3ei32_v_u32m1(base, bindex, v0, v1, v2, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, size_t vl) {
  return vsuxseg4ei32_v_u32m1(base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, size_t vl) {
  return vsuxseg5ei32_v_u32m1(base, bindex, v0, v1, v2, v3, v4, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg6ei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, size_t vl) {
  return vsuxseg6ei32_v_u32m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg7ei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, vuint32m1_t v6, size_t vl) {
  return vsuxseg7ei32_v_u32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], <vscale x 2 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg8ei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, vuint32m1_t v6, vuint32m1_t v7, size_t vl) {
  return vsuxseg8ei32_v_u32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei32_v_u32m2 (uint32_t *base, vuint32m2_t bindex, vuint32m2_t v0, vuint32m2_t v1, size_t vl) {
  return vsuxseg2ei32_v_u32m2(base, bindex, v0, v1, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei32_v_u32m2 (uint32_t *base, vuint32m2_t bindex, vuint32m2_t v0, vuint32m2_t v1, vuint32m2_t v2, size_t vl) {
  return vsuxseg3ei32_v_u32m2(base, bindex, v0, v1, v2, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], <vscale x 4 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei32_v_u32m2 (uint32_t *base, vuint32m2_t bindex, vuint32m2_t v0, vuint32m2_t v1, vuint32m2_t v2, vuint32m2_t v3, size_t vl) {
  return vsuxseg4ei32_v_u32m2(base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[V0:%.*]], <vscale x 8 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei32_v_u32m4 (uint32_t *base, vuint32m4_t bindex, vuint32m4_t v0, vuint32m4_t v1, size_t vl) {
  return vsuxseg2ei32_v_u32m4(base, bindex, v0, v1, vl);
}
5535 
// NOTE(review): Autogenerated tests for vsuxseg with 64-bit indices and
// u32mf2 data (index vector vuint64m1_t is one register group wider than the
// data, as the CHECK lines show: data nxv1i32 vs index nxv1i64). Wrappers
// forward arguments unchanged; CHECK-RV64 lines are FileCheck directives.
// CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, size_t vl) {
  return vsuxseg2ei64_v_u32mf2(base, bindex, v0, v1, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, size_t vl) {
  return vsuxseg3ei64_v_u32mf2(base, bindex, v0, v1, v2, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, size_t vl) {
  return vsuxseg4ei64_v_u32mf2(base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, size_t vl) {
  return vsuxseg5ei64_v_u32mf2(base, bindex, v0, v1, v2, v3, v4, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg6ei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, size_t vl) {
  return vsuxseg6ei64_v_u32mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg7ei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, vuint32mf2_t v6, size_t vl) {
  return vsuxseg7ei64_v_u32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], <vscale x 1 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg8ei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, vuint32mf2_t v6, vuint32mf2_t v7, size_t vl) {
  return vsuxseg8ei64_v_u32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
5598 
// NOTE(review): Autogenerated tests for vsuxseg with 64-bit indices and
// u32m1/u32m2/u32m4 data. Wrappers forward their arguments unchanged to the
// same-named intrinsic; CHECK-RV64 lines are FileCheck directives pinning
// the emitted IR call. Regenerate with update_cc_test_checks.py, don't edit.
// CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t v0, vuint32m1_t v1, size_t vl) {
  return vsuxseg2ei64_v_u32m1(base, bindex, v0, v1, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, size_t vl) {
  return vsuxseg3ei64_v_u32m1(base, bindex, v0, v1, v2, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, size_t vl) {
  return vsuxseg4ei64_v_u32m1(base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, size_t vl) {
  return vsuxseg5ei64_v_u32m1(base, bindex, v0, v1, v2, v3, v4, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg6ei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, size_t vl) {
  return vsuxseg6ei64_v_u32m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg7ei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, vuint32m1_t v6, size_t vl) {
  return vsuxseg7ei64_v_u32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], <vscale x 2 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg8ei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, vuint32m1_t v6, vuint32m1_t v7, size_t vl) {
  return vsuxseg8ei64_v_u32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei64_v_u32m2 (uint32_t *base, vuint64m4_t bindex, vuint32m2_t v0, vuint32m2_t v1, size_t vl) {
  return vsuxseg2ei64_v_u32m2(base, bindex, v0, v1, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei64_v_u32m2 (uint32_t *base, vuint64m4_t bindex, vuint32m2_t v0, vuint32m2_t v1, vuint32m2_t v2, size_t vl) {
  return vsuxseg3ei64_v_u32m2(base, bindex, v0, v1, v2, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u32m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], <vscale x 4 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei64_v_u32m2 (uint32_t *base, vuint64m4_t bindex, vuint32m2_t v0, vuint32m2_t v1, vuint32m2_t v2, vuint32m2_t v3, size_t vl) {
  return vsuxseg4ei64_v_u32m2(base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u32m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[V0:%.*]], <vscale x 8 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei64_v_u32m4 (uint32_t *base, vuint64m8_t bindex, vuint32m4_t v0, vuint32m4_t v1, size_t vl) {
  return vsuxseg2ei64_v_u32m4(base, bindex, v0, v1, vl);
}
5697 
5698 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u64m1(
5699 // CHECK-RV64-NEXT:  entry:
5700 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5701 // CHECK-RV64-NEXT:    ret void
5702 //
test_vsuxseg2ei8_v_u64m1(uint64_t * base,vuint8mf8_t bindex,vuint64m1_t v0,vuint64m1_t v1,size_t vl)5703 void test_vsuxseg2ei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t v0, vuint64m1_t v1, size_t vl) {
5704   return vsuxseg2ei8_v_u64m1(base, bindex, v0, v1, vl);
5705 }
5706 
5707 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u64m1(
5708 // CHECK-RV64-NEXT:  entry:
5709 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5710 // CHECK-RV64-NEXT:    ret void
5711 //
test_vsuxseg3ei8_v_u64m1(uint64_t * base,vuint8mf8_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,size_t vl)5712 void test_vsuxseg3ei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, size_t vl) {
5713   return vsuxseg3ei8_v_u64m1(base, bindex, v0, v1, v2, vl);
5714 }
5715 
5716 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u64m1(
5717 // CHECK-RV64-NEXT:  entry:
5718 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5719 // CHECK-RV64-NEXT:    ret void
5720 //
test_vsuxseg4ei8_v_u64m1(uint64_t * base,vuint8mf8_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,size_t vl)5721 void test_vsuxseg4ei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, size_t vl) {
5722   return vsuxseg4ei8_v_u64m1(base, bindex, v0, v1, v2, v3, vl);
5723 }
5724 
5725 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u64m1(
5726 // CHECK-RV64-NEXT:  entry:
5727 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5728 // CHECK-RV64-NEXT:    ret void
5729 //
test_vsuxseg5ei8_v_u64m1(uint64_t * base,vuint8mf8_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,size_t vl)5730 void test_vsuxseg5ei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, size_t vl) {
5731   return vsuxseg5ei8_v_u64m1(base, bindex, v0, v1, v2, v3, v4, vl);
5732 }
5733 
5734 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u64m1(
5735 // CHECK-RV64-NEXT:  entry:
5736 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5737 // CHECK-RV64-NEXT:    ret void
5738 //
test_vsuxseg6ei8_v_u64m1(uint64_t * base,vuint8mf8_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,vuint64m1_t v5,size_t vl)5739 void test_vsuxseg6ei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, size_t vl) {
5740   return vsuxseg6ei8_v_u64m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
5741 }
5742 
5743 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u64m1(
5744 // CHECK-RV64-NEXT:  entry:
5745 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5746 // CHECK-RV64-NEXT:    ret void
5747 //
test_vsuxseg7ei8_v_u64m1(uint64_t * base,vuint8mf8_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,vuint64m1_t v5,vuint64m1_t v6,size_t vl)5748 void test_vsuxseg7ei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, vuint64m1_t v6, size_t vl) {
5749   return vsuxseg7ei8_v_u64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
5750 }
5751 
5752 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u64m1(
5753 // CHECK-RV64-NEXT:  entry:
5754 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], <vscale x 1 x i64> [[V7:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5755 // CHECK-RV64-NEXT:    ret void
5756 //
test_vsuxseg8ei8_v_u64m1(uint64_t * base,vuint8mf8_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,vuint64m1_t v5,vuint64m1_t v6,vuint64m1_t v7,size_t vl)5757 void test_vsuxseg8ei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, vuint64m1_t v6, vuint64m1_t v7, size_t vl) {
5758   return vsuxseg8ei8_v_u64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
5759 }
5760 
5761 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u64m2(
5762 // CHECK-RV64-NEXT:  entry:
5763 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
5764 // CHECK-RV64-NEXT:    ret void
5765 //
test_vsuxseg2ei8_v_u64m2(uint64_t * base,vuint8mf4_t bindex,vuint64m2_t v0,vuint64m2_t v1,size_t vl)5766 void test_vsuxseg2ei8_v_u64m2 (uint64_t *base, vuint8mf4_t bindex, vuint64m2_t v0, vuint64m2_t v1, size_t vl) {
5767   return vsuxseg2ei8_v_u64m2(base, bindex, v0, v1, vl);
5768 }
5769 
// CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg3ei8_v_u64m2 (3 u64m2 values, 8-bit index vector) must lower to the llvm.riscv.vsuxseg3 intrinsic checked above.
void test_vsuxseg3ei8_v_u64m2 (uint64_t *base, vuint8mf4_t bindex, vuint64m2_t v0, vuint64m2_t v1, vuint64m2_t v2, size_t vl) {
  return vsuxseg3ei8_v_u64m2(base, bindex, v0, v1, v2, vl);
}
5778 
// CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], <vscale x 2 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg4ei8_v_u64m2 (4 u64m2 values, 8-bit index vector) must lower to the llvm.riscv.vsuxseg4 intrinsic checked above.
void test_vsuxseg4ei8_v_u64m2 (uint64_t *base, vuint8mf4_t bindex, vuint64m2_t v0, vuint64m2_t v1, vuint64m2_t v2, vuint64m2_t v3, size_t vl) {
  return vsuxseg4ei8_v_u64m2(base, bindex, v0, v1, v2, v3, vl);
}
5787 
// CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[V0:%.*]], <vscale x 4 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg2ei8_v_u64m4 (2 u64m4 values, 8-bit index vector) must lower to the llvm.riscv.vsuxseg2 intrinsic checked above.
void test_vsuxseg2ei8_v_u64m4 (uint64_t *base, vuint8mf2_t bindex, vuint64m4_t v0, vuint64m4_t v1, size_t vl) {
  return vsuxseg2ei8_v_u64m4(base, bindex, v0, v1, vl);
}
5796 
// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg2ei16_v_u64m1 (2 u64m1 values, 16-bit index vector) must lower to the llvm.riscv.vsuxseg2 intrinsic checked above.
void test_vsuxseg2ei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t v0, vuint64m1_t v1, size_t vl) {
  return vsuxseg2ei16_v_u64m1(base, bindex, v0, v1, vl);
}
5805 
// CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg3ei16_v_u64m1 (3 u64m1 values, 16-bit index vector) must lower to the llvm.riscv.vsuxseg3 intrinsic checked above.
void test_vsuxseg3ei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, size_t vl) {
  return vsuxseg3ei16_v_u64m1(base, bindex, v0, v1, v2, vl);
}
5814 
// CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg4ei16_v_u64m1 (4 u64m1 values, 16-bit index vector) must lower to the llvm.riscv.vsuxseg4 intrinsic checked above.
void test_vsuxseg4ei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, size_t vl) {
  return vsuxseg4ei16_v_u64m1(base, bindex, v0, v1, v2, v3, vl);
}
5823 
// CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg5ei16_v_u64m1 (5 u64m1 values, 16-bit index vector) must lower to the llvm.riscv.vsuxseg5 intrinsic checked above.
void test_vsuxseg5ei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, size_t vl) {
  return vsuxseg5ei16_v_u64m1(base, bindex, v0, v1, v2, v3, v4, vl);
}
5832 
// CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg6ei16_v_u64m1 (6 u64m1 values, 16-bit index vector) must lower to the llvm.riscv.vsuxseg6 intrinsic checked above.
void test_vsuxseg6ei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, size_t vl) {
  return vsuxseg6ei16_v_u64m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
5841 
// CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg7ei16_v_u64m1 (7 u64m1 values, 16-bit index vector) must lower to the llvm.riscv.vsuxseg7 intrinsic checked above.
void test_vsuxseg7ei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, vuint64m1_t v6, size_t vl) {
  return vsuxseg7ei16_v_u64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
5850 
// CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], <vscale x 1 x i64> [[V7:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg8ei16_v_u64m1 (8 u64m1 values, 16-bit index vector) must lower to the llvm.riscv.vsuxseg8 intrinsic checked above.
void test_vsuxseg8ei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, vuint64m1_t v6, vuint64m1_t v7, size_t vl) {
  return vsuxseg8ei16_v_u64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
5859 
// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg2ei16_v_u64m2 (2 u64m2 values, 16-bit index vector) must lower to the llvm.riscv.vsuxseg2 intrinsic checked above.
void test_vsuxseg2ei16_v_u64m2 (uint64_t *base, vuint16mf2_t bindex, vuint64m2_t v0, vuint64m2_t v1, size_t vl) {
  return vsuxseg2ei16_v_u64m2(base, bindex, v0, v1, vl);
}
5868 
// CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg3ei16_v_u64m2 (3 u64m2 values, 16-bit index vector) must lower to the llvm.riscv.vsuxseg3 intrinsic checked above.
void test_vsuxseg3ei16_v_u64m2 (uint64_t *base, vuint16mf2_t bindex, vuint64m2_t v0, vuint64m2_t v1, vuint64m2_t v2, size_t vl) {
  return vsuxseg3ei16_v_u64m2(base, bindex, v0, v1, v2, vl);
}
5877 
// CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], <vscale x 2 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg4ei16_v_u64m2 (4 u64m2 values, 16-bit index vector) must lower to the llvm.riscv.vsuxseg4 intrinsic checked above.
void test_vsuxseg4ei16_v_u64m2 (uint64_t *base, vuint16mf2_t bindex, vuint64m2_t v0, vuint64m2_t v1, vuint64m2_t v2, vuint64m2_t v3, size_t vl) {
  return vsuxseg4ei16_v_u64m2(base, bindex, v0, v1, v2, v3, vl);
}
5886 
// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[V0:%.*]], <vscale x 4 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg2ei16_v_u64m4 (2 u64m4 values, 16-bit index vector) must lower to the llvm.riscv.vsuxseg2 intrinsic checked above.
void test_vsuxseg2ei16_v_u64m4 (uint64_t *base, vuint16m1_t bindex, vuint64m4_t v0, vuint64m4_t v1, size_t vl) {
  return vsuxseg2ei16_v_u64m4(base, bindex, v0, v1, vl);
}
5895 
// CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg2ei32_v_u64m1 (2 u64m1 values, 32-bit index vector) must lower to the llvm.riscv.vsuxseg2 intrinsic checked above.
void test_vsuxseg2ei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t v0, vuint64m1_t v1, size_t vl) {
  return vsuxseg2ei32_v_u64m1(base, bindex, v0, v1, vl);
}
5904 
// CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg3ei32_v_u64m1 (3 u64m1 values, 32-bit index vector) must lower to the llvm.riscv.vsuxseg3 intrinsic checked above.
void test_vsuxseg3ei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, size_t vl) {
  return vsuxseg3ei32_v_u64m1(base, bindex, v0, v1, v2, vl);
}
5913 
// CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg4ei32_v_u64m1 (4 u64m1 values, 32-bit index vector) must lower to the llvm.riscv.vsuxseg4 intrinsic checked above.
void test_vsuxseg4ei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, size_t vl) {
  return vsuxseg4ei32_v_u64m1(base, bindex, v0, v1, v2, v3, vl);
}
5922 
// CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg5ei32_v_u64m1 (5 u64m1 values, 32-bit index vector) must lower to the llvm.riscv.vsuxseg5 intrinsic checked above.
void test_vsuxseg5ei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, size_t vl) {
  return vsuxseg5ei32_v_u64m1(base, bindex, v0, v1, v2, v3, v4, vl);
}
5931 
// CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg6ei32_v_u64m1 (6 u64m1 values, 32-bit index vector) must lower to the llvm.riscv.vsuxseg6 intrinsic checked above.
void test_vsuxseg6ei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, size_t vl) {
  return vsuxseg6ei32_v_u64m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
5940 
// CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg7ei32_v_u64m1 (7 u64m1 values, 32-bit index vector) must lower to the llvm.riscv.vsuxseg7 intrinsic checked above.
void test_vsuxseg7ei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, vuint64m1_t v6, size_t vl) {
  return vsuxseg7ei32_v_u64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
5949 
// CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], <vscale x 1 x i64> [[V7:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg8ei32_v_u64m1 (8 u64m1 values, 32-bit index vector) must lower to the llvm.riscv.vsuxseg8 intrinsic checked above.
void test_vsuxseg8ei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, vuint64m1_t v6, vuint64m1_t v7, size_t vl) {
  return vsuxseg8ei32_v_u64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
5958 
// CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg2ei32_v_u64m2 (2 u64m2 values, 32-bit index vector) must lower to the llvm.riscv.vsuxseg2 intrinsic checked above.
void test_vsuxseg2ei32_v_u64m2 (uint64_t *base, vuint32m1_t bindex, vuint64m2_t v0, vuint64m2_t v1, size_t vl) {
  return vsuxseg2ei32_v_u64m2(base, bindex, v0, v1, vl);
}
5967 
// CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg3ei32_v_u64m2 (3 u64m2 values, 32-bit index vector) must lower to the llvm.riscv.vsuxseg3 intrinsic checked above.
void test_vsuxseg3ei32_v_u64m2 (uint64_t *base, vuint32m1_t bindex, vuint64m2_t v0, vuint64m2_t v1, vuint64m2_t v2, size_t vl) {
  return vsuxseg3ei32_v_u64m2(base, bindex, v0, v1, v2, vl);
}
5976 
// CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], <vscale x 2 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg4ei32_v_u64m2 (4 u64m2 values, 32-bit index vector) must lower to the llvm.riscv.vsuxseg4 intrinsic checked above.
void test_vsuxseg4ei32_v_u64m2 (uint64_t *base, vuint32m1_t bindex, vuint64m2_t v0, vuint64m2_t v1, vuint64m2_t v2, vuint64m2_t v3, size_t vl) {
  return vsuxseg4ei32_v_u64m2(base, bindex, v0, v1, v2, v3, vl);
}
5985 
// CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[V0:%.*]], <vscale x 4 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg2ei32_v_u64m4 (2 u64m4 values, 32-bit index vector) must lower to the llvm.riscv.vsuxseg2 intrinsic checked above.
void test_vsuxseg2ei32_v_u64m4 (uint64_t *base, vuint32m2_t bindex, vuint64m4_t v0, vuint64m4_t v1, size_t vl) {
  return vsuxseg2ei32_v_u64m4(base, bindex, v0, v1, vl);
}
5994 
// CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg2ei64_v_u64m1 (2 u64m1 values, 64-bit index vector) must lower to the llvm.riscv.vsuxseg2 intrinsic checked above.
void test_vsuxseg2ei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t v0, vuint64m1_t v1, size_t vl) {
  return vsuxseg2ei64_v_u64m1(base, bindex, v0, v1, vl);
}
6003 
// CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg3ei64_v_u64m1 (3 u64m1 values, 64-bit index vector) must lower to the llvm.riscv.vsuxseg3 intrinsic checked above.
void test_vsuxseg3ei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, size_t vl) {
  return vsuxseg3ei64_v_u64m1(base, bindex, v0, v1, v2, vl);
}
6012 
// CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg4ei64_v_u64m1 (4 u64m1 values, 64-bit index vector) must lower to the llvm.riscv.vsuxseg4 intrinsic checked above.
void test_vsuxseg4ei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, size_t vl) {
  return vsuxseg4ei64_v_u64m1(base, bindex, v0, v1, v2, v3, vl);
}
6021 
// CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg5ei64_v_u64m1 (5 u64m1 values, 64-bit index vector) must lower to the llvm.riscv.vsuxseg5 intrinsic checked above.
void test_vsuxseg5ei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, size_t vl) {
  return vsuxseg5ei64_v_u64m1(base, bindex, v0, v1, v2, v3, v4, vl);
}
6030 
// CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg6ei64_v_u64m1 (6 u64m1 values, 64-bit index vector) must lower to the llvm.riscv.vsuxseg6 intrinsic checked above.
void test_vsuxseg6ei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, size_t vl) {
  return vsuxseg6ei64_v_u64m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
6039 
// CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg7ei64_v_u64m1 (7 u64m1 values, 64-bit index vector) must lower to the llvm.riscv.vsuxseg7 intrinsic checked above.
void test_vsuxseg7ei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, vuint64m1_t v6, size_t vl) {
  return vsuxseg7ei64_v_u64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
6048 
// CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], <vscale x 1 x i64> [[V7:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg8ei64_v_u64m1 (8 u64m1 values, 64-bit index vector) must lower to the llvm.riscv.vsuxseg8 intrinsic checked above.
void test_vsuxseg8ei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, vuint64m1_t v6, vuint64m1_t v7, size_t vl) {
  return vsuxseg8ei64_v_u64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
6057 
// CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg2ei64_v_u64m2 (2 u64m2 values, 64-bit index vector) must lower to the llvm.riscv.vsuxseg2 intrinsic checked above.
void test_vsuxseg2ei64_v_u64m2 (uint64_t *base, vuint64m2_t bindex, vuint64m2_t v0, vuint64m2_t v1, size_t vl) {
  return vsuxseg2ei64_v_u64m2(base, bindex, v0, v1, vl);
}
6066 
// CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg3ei64_v_u64m2 (3 u64m2 values, 64-bit index vector) must lower to the llvm.riscv.vsuxseg3 intrinsic checked above.
void test_vsuxseg3ei64_v_u64m2 (uint64_t *base, vuint64m2_t bindex, vuint64m2_t v0, vuint64m2_t v1, vuint64m2_t v2, size_t vl) {
  return vsuxseg3ei64_v_u64m2(base, bindex, v0, v1, v2, vl);
}
6075 
// CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], <vscale x 2 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg4ei64_v_u64m2 (4 u64m2 values, 64-bit index vector) must lower to the llvm.riscv.vsuxseg4 intrinsic checked above.
void test_vsuxseg4ei64_v_u64m2 (uint64_t *base, vuint64m2_t bindex, vuint64m2_t v0, vuint64m2_t v1, vuint64m2_t v2, vuint64m2_t v3, size_t vl) {
  return vsuxseg4ei64_v_u64m2(base, bindex, v0, v1, v2, v3, vl);
}
6084 
// CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[V0:%.*]], <vscale x 4 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg2ei64_v_u64m4 (2 u64m4 values, 64-bit index vector) must lower to the llvm.riscv.vsuxseg2 intrinsic checked above.
void test_vsuxseg2ei64_v_u64m4 (uint64_t *base, vuint64m4_t bindex, vuint64m4_t v0, vuint64m4_t v1, size_t vl) {
  return vsuxseg2ei64_v_u64m4(base, bindex, v0, v1, vl);
}
6093 
// CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_f32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg2ei8_v_f32mf2 (2 f32mf2 values, 8-bit index vector) must lower to the llvm.riscv.vsuxseg2 intrinsic checked above.
void test_vsuxseg2ei8_v_f32mf2 (float *base, vuint8mf8_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, size_t vl) {
  return vsuxseg2ei8_v_f32mf2(base, bindex, v0, v1, vl);
}
6102 
// CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_f32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg3ei8_v_f32mf2 (3 f32mf2 values, 8-bit index vector) must lower to the llvm.riscv.vsuxseg3 intrinsic checked above.
void test_vsuxseg3ei8_v_f32mf2 (float *base, vuint8mf8_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, size_t vl) {
  return vsuxseg3ei8_v_f32mf2(base, bindex, v0, v1, v2, vl);
}
6111 
// CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_f32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg4ei8_v_f32mf2 (4 f32mf2 values, 8-bit index vector) must lower to the llvm.riscv.vsuxseg4 intrinsic checked above.
void test_vsuxseg4ei8_v_f32mf2 (float *base, vuint8mf8_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, size_t vl) {
  return vsuxseg4ei8_v_f32mf2(base, bindex, v0, v1, v2, v3, vl);
}
6120 
// CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_f32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], float* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg5ei8_v_f32mf2 (5 f32mf2 values, 8-bit index vector) must lower to the llvm.riscv.vsuxseg5 intrinsic checked above.
void test_vsuxseg5ei8_v_f32mf2 (float *base, vuint8mf8_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, size_t vl) {
  return vsuxseg5ei8_v_f32mf2(base, bindex, v0, v1, v2, v3, v4, vl);
}
6129 
// CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_f32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], float* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg6ei8_v_f32mf2 (6 f32mf2 values, 8-bit index vector) must lower to the llvm.riscv.vsuxseg6 intrinsic checked above.
void test_vsuxseg6ei8_v_f32mf2 (float *base, vuint8mf8_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, size_t vl) {
  return vsuxseg6ei8_v_f32mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
6138 
// CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_f32mf2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], <vscale x 1 x float> [[V6:%.*]], float* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
// Codegen test: vsuxseg7ei8_v_f32mf2 (7 f32mf2 values, 8-bit index vector) must lower to the llvm.riscv.vsuxseg7 intrinsic checked above.
void test_vsuxseg7ei8_v_f32mf2 (float *base, vuint8mf8_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, vfloat32mf2_t v6, size_t vl) {
  return vsuxseg7ei8_v_f32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
6147 
6148 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_f32mf2(
6149 // CHECK-RV64-NEXT:  entry:
6150 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], <vscale x 1 x float> [[V6:%.*]], <vscale x 1 x float> [[V7:%.*]], float* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6151 // CHECK-RV64-NEXT:    ret void
6152 //
test_vsuxseg8ei8_v_f32mf2(float * base,vuint8mf8_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,vfloat32mf2_t v4,vfloat32mf2_t v5,vfloat32mf2_t v6,vfloat32mf2_t v7,size_t vl)6153 void test_vsuxseg8ei8_v_f32mf2 (float *base, vuint8mf8_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, vfloat32mf2_t v6, vfloat32mf2_t v7, size_t vl) {
6154   return vsuxseg8ei8_v_f32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
6155 }
6156 
6157 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_f32m1(
6158 // CHECK-RV64-NEXT:  entry:
6159 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6160 // CHECK-RV64-NEXT:    ret void
6161 //
test_vsuxseg2ei8_v_f32m1(float * base,vuint8mf4_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,size_t vl)6162 void test_vsuxseg2ei8_v_f32m1 (float *base, vuint8mf4_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, size_t vl) {
6163   return vsuxseg2ei8_v_f32m1(base, bindex, v0, v1, vl);
6164 }
6165 
6166 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_f32m1(
6167 // CHECK-RV64-NEXT:  entry:
6168 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6169 // CHECK-RV64-NEXT:    ret void
6170 //
test_vsuxseg3ei8_v_f32m1(float * base,vuint8mf4_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,size_t vl)6171 void test_vsuxseg3ei8_v_f32m1 (float *base, vuint8mf4_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, size_t vl) {
6172   return vsuxseg3ei8_v_f32m1(base, bindex, v0, v1, v2, vl);
6173 }
6174 
6175 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_f32m1(
6176 // CHECK-RV64-NEXT:  entry:
6177 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6178 // CHECK-RV64-NEXT:    ret void
6179 //
test_vsuxseg4ei8_v_f32m1(float * base,vuint8mf4_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,size_t vl)6180 void test_vsuxseg4ei8_v_f32m1 (float *base, vuint8mf4_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, size_t vl) {
6181   return vsuxseg4ei8_v_f32m1(base, bindex, v0, v1, v2, v3, vl);
6182 }
6183 
6184 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_f32m1(
6185 // CHECK-RV64-NEXT:  entry:
6186 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], float* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6187 // CHECK-RV64-NEXT:    ret void
6188 //
test_vsuxseg5ei8_v_f32m1(float * base,vuint8mf4_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,vfloat32m1_t v4,size_t vl)6189 void test_vsuxseg5ei8_v_f32m1 (float *base, vuint8mf4_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, size_t vl) {
6190   return vsuxseg5ei8_v_f32m1(base, bindex, v0, v1, v2, v3, v4, vl);
6191 }
6192 
6193 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_f32m1(
6194 // CHECK-RV64-NEXT:  entry:
6195 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], float* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6196 // CHECK-RV64-NEXT:    ret void
6197 //
test_vsuxseg6ei8_v_f32m1(float * base,vuint8mf4_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,vfloat32m1_t v4,vfloat32m1_t v5,size_t vl)6198 void test_vsuxseg6ei8_v_f32m1 (float *base, vuint8mf4_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, size_t vl) {
6199   return vsuxseg6ei8_v_f32m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
6200 }
6201 
6202 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_f32m1(
6203 // CHECK-RV64-NEXT:  entry:
6204 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], <vscale x 2 x float> [[V6:%.*]], float* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6205 // CHECK-RV64-NEXT:    ret void
6206 //
test_vsuxseg7ei8_v_f32m1(float * base,vuint8mf4_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,vfloat32m1_t v4,vfloat32m1_t v5,vfloat32m1_t v6,size_t vl)6207 void test_vsuxseg7ei8_v_f32m1 (float *base, vuint8mf4_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, vfloat32m1_t v6, size_t vl) {
6208   return vsuxseg7ei8_v_f32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
6209 }
6210 
6211 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_f32m1(
6212 // CHECK-RV64-NEXT:  entry:
6213 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], <vscale x 2 x float> [[V6:%.*]], <vscale x 2 x float> [[V7:%.*]], float* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6214 // CHECK-RV64-NEXT:    ret void
6215 //
test_vsuxseg8ei8_v_f32m1(float * base,vuint8mf4_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,vfloat32m1_t v4,vfloat32m1_t v5,vfloat32m1_t v6,vfloat32m1_t v7,size_t vl)6216 void test_vsuxseg8ei8_v_f32m1 (float *base, vuint8mf4_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, vfloat32m1_t v6, vfloat32m1_t v7, size_t vl) {
6217   return vsuxseg8ei8_v_f32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
6218 }
6219 
6220 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_f32m2(
6221 // CHECK-RV64-NEXT:  entry:
6222 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4f32.nxv4i8.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6223 // CHECK-RV64-NEXT:    ret void
6224 //
test_vsuxseg2ei8_v_f32m2(float * base,vuint8mf2_t bindex,vfloat32m2_t v0,vfloat32m2_t v1,size_t vl)6225 void test_vsuxseg2ei8_v_f32m2 (float *base, vuint8mf2_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, size_t vl) {
6226   return vsuxseg2ei8_v_f32m2(base, bindex, v0, v1, vl);
6227 }
6228 
6229 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_f32m2(
6230 // CHECK-RV64-NEXT:  entry:
6231 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4f32.nxv4i8.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], <vscale x 4 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6232 // CHECK-RV64-NEXT:    ret void
6233 //
test_vsuxseg3ei8_v_f32m2(float * base,vuint8mf2_t bindex,vfloat32m2_t v0,vfloat32m2_t v1,vfloat32m2_t v2,size_t vl)6234 void test_vsuxseg3ei8_v_f32m2 (float *base, vuint8mf2_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, vfloat32m2_t v2, size_t vl) {
6235   return vsuxseg3ei8_v_f32m2(base, bindex, v0, v1, v2, vl);
6236 }
6237 
6238 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_f32m2(
6239 // CHECK-RV64-NEXT:  entry:
6240 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4f32.nxv4i8.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], <vscale x 4 x float> [[V2:%.*]], <vscale x 4 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6241 // CHECK-RV64-NEXT:    ret void
6242 //
test_vsuxseg4ei8_v_f32m2(float * base,vuint8mf2_t bindex,vfloat32m2_t v0,vfloat32m2_t v1,vfloat32m2_t v2,vfloat32m2_t v3,size_t vl)6243 void test_vsuxseg4ei8_v_f32m2 (float *base, vuint8mf2_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, vfloat32m2_t v2, vfloat32m2_t v3, size_t vl) {
6244   return vsuxseg4ei8_v_f32m2(base, bindex, v0, v1, v2, v3, vl);
6245 }
6246 
6247 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_f32m4(
6248 // CHECK-RV64-NEXT:  entry:
6249 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8f32.nxv8i8.i64(<vscale x 8 x float> [[V0:%.*]], <vscale x 8 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6250 // CHECK-RV64-NEXT:    ret void
6251 //
test_vsuxseg2ei8_v_f32m4(float * base,vuint8m1_t bindex,vfloat32m4_t v0,vfloat32m4_t v1,size_t vl)6252 void test_vsuxseg2ei8_v_f32m4 (float *base, vuint8m1_t bindex, vfloat32m4_t v0, vfloat32m4_t v1, size_t vl) {
6253   return vsuxseg2ei8_v_f32m4(base, bindex, v0, v1, vl);
6254 }
6255 
6256 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_f32mf2(
6257 // CHECK-RV64-NEXT:  entry:
6258 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6259 // CHECK-RV64-NEXT:    ret void
6260 //
test_vsuxseg2ei16_v_f32mf2(float * base,vuint16mf4_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,size_t vl)6261 void test_vsuxseg2ei16_v_f32mf2 (float *base, vuint16mf4_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, size_t vl) {
6262   return vsuxseg2ei16_v_f32mf2(base, bindex, v0, v1, vl);
6263 }
6264 
6265 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_f32mf2(
6266 // CHECK-RV64-NEXT:  entry:
6267 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6268 // CHECK-RV64-NEXT:    ret void
6269 //
test_vsuxseg3ei16_v_f32mf2(float * base,vuint16mf4_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,size_t vl)6270 void test_vsuxseg3ei16_v_f32mf2 (float *base, vuint16mf4_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, size_t vl) {
6271   return vsuxseg3ei16_v_f32mf2(base, bindex, v0, v1, v2, vl);
6272 }
6273 
6274 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_f32mf2(
6275 // CHECK-RV64-NEXT:  entry:
6276 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6277 // CHECK-RV64-NEXT:    ret void
6278 //
test_vsuxseg4ei16_v_f32mf2(float * base,vuint16mf4_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,size_t vl)6279 void test_vsuxseg4ei16_v_f32mf2 (float *base, vuint16mf4_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, size_t vl) {
6280   return vsuxseg4ei16_v_f32mf2(base, bindex, v0, v1, v2, v3, vl);
6281 }
6282 
6283 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_f32mf2(
6284 // CHECK-RV64-NEXT:  entry:
6285 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], float* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6286 // CHECK-RV64-NEXT:    ret void
6287 //
test_vsuxseg5ei16_v_f32mf2(float * base,vuint16mf4_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,vfloat32mf2_t v4,size_t vl)6288 void test_vsuxseg5ei16_v_f32mf2 (float *base, vuint16mf4_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, size_t vl) {
6289   return vsuxseg5ei16_v_f32mf2(base, bindex, v0, v1, v2, v3, v4, vl);
6290 }
6291 
6292 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_f32mf2(
6293 // CHECK-RV64-NEXT:  entry:
6294 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], float* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6295 // CHECK-RV64-NEXT:    ret void
6296 //
test_vsuxseg6ei16_v_f32mf2(float * base,vuint16mf4_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,vfloat32mf2_t v4,vfloat32mf2_t v5,size_t vl)6297 void test_vsuxseg6ei16_v_f32mf2 (float *base, vuint16mf4_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, size_t vl) {
6298   return vsuxseg6ei16_v_f32mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
6299 }
6300 
6301 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_f32mf2(
6302 // CHECK-RV64-NEXT:  entry:
6303 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], <vscale x 1 x float> [[V6:%.*]], float* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6304 // CHECK-RV64-NEXT:    ret void
6305 //
test_vsuxseg7ei16_v_f32mf2(float * base,vuint16mf4_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,vfloat32mf2_t v4,vfloat32mf2_t v5,vfloat32mf2_t v6,size_t vl)6306 void test_vsuxseg7ei16_v_f32mf2 (float *base, vuint16mf4_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, vfloat32mf2_t v6, size_t vl) {
6307   return vsuxseg7ei16_v_f32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
6308 }
6309 
6310 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_f32mf2(
6311 // CHECK-RV64-NEXT:  entry:
6312 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], <vscale x 1 x float> [[V6:%.*]], <vscale x 1 x float> [[V7:%.*]], float* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6313 // CHECK-RV64-NEXT:    ret void
6314 //
test_vsuxseg8ei16_v_f32mf2(float * base,vuint16mf4_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,vfloat32mf2_t v4,vfloat32mf2_t v5,vfloat32mf2_t v6,vfloat32mf2_t v7,size_t vl)6315 void test_vsuxseg8ei16_v_f32mf2 (float *base, vuint16mf4_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, vfloat32mf2_t v6, vfloat32mf2_t v7, size_t vl) {
6316   return vsuxseg8ei16_v_f32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
6317 }
6318 
6319 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_f32m1(
6320 // CHECK-RV64-NEXT:  entry:
6321 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6322 // CHECK-RV64-NEXT:    ret void
6323 //
test_vsuxseg2ei16_v_f32m1(float * base,vuint16mf2_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,size_t vl)6324 void test_vsuxseg2ei16_v_f32m1 (float *base, vuint16mf2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, size_t vl) {
6325   return vsuxseg2ei16_v_f32m1(base, bindex, v0, v1, vl);
6326 }
6327 
6328 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_f32m1(
6329 // CHECK-RV64-NEXT:  entry:
6330 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6331 // CHECK-RV64-NEXT:    ret void
6332 //
test_vsuxseg3ei16_v_f32m1(float * base,vuint16mf2_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,size_t vl)6333 void test_vsuxseg3ei16_v_f32m1 (float *base, vuint16mf2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, size_t vl) {
6334   return vsuxseg3ei16_v_f32m1(base, bindex, v0, v1, v2, vl);
6335 }
6336 
6337 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_f32m1(
6338 // CHECK-RV64-NEXT:  entry:
6339 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6340 // CHECK-RV64-NEXT:    ret void
6341 //
test_vsuxseg4ei16_v_f32m1(float * base,vuint16mf2_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,size_t vl)6342 void test_vsuxseg4ei16_v_f32m1 (float *base, vuint16mf2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, size_t vl) {
6343   return vsuxseg4ei16_v_f32m1(base, bindex, v0, v1, v2, v3, vl);
6344 }
6345 
6346 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_f32m1(
6347 // CHECK-RV64-NEXT:  entry:
6348 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], float* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6349 // CHECK-RV64-NEXT:    ret void
6350 //
test_vsuxseg5ei16_v_f32m1(float * base,vuint16mf2_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,vfloat32m1_t v4,size_t vl)6351 void test_vsuxseg5ei16_v_f32m1 (float *base, vuint16mf2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, size_t vl) {
6352   return vsuxseg5ei16_v_f32m1(base, bindex, v0, v1, v2, v3, v4, vl);
6353 }
6354 
6355 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_f32m1(
6356 // CHECK-RV64-NEXT:  entry:
6357 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], float* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6358 // CHECK-RV64-NEXT:    ret void
6359 //
test_vsuxseg6ei16_v_f32m1(float * base,vuint16mf2_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,vfloat32m1_t v4,vfloat32m1_t v5,size_t vl)6360 void test_vsuxseg6ei16_v_f32m1 (float *base, vuint16mf2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, size_t vl) {
6361   return vsuxseg6ei16_v_f32m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
6362 }
6363 
6364 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_f32m1(
6365 // CHECK-RV64-NEXT:  entry:
6366 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], <vscale x 2 x float> [[V6:%.*]], float* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6367 // CHECK-RV64-NEXT:    ret void
6368 //
test_vsuxseg7ei16_v_f32m1(float * base,vuint16mf2_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,vfloat32m1_t v4,vfloat32m1_t v5,vfloat32m1_t v6,size_t vl)6369 void test_vsuxseg7ei16_v_f32m1 (float *base, vuint16mf2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, vfloat32m1_t v6, size_t vl) {
6370   return vsuxseg7ei16_v_f32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
6371 }
6372 
6373 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_f32m1(
6374 // CHECK-RV64-NEXT:  entry:
6375 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], <vscale x 2 x float> [[V6:%.*]], <vscale x 2 x float> [[V7:%.*]], float* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6376 // CHECK-RV64-NEXT:    ret void
6377 //
test_vsuxseg8ei16_v_f32m1(float * base,vuint16mf2_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,vfloat32m1_t v4,vfloat32m1_t v5,vfloat32m1_t v6,vfloat32m1_t v7,size_t vl)6378 void test_vsuxseg8ei16_v_f32m1 (float *base, vuint16mf2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, vfloat32m1_t v6, vfloat32m1_t v7, size_t vl) {
6379   return vsuxseg8ei16_v_f32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
6380 }
6381 
6382 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_f32m2(
6383 // CHECK-RV64-NEXT:  entry:
6384 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4f32.nxv4i16.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6385 // CHECK-RV64-NEXT:    ret void
6386 //
test_vsuxseg2ei16_v_f32m2(float * base,vuint16m1_t bindex,vfloat32m2_t v0,vfloat32m2_t v1,size_t vl)6387 void test_vsuxseg2ei16_v_f32m2 (float *base, vuint16m1_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, size_t vl) {
6388   return vsuxseg2ei16_v_f32m2(base, bindex, v0, v1, vl);
6389 }
6390 
6391 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_f32m2(
6392 // CHECK-RV64-NEXT:  entry:
6393 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4f32.nxv4i16.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], <vscale x 4 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6394 // CHECK-RV64-NEXT:    ret void
6395 //
test_vsuxseg3ei16_v_f32m2(float * base,vuint16m1_t bindex,vfloat32m2_t v0,vfloat32m2_t v1,vfloat32m2_t v2,size_t vl)6396 void test_vsuxseg3ei16_v_f32m2 (float *base, vuint16m1_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, vfloat32m2_t v2, size_t vl) {
6397   return vsuxseg3ei16_v_f32m2(base, bindex, v0, v1, v2, vl);
6398 }
6399 
6400 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_f32m2(
6401 // CHECK-RV64-NEXT:  entry:
6402 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4f32.nxv4i16.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], <vscale x 4 x float> [[V2:%.*]], <vscale x 4 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6403 // CHECK-RV64-NEXT:    ret void
6404 //
test_vsuxseg4ei16_v_f32m2(float * base,vuint16m1_t bindex,vfloat32m2_t v0,vfloat32m2_t v1,vfloat32m2_t v2,vfloat32m2_t v3,size_t vl)6405 void test_vsuxseg4ei16_v_f32m2 (float *base, vuint16m1_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, vfloat32m2_t v2, vfloat32m2_t v3, size_t vl) {
6406   return vsuxseg4ei16_v_f32m2(base, bindex, v0, v1, v2, v3, vl);
6407 }
6408 
6409 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_f32m4(
6410 // CHECK-RV64-NEXT:  entry:
6411 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8f32.nxv8i16.i64(<vscale x 8 x float> [[V0:%.*]], <vscale x 8 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6412 // CHECK-RV64-NEXT:    ret void
6413 //
test_vsuxseg2ei16_v_f32m4(float * base,vuint16m2_t bindex,vfloat32m4_t v0,vfloat32m4_t v1,size_t vl)6414 void test_vsuxseg2ei16_v_f32m4 (float *base, vuint16m2_t bindex, vfloat32m4_t v0, vfloat32m4_t v1, size_t vl) {
6415   return vsuxseg2ei16_v_f32m4(base, bindex, v0, v1, vl);
6416 }
6417 
6418 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_f32mf2(
6419 // CHECK-RV64-NEXT:  entry:
6420 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6421 // CHECK-RV64-NEXT:    ret void
6422 //
test_vsuxseg2ei32_v_f32mf2(float * base,vuint32mf2_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,size_t vl)6423 void test_vsuxseg2ei32_v_f32mf2 (float *base, vuint32mf2_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, size_t vl) {
6424   return vsuxseg2ei32_v_f32mf2(base, bindex, v0, v1, vl);
6425 }
6426 
6427 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_f32mf2(
6428 // CHECK-RV64-NEXT:  entry:
6429 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6430 // CHECK-RV64-NEXT:    ret void
6431 //
test_vsuxseg3ei32_v_f32mf2(float * base,vuint32mf2_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,size_t vl)6432 void test_vsuxseg3ei32_v_f32mf2 (float *base, vuint32mf2_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, size_t vl) {
6433   return vsuxseg3ei32_v_f32mf2(base, bindex, v0, v1, v2, vl);
6434 }
6435 
6436 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_f32mf2(
6437 // CHECK-RV64-NEXT:  entry:
6438 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6439 // CHECK-RV64-NEXT:    ret void
6440 //
test_vsuxseg4ei32_v_f32mf2(float * base,vuint32mf2_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,size_t vl)6441 void test_vsuxseg4ei32_v_f32mf2 (float *base, vuint32mf2_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, size_t vl) {
6442   return vsuxseg4ei32_v_f32mf2(base, bindex, v0, v1, v2, v3, vl);
6443 }
6444 
6445 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_f32mf2(
6446 // CHECK-RV64-NEXT:  entry:
6447 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], float* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6448 // CHECK-RV64-NEXT:    ret void
6449 //
test_vsuxseg5ei32_v_f32mf2(float * base,vuint32mf2_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,vfloat32mf2_t v4,size_t vl)6450 void test_vsuxseg5ei32_v_f32mf2 (float *base, vuint32mf2_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, size_t vl) {
6451   return vsuxseg5ei32_v_f32mf2(base, bindex, v0, v1, v2, v3, v4, vl);
6452 }
6453 
6454 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_f32mf2(
6455 // CHECK-RV64-NEXT:  entry:
6456 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], float* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6457 // CHECK-RV64-NEXT:    ret void
6458 //
test_vsuxseg6ei32_v_f32mf2(float * base,vuint32mf2_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,vfloat32mf2_t v4,vfloat32mf2_t v5,size_t vl)6459 void test_vsuxseg6ei32_v_f32mf2 (float *base, vuint32mf2_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, size_t vl) {
6460   return vsuxseg6ei32_v_f32mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
6461 }
6462 
6463 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_f32mf2(
6464 // CHECK-RV64-NEXT:  entry:
6465 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], <vscale x 1 x float> [[V6:%.*]], float* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6466 // CHECK-RV64-NEXT:    ret void
6467 //
test_vsuxseg7ei32_v_f32mf2(float * base,vuint32mf2_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,vfloat32mf2_t v4,vfloat32mf2_t v5,vfloat32mf2_t v6,size_t vl)6468 void test_vsuxseg7ei32_v_f32mf2 (float *base, vuint32mf2_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, vfloat32mf2_t v6, size_t vl) {
6469   return vsuxseg7ei32_v_f32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
6470 }
6471 
6472 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_f32mf2(
6473 // CHECK-RV64-NEXT:  entry:
6474 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], <vscale x 1 x float> [[V6:%.*]], <vscale x 1 x float> [[V7:%.*]], float* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6475 // CHECK-RV64-NEXT:    ret void
6476 //
test_vsuxseg8ei32_v_f32mf2(float * base,vuint32mf2_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,vfloat32mf2_t v4,vfloat32mf2_t v5,vfloat32mf2_t v6,vfloat32mf2_t v7,size_t vl)6477 void test_vsuxseg8ei32_v_f32mf2 (float *base, vuint32mf2_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, vfloat32mf2_t v6, vfloat32mf2_t v7, size_t vl) {
6478   return vsuxseg8ei32_v_f32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
6479 }
6480 
6481 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_f32m1(
6482 // CHECK-RV64-NEXT:  entry:
6483 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6484 // CHECK-RV64-NEXT:    ret void
6485 //
test_vsuxseg2ei32_v_f32m1(float * base,vuint32m1_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,size_t vl)6486 void test_vsuxseg2ei32_v_f32m1 (float *base, vuint32m1_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, size_t vl) {
6487   return vsuxseg2ei32_v_f32m1(base, bindex, v0, v1, vl);
6488 }
6489 
6490 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_f32m1(
6491 // CHECK-RV64-NEXT:  entry:
6492 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6493 // CHECK-RV64-NEXT:    ret void
6494 //
// 3-field f32m1 segment store, 32-bit indices; must lower to
// @llvm.riscv.vsuxseg3.nxv2f32.nxv2i32 (CHECK lines above).
void test_vsuxseg3ei32_v_f32m1 (float *base, vuint32m1_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, size_t vl) {
  return vsuxseg3ei32_v_f32m1(base, bindex, v0, v1, v2, vl);
}
6498 
6499 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_f32m1(
6500 // CHECK-RV64-NEXT:  entry:
6501 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6502 // CHECK-RV64-NEXT:    ret void
6503 //
// 4-field f32m1 segment store, 32-bit indices; must lower to
// @llvm.riscv.vsuxseg4.nxv2f32.nxv2i32 (CHECK lines above).
void test_vsuxseg4ei32_v_f32m1 (float *base, vuint32m1_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, size_t vl) {
  return vsuxseg4ei32_v_f32m1(base, bindex, v0, v1, v2, v3, vl);
}
6507 
6508 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_f32m1(
6509 // CHECK-RV64-NEXT:  entry:
6510 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], float* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6511 // CHECK-RV64-NEXT:    ret void
6512 //
// 5-field f32m1 segment store, 32-bit indices; must lower to
// @llvm.riscv.vsuxseg5.nxv2f32.nxv2i32 (CHECK lines above).
void test_vsuxseg5ei32_v_f32m1 (float *base, vuint32m1_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, size_t vl) {
  return vsuxseg5ei32_v_f32m1(base, bindex, v0, v1, v2, v3, v4, vl);
}
6516 
6517 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_f32m1(
6518 // CHECK-RV64-NEXT:  entry:
6519 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], float* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6520 // CHECK-RV64-NEXT:    ret void
6521 //
// 6-field f32m1 segment store, 32-bit indices; must lower to
// @llvm.riscv.vsuxseg6.nxv2f32.nxv2i32 (CHECK lines above).
void test_vsuxseg6ei32_v_f32m1 (float *base, vuint32m1_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, size_t vl) {
  return vsuxseg6ei32_v_f32m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
6525 
6526 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_f32m1(
6527 // CHECK-RV64-NEXT:  entry:
6528 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], <vscale x 2 x float> [[V6:%.*]], float* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6529 // CHECK-RV64-NEXT:    ret void
6530 //
// 7-field f32m1 segment store, 32-bit indices; must lower to
// @llvm.riscv.vsuxseg7.nxv2f32.nxv2i32 (CHECK lines above).
void test_vsuxseg7ei32_v_f32m1 (float *base, vuint32m1_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, vfloat32m1_t v6, size_t vl) {
  return vsuxseg7ei32_v_f32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
6534 
6535 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_f32m1(
6536 // CHECK-RV64-NEXT:  entry:
6537 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], <vscale x 2 x float> [[V6:%.*]], <vscale x 2 x float> [[V7:%.*]], float* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6538 // CHECK-RV64-NEXT:    ret void
6539 //
// 8-field f32m1 segment store, 32-bit indices; must lower to
// @llvm.riscv.vsuxseg8.nxv2f32.nxv2i32 (CHECK lines above).
void test_vsuxseg8ei32_v_f32m1 (float *base, vuint32m1_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, vfloat32m1_t v6, vfloat32m1_t v7, size_t vl) {
  return vsuxseg8ei32_v_f32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
6543 
6544 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_f32m2(
6545 // CHECK-RV64-NEXT:  entry:
6546 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4f32.nxv4i32.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6547 // CHECK-RV64-NEXT:    ret void
6548 //
// 2-field f32m2 segment store, 32-bit indices; must lower to
// @llvm.riscv.vsuxseg2.nxv4f32.nxv4i32 (CHECK lines above).
void test_vsuxseg2ei32_v_f32m2 (float *base, vuint32m2_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, size_t vl) {
  return vsuxseg2ei32_v_f32m2(base, bindex, v0, v1, vl);
}
6552 
6553 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_f32m2(
6554 // CHECK-RV64-NEXT:  entry:
6555 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4f32.nxv4i32.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], <vscale x 4 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6556 // CHECK-RV64-NEXT:    ret void
6557 //
// 3-field f32m2 segment store, 32-bit indices; must lower to
// @llvm.riscv.vsuxseg3.nxv4f32.nxv4i32 (CHECK lines above).
void test_vsuxseg3ei32_v_f32m2 (float *base, vuint32m2_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, vfloat32m2_t v2, size_t vl) {
  return vsuxseg3ei32_v_f32m2(base, bindex, v0, v1, v2, vl);
}
6561 
6562 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_f32m2(
6563 // CHECK-RV64-NEXT:  entry:
6564 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4f32.nxv4i32.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], <vscale x 4 x float> [[V2:%.*]], <vscale x 4 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6565 // CHECK-RV64-NEXT:    ret void
6566 //
// 4-field f32m2 segment store, 32-bit indices; must lower to
// @llvm.riscv.vsuxseg4.nxv4f32.nxv4i32 (CHECK lines above).
void test_vsuxseg4ei32_v_f32m2 (float *base, vuint32m2_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, vfloat32m2_t v2, vfloat32m2_t v3, size_t vl) {
  return vsuxseg4ei32_v_f32m2(base, bindex, v0, v1, v2, v3, vl);
}
6570 
6571 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_f32m4(
6572 // CHECK-RV64-NEXT:  entry:
6573 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8f32.nxv8i32.i64(<vscale x 8 x float> [[V0:%.*]], <vscale x 8 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6574 // CHECK-RV64-NEXT:    ret void
6575 //
// 2-field f32m4 segment store, 32-bit indices; must lower to
// @llvm.riscv.vsuxseg2.nxv8f32.nxv8i32 (CHECK lines above).
void test_vsuxseg2ei32_v_f32m4 (float *base, vuint32m4_t bindex, vfloat32m4_t v0, vfloat32m4_t v1, size_t vl) {
  return vsuxseg2ei32_v_f32m4(base, bindex, v0, v1, vl);
}
6579 
6580 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_f32mf2(
6581 // CHECK-RV64-NEXT:  entry:
6582 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6583 // CHECK-RV64-NEXT:    ret void
6584 //
// 2-field f32mf2 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg2.nxv1f32.nxv1i64 (CHECK lines above).
void test_vsuxseg2ei64_v_f32mf2 (float *base, vuint64m1_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, size_t vl) {
  return vsuxseg2ei64_v_f32mf2(base, bindex, v0, v1, vl);
}
6588 
6589 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_f32mf2(
6590 // CHECK-RV64-NEXT:  entry:
6591 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6592 // CHECK-RV64-NEXT:    ret void
6593 //
// 3-field f32mf2 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg3.nxv1f32.nxv1i64 (CHECK lines above).
void test_vsuxseg3ei64_v_f32mf2 (float *base, vuint64m1_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, size_t vl) {
  return vsuxseg3ei64_v_f32mf2(base, bindex, v0, v1, v2, vl);
}
6597 
6598 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_f32mf2(
6599 // CHECK-RV64-NEXT:  entry:
6600 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6601 // CHECK-RV64-NEXT:    ret void
6602 //
// 4-field f32mf2 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg4.nxv1f32.nxv1i64 (CHECK lines above).
void test_vsuxseg4ei64_v_f32mf2 (float *base, vuint64m1_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, size_t vl) {
  return vsuxseg4ei64_v_f32mf2(base, bindex, v0, v1, v2, v3, vl);
}
6606 
6607 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_f32mf2(
6608 // CHECK-RV64-NEXT:  entry:
6609 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], float* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6610 // CHECK-RV64-NEXT:    ret void
6611 //
// 5-field f32mf2 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg5.nxv1f32.nxv1i64 (CHECK lines above).
void test_vsuxseg5ei64_v_f32mf2 (float *base, vuint64m1_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, size_t vl) {
  return vsuxseg5ei64_v_f32mf2(base, bindex, v0, v1, v2, v3, v4, vl);
}
6615 
6616 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_f32mf2(
6617 // CHECK-RV64-NEXT:  entry:
6618 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], float* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6619 // CHECK-RV64-NEXT:    ret void
6620 //
// 6-field f32mf2 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg6.nxv1f32.nxv1i64 (CHECK lines above).
void test_vsuxseg6ei64_v_f32mf2 (float *base, vuint64m1_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, size_t vl) {
  return vsuxseg6ei64_v_f32mf2(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
6624 
6625 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_f32mf2(
6626 // CHECK-RV64-NEXT:  entry:
6627 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], <vscale x 1 x float> [[V6:%.*]], float* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6628 // CHECK-RV64-NEXT:    ret void
6629 //
// 7-field f32mf2 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg7.nxv1f32.nxv1i64 (CHECK lines above).
void test_vsuxseg7ei64_v_f32mf2 (float *base, vuint64m1_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, vfloat32mf2_t v6, size_t vl) {
  return vsuxseg7ei64_v_f32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
6633 
6634 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_f32mf2(
6635 // CHECK-RV64-NEXT:  entry:
6636 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], <vscale x 1 x float> [[V6:%.*]], <vscale x 1 x float> [[V7:%.*]], float* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6637 // CHECK-RV64-NEXT:    ret void
6638 //
// 8-field f32mf2 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg8.nxv1f32.nxv1i64 (CHECK lines above).
void test_vsuxseg8ei64_v_f32mf2 (float *base, vuint64m1_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, vfloat32mf2_t v6, vfloat32mf2_t v7, size_t vl) {
  return vsuxseg8ei64_v_f32mf2(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
6642 
6643 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_f32m1(
6644 // CHECK-RV64-NEXT:  entry:
6645 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6646 // CHECK-RV64-NEXT:    ret void
6647 //
// 2-field f32m1 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg2.nxv2f32.nxv2i64 (CHECK lines above).
void test_vsuxseg2ei64_v_f32m1 (float *base, vuint64m2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, size_t vl) {
  return vsuxseg2ei64_v_f32m1(base, bindex, v0, v1, vl);
}
6651 
6652 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_f32m1(
6653 // CHECK-RV64-NEXT:  entry:
6654 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6655 // CHECK-RV64-NEXT:    ret void
6656 //
// 3-field f32m1 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg3.nxv2f32.nxv2i64 (CHECK lines above).
void test_vsuxseg3ei64_v_f32m1 (float *base, vuint64m2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, size_t vl) {
  return vsuxseg3ei64_v_f32m1(base, bindex, v0, v1, v2, vl);
}
6660 
6661 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_f32m1(
6662 // CHECK-RV64-NEXT:  entry:
6663 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6664 // CHECK-RV64-NEXT:    ret void
6665 //
// 4-field f32m1 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg4.nxv2f32.nxv2i64 (CHECK lines above).
void test_vsuxseg4ei64_v_f32m1 (float *base, vuint64m2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, size_t vl) {
  return vsuxseg4ei64_v_f32m1(base, bindex, v0, v1, v2, v3, vl);
}
6669 
6670 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_f32m1(
6671 // CHECK-RV64-NEXT:  entry:
6672 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], float* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6673 // CHECK-RV64-NEXT:    ret void
6674 //
// 5-field f32m1 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg5.nxv2f32.nxv2i64 (CHECK lines above).
void test_vsuxseg5ei64_v_f32m1 (float *base, vuint64m2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, size_t vl) {
  return vsuxseg5ei64_v_f32m1(base, bindex, v0, v1, v2, v3, v4, vl);
}
6678 
6679 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_f32m1(
6680 // CHECK-RV64-NEXT:  entry:
6681 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], float* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6682 // CHECK-RV64-NEXT:    ret void
6683 //
// 6-field f32m1 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg6.nxv2f32.nxv2i64 (CHECK lines above).
void test_vsuxseg6ei64_v_f32m1 (float *base, vuint64m2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, size_t vl) {
  return vsuxseg6ei64_v_f32m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
6687 
6688 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_f32m1(
6689 // CHECK-RV64-NEXT:  entry:
6690 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], <vscale x 2 x float> [[V6:%.*]], float* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6691 // CHECK-RV64-NEXT:    ret void
6692 //
// 7-field f32m1 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg7.nxv2f32.nxv2i64 (CHECK lines above).
void test_vsuxseg7ei64_v_f32m1 (float *base, vuint64m2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, vfloat32m1_t v6, size_t vl) {
  return vsuxseg7ei64_v_f32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
6696 
6697 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_f32m1(
6698 // CHECK-RV64-NEXT:  entry:
6699 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], <vscale x 2 x float> [[V6:%.*]], <vscale x 2 x float> [[V7:%.*]], float* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6700 // CHECK-RV64-NEXT:    ret void
6701 //
// 8-field f32m1 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg8.nxv2f32.nxv2i64 (CHECK lines above).
void test_vsuxseg8ei64_v_f32m1 (float *base, vuint64m2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, vfloat32m1_t v6, vfloat32m1_t v7, size_t vl) {
  return vsuxseg8ei64_v_f32m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
6705 
6706 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_f32m2(
6707 // CHECK-RV64-NEXT:  entry:
6708 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4f32.nxv4i64.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6709 // CHECK-RV64-NEXT:    ret void
6710 //
// 2-field f32m2 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg2.nxv4f32.nxv4i64 (CHECK lines above).
void test_vsuxseg2ei64_v_f32m2 (float *base, vuint64m4_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, size_t vl) {
  return vsuxseg2ei64_v_f32m2(base, bindex, v0, v1, vl);
}
6714 
6715 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_f32m2(
6716 // CHECK-RV64-NEXT:  entry:
6717 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv4f32.nxv4i64.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], <vscale x 4 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6718 // CHECK-RV64-NEXT:    ret void
6719 //
// 3-field f32m2 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg3.nxv4f32.nxv4i64 (CHECK lines above).
void test_vsuxseg3ei64_v_f32m2 (float *base, vuint64m4_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, vfloat32m2_t v2, size_t vl) {
  return vsuxseg3ei64_v_f32m2(base, bindex, v0, v1, v2, vl);
}
6723 
6724 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_f32m2(
6725 // CHECK-RV64-NEXT:  entry:
6726 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv4f32.nxv4i64.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], <vscale x 4 x float> [[V2:%.*]], <vscale x 4 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6727 // CHECK-RV64-NEXT:    ret void
6728 //
// 4-field f32m2 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg4.nxv4f32.nxv4i64 (CHECK lines above).
void test_vsuxseg4ei64_v_f32m2 (float *base, vuint64m4_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, vfloat32m2_t v2, vfloat32m2_t v3, size_t vl) {
  return vsuxseg4ei64_v_f32m2(base, bindex, v0, v1, v2, v3, vl);
}
6732 
6733 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_f32m4(
6734 // CHECK-RV64-NEXT:  entry:
6735 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv8f32.nxv8i64.i64(<vscale x 8 x float> [[V0:%.*]], <vscale x 8 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
6736 // CHECK-RV64-NEXT:    ret void
6737 //
// 2-field f32m4 segment store, 64-bit indices; must lower to
// @llvm.riscv.vsuxseg2.nxv8f32.nxv8i64 (CHECK lines above).
void test_vsuxseg2ei64_v_f32m4 (float *base, vuint64m8_t bindex, vfloat32m4_t v0, vfloat32m4_t v1, size_t vl) {
  return vsuxseg2ei64_v_f32m4(base, bindex, v0, v1, vl);
}
6741 
6742 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_f64m1(
6743 // CHECK-RV64-NEXT:  entry:
6744 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6745 // CHECK-RV64-NEXT:    ret void
6746 //
// 2-field f64m1 segment store, 8-bit indices; must lower to
// @llvm.riscv.vsuxseg2.nxv1f64.nxv1i8 (CHECK lines above).
void test_vsuxseg2ei8_v_f64m1 (double *base, vuint8mf8_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, size_t vl) {
  return vsuxseg2ei8_v_f64m1(base, bindex, v0, v1, vl);
}
6750 
6751 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_f64m1(
6752 // CHECK-RV64-NEXT:  entry:
6753 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], double* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6754 // CHECK-RV64-NEXT:    ret void
6755 //
// 3-field f64m1 segment store, 8-bit indices; must lower to
// @llvm.riscv.vsuxseg3.nxv1f64.nxv1i8 (CHECK lines above).
void test_vsuxseg3ei8_v_f64m1 (double *base, vuint8mf8_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, size_t vl) {
  return vsuxseg3ei8_v_f64m1(base, bindex, v0, v1, v2, vl);
}
6759 
6760 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_f64m1(
6761 // CHECK-RV64-NEXT:  entry:
6762 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], double* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6763 // CHECK-RV64-NEXT:    ret void
6764 //
// 4-field f64m1 segment store, 8-bit indices; must lower to
// @llvm.riscv.vsuxseg4.nxv1f64.nxv1i8 (CHECK lines above).
void test_vsuxseg4ei8_v_f64m1 (double *base, vuint8mf8_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, size_t vl) {
  return vsuxseg4ei8_v_f64m1(base, bindex, v0, v1, v2, v3, vl);
}
6768 
6769 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_f64m1(
6770 // CHECK-RV64-NEXT:  entry:
6771 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], double* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6772 // CHECK-RV64-NEXT:    ret void
6773 //
// 5-field f64m1 segment store, 8-bit indices; must lower to
// @llvm.riscv.vsuxseg5.nxv1f64.nxv1i8 (CHECK lines above).
void test_vsuxseg5ei8_v_f64m1 (double *base, vuint8mf8_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, size_t vl) {
  return vsuxseg5ei8_v_f64m1(base, bindex, v0, v1, v2, v3, v4, vl);
}
6777 
6778 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_f64m1(
6779 // CHECK-RV64-NEXT:  entry:
6780 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], double* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6781 // CHECK-RV64-NEXT:    ret void
6782 //
// 6-field f64m1 segment store, 8-bit indices; must lower to
// @llvm.riscv.vsuxseg6.nxv1f64.nxv1i8 (CHECK lines above).
void test_vsuxseg6ei8_v_f64m1 (double *base, vuint8mf8_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, size_t vl) {
  return vsuxseg6ei8_v_f64m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
6786 
6787 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_f64m1(
6788 // CHECK-RV64-NEXT:  entry:
6789 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], <vscale x 1 x double> [[V6:%.*]], double* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6790 // CHECK-RV64-NEXT:    ret void
6791 //
// 7-field f64m1 segment store, 8-bit indices; must lower to
// @llvm.riscv.vsuxseg7.nxv1f64.nxv1i8 (CHECK lines above).
void test_vsuxseg7ei8_v_f64m1 (double *base, vuint8mf8_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, vfloat64m1_t v6, size_t vl) {
  return vsuxseg7ei8_v_f64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
6795 
6796 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_f64m1(
6797 // CHECK-RV64-NEXT:  entry:
6798 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], <vscale x 1 x double> [[V6:%.*]], <vscale x 1 x double> [[V7:%.*]], double* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6799 // CHECK-RV64-NEXT:    ret void
6800 //
// 8-field f64m1 segment store, 8-bit indices; must lower to
// @llvm.riscv.vsuxseg8.nxv1f64.nxv1i8 (CHECK lines above).
void test_vsuxseg8ei8_v_f64m1 (double *base, vuint8mf8_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, vfloat64m1_t v6, vfloat64m1_t v7, size_t vl) {
  return vsuxseg8ei8_v_f64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
6804 
6805 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_f64m2(
6806 // CHECK-RV64-NEXT:  entry:
6807 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2f64.nxv2i8.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6808 // CHECK-RV64-NEXT:    ret void
6809 //
// 2-field f64m2 segment store, 8-bit indices; must lower to
// @llvm.riscv.vsuxseg2.nxv2f64.nxv2i8 (CHECK lines above).
void test_vsuxseg2ei8_v_f64m2 (double *base, vuint8mf4_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, size_t vl) {
  return vsuxseg2ei8_v_f64m2(base, bindex, v0, v1, vl);
}
6813 
6814 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_f64m2(
6815 // CHECK-RV64-NEXT:  entry:
6816 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2f64.nxv2i8.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], <vscale x 2 x double> [[V2:%.*]], double* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6817 // CHECK-RV64-NEXT:    ret void
6818 //
// 3-field f64m2 segment store, 8-bit indices; must lower to
// @llvm.riscv.vsuxseg3.nxv2f64.nxv2i8 (CHECK lines above).
void test_vsuxseg3ei8_v_f64m2 (double *base, vuint8mf4_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, vfloat64m2_t v2, size_t vl) {
  return vsuxseg3ei8_v_f64m2(base, bindex, v0, v1, v2, vl);
}
6822 
6823 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_f64m2(
6824 // CHECK-RV64-NEXT:  entry:
6825 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2f64.nxv2i8.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], <vscale x 2 x double> [[V2:%.*]], <vscale x 2 x double> [[V3:%.*]], double* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6826 // CHECK-RV64-NEXT:    ret void
6827 //
// 4-field f64m2 segment store, 8-bit indices; must lower to
// @llvm.riscv.vsuxseg4.nxv2f64.nxv2i8 (CHECK lines above).
void test_vsuxseg4ei8_v_f64m2 (double *base, vuint8mf4_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, vfloat64m2_t v2, vfloat64m2_t v3, size_t vl) {
  return vsuxseg4ei8_v_f64m2(base, bindex, v0, v1, v2, v3, vl);
}
6831 
6832 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_f64m4(
6833 // CHECK-RV64-NEXT:  entry:
6834 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4f64.nxv4i8.i64(<vscale x 4 x double> [[V0:%.*]], <vscale x 4 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
6835 // CHECK-RV64-NEXT:    ret void
6836 //
// 2-field f64m4 segment store, 8-bit indices; must lower to
// @llvm.riscv.vsuxseg2.nxv4f64.nxv4i8 (CHECK lines above).
void test_vsuxseg2ei8_v_f64m4 (double *base, vuint8mf2_t bindex, vfloat64m4_t v0, vfloat64m4_t v1, size_t vl) {
  return vsuxseg2ei8_v_f64m4(base, bindex, v0, v1, vl);
}
6840 
6841 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_f64m1(
6842 // CHECK-RV64-NEXT:  entry:
6843 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6844 // CHECK-RV64-NEXT:    ret void
6845 //
// 2-field f64m1 segment store, 16-bit indices; must lower to
// @llvm.riscv.vsuxseg2.nxv1f64.nxv1i16 (CHECK lines above).
void test_vsuxseg2ei16_v_f64m1 (double *base, vuint16mf4_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, size_t vl) {
  return vsuxseg2ei16_v_f64m1(base, bindex, v0, v1, vl);
}
6849 
6850 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_f64m1(
6851 // CHECK-RV64-NEXT:  entry:
6852 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], double* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6853 // CHECK-RV64-NEXT:    ret void
6854 //
// Verify vsuxseg3ei16_v_f64m1 (3-field indexed segment store, f64m1 data, 16-bit indices)
// lowers to the @llvm.riscv.vsuxseg3 call asserted in the CHECK lines above.
test_vsuxseg3ei16_v_f64m1(double * base,vuint16mf4_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,size_t vl)6855 void test_vsuxseg3ei16_v_f64m1 (double *base, vuint16mf4_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, size_t vl) {
6856   return vsuxseg3ei16_v_f64m1(base, bindex, v0, v1, v2, vl);
6857 }
6858 
6859 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_f64m1(
6860 // CHECK-RV64-NEXT:  entry:
6861 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], double* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6862 // CHECK-RV64-NEXT:    ret void
6863 //
// Verify vsuxseg4ei16_v_f64m1 (4-field indexed segment store, f64m1 data, 16-bit indices)
// lowers to the @llvm.riscv.vsuxseg4 call asserted in the CHECK lines above.
test_vsuxseg4ei16_v_f64m1(double * base,vuint16mf4_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,size_t vl)6864 void test_vsuxseg4ei16_v_f64m1 (double *base, vuint16mf4_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, size_t vl) {
6865   return vsuxseg4ei16_v_f64m1(base, bindex, v0, v1, v2, v3, vl);
6866 }
6867 
6868 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_f64m1(
6869 // CHECK-RV64-NEXT:  entry:
6870 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], double* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6871 // CHECK-RV64-NEXT:    ret void
6872 //
// Verify vsuxseg5ei16_v_f64m1 (5-field indexed segment store, f64m1 data, 16-bit indices)
// lowers to the @llvm.riscv.vsuxseg5 call asserted in the CHECK lines above.
test_vsuxseg5ei16_v_f64m1(double * base,vuint16mf4_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,size_t vl)6873 void test_vsuxseg5ei16_v_f64m1 (double *base, vuint16mf4_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, size_t vl) {
6874   return vsuxseg5ei16_v_f64m1(base, bindex, v0, v1, v2, v3, v4, vl);
6875 }
6876 
6877 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_f64m1(
6878 // CHECK-RV64-NEXT:  entry:
6879 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], double* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6880 // CHECK-RV64-NEXT:    ret void
6881 //
// Verify vsuxseg6ei16_v_f64m1 (6-field indexed segment store, f64m1 data, 16-bit indices)
// lowers to the @llvm.riscv.vsuxseg6 call asserted in the CHECK lines above.
test_vsuxseg6ei16_v_f64m1(double * base,vuint16mf4_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,size_t vl)6882 void test_vsuxseg6ei16_v_f64m1 (double *base, vuint16mf4_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, size_t vl) {
6883   return vsuxseg6ei16_v_f64m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
6884 }
6885 
6886 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_f64m1(
6887 // CHECK-RV64-NEXT:  entry:
6888 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], <vscale x 1 x double> [[V6:%.*]], double* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6889 // CHECK-RV64-NEXT:    ret void
6890 //
// Verify vsuxseg7ei16_v_f64m1 (7-field indexed segment store, f64m1 data, 16-bit indices)
// lowers to the @llvm.riscv.vsuxseg7 call asserted in the CHECK lines above.
test_vsuxseg7ei16_v_f64m1(double * base,vuint16mf4_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,vfloat64m1_t v6,size_t vl)6891 void test_vsuxseg7ei16_v_f64m1 (double *base, vuint16mf4_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, vfloat64m1_t v6, size_t vl) {
6892   return vsuxseg7ei16_v_f64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
6893 }
6894 
6895 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_f64m1(
6896 // CHECK-RV64-NEXT:  entry:
6897 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], <vscale x 1 x double> [[V6:%.*]], <vscale x 1 x double> [[V7:%.*]], double* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6898 // CHECK-RV64-NEXT:    ret void
6899 //
// Verify vsuxseg8ei16_v_f64m1 (8-field indexed segment store, f64m1 data, 16-bit indices)
// lowers to the @llvm.riscv.vsuxseg8 call asserted in the CHECK lines above.
test_vsuxseg8ei16_v_f64m1(double * base,vuint16mf4_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,vfloat64m1_t v6,vfloat64m1_t v7,size_t vl)6900 void test_vsuxseg8ei16_v_f64m1 (double *base, vuint16mf4_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, vfloat64m1_t v6, vfloat64m1_t v7, size_t vl) {
6901   return vsuxseg8ei16_v_f64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
6902 }
6903 
6904 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_f64m2(
6905 // CHECK-RV64-NEXT:  entry:
6906 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2f64.nxv2i16.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6907 // CHECK-RV64-NEXT:    ret void
6908 //
// Verify vsuxseg2ei16_v_f64m2 (2-field indexed segment store, f64m2 data, 16-bit indices)
// lowers to the @llvm.riscv.vsuxseg2 call asserted in the CHECK lines above.
test_vsuxseg2ei16_v_f64m2(double * base,vuint16mf2_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,size_t vl)6909 void test_vsuxseg2ei16_v_f64m2 (double *base, vuint16mf2_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, size_t vl) {
6910   return vsuxseg2ei16_v_f64m2(base, bindex, v0, v1, vl);
6911 }
6912 
6913 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_f64m2(
6914 // CHECK-RV64-NEXT:  entry:
6915 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2f64.nxv2i16.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], <vscale x 2 x double> [[V2:%.*]], double* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6916 // CHECK-RV64-NEXT:    ret void
6917 //
// Verify vsuxseg3ei16_v_f64m2 (3-field indexed segment store, f64m2 data, 16-bit indices)
// lowers to the @llvm.riscv.vsuxseg3 call asserted in the CHECK lines above.
test_vsuxseg3ei16_v_f64m2(double * base,vuint16mf2_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,vfloat64m2_t v2,size_t vl)6918 void test_vsuxseg3ei16_v_f64m2 (double *base, vuint16mf2_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, vfloat64m2_t v2, size_t vl) {
6919   return vsuxseg3ei16_v_f64m2(base, bindex, v0, v1, v2, vl);
6920 }
6921 
6922 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_f64m2(
6923 // CHECK-RV64-NEXT:  entry:
6924 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2f64.nxv2i16.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], <vscale x 2 x double> [[V2:%.*]], <vscale x 2 x double> [[V3:%.*]], double* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6925 // CHECK-RV64-NEXT:    ret void
6926 //
// Verify vsuxseg4ei16_v_f64m2 (4-field indexed segment store, f64m2 data, 16-bit indices)
// lowers to the @llvm.riscv.vsuxseg4 call asserted in the CHECK lines above.
test_vsuxseg4ei16_v_f64m2(double * base,vuint16mf2_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,vfloat64m2_t v2,vfloat64m2_t v3,size_t vl)6927 void test_vsuxseg4ei16_v_f64m2 (double *base, vuint16mf2_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, vfloat64m2_t v2, vfloat64m2_t v3, size_t vl) {
6928   return vsuxseg4ei16_v_f64m2(base, bindex, v0, v1, v2, v3, vl);
6929 }
6930 
6931 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_f64m4(
6932 // CHECK-RV64-NEXT:  entry:
6933 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4f64.nxv4i16.i64(<vscale x 4 x double> [[V0:%.*]], <vscale x 4 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
6934 // CHECK-RV64-NEXT:    ret void
6935 //
// Verify vsuxseg2ei16_v_f64m4 (2-field indexed segment store, f64m4 data, 16-bit indices)
// lowers to the @llvm.riscv.vsuxseg2 call asserted in the CHECK lines above.
test_vsuxseg2ei16_v_f64m4(double * base,vuint16m1_t bindex,vfloat64m4_t v0,vfloat64m4_t v1,size_t vl)6936 void test_vsuxseg2ei16_v_f64m4 (double *base, vuint16m1_t bindex, vfloat64m4_t v0, vfloat64m4_t v1, size_t vl) {
6937   return vsuxseg2ei16_v_f64m4(base, bindex, v0, v1, vl);
6938 }
6939 
6940 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_f64m1(
6941 // CHECK-RV64-NEXT:  entry:
6942 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6943 // CHECK-RV64-NEXT:    ret void
6944 //
// Verify vsuxseg2ei32_v_f64m1 (2-field indexed segment store, f64m1 data, 32-bit indices)
// lowers to the @llvm.riscv.vsuxseg2 call asserted in the CHECK lines above.
test_vsuxseg2ei32_v_f64m1(double * base,vuint32mf2_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,size_t vl)6945 void test_vsuxseg2ei32_v_f64m1 (double *base, vuint32mf2_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, size_t vl) {
6946   return vsuxseg2ei32_v_f64m1(base, bindex, v0, v1, vl);
6947 }
6948 
6949 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_f64m1(
6950 // CHECK-RV64-NEXT:  entry:
6951 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], double* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6952 // CHECK-RV64-NEXT:    ret void
6953 //
// Verify vsuxseg3ei32_v_f64m1 (3-field indexed segment store, f64m1 data, 32-bit indices)
// lowers to the @llvm.riscv.vsuxseg3 call asserted in the CHECK lines above.
test_vsuxseg3ei32_v_f64m1(double * base,vuint32mf2_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,size_t vl)6954 void test_vsuxseg3ei32_v_f64m1 (double *base, vuint32mf2_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, size_t vl) {
6955   return vsuxseg3ei32_v_f64m1(base, bindex, v0, v1, v2, vl);
6956 }
6957 
6958 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_f64m1(
6959 // CHECK-RV64-NEXT:  entry:
6960 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], double* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6961 // CHECK-RV64-NEXT:    ret void
6962 //
// Verify vsuxseg4ei32_v_f64m1 (4-field indexed segment store, f64m1 data, 32-bit indices)
// lowers to the @llvm.riscv.vsuxseg4 call asserted in the CHECK lines above.
test_vsuxseg4ei32_v_f64m1(double * base,vuint32mf2_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,size_t vl)6963 void test_vsuxseg4ei32_v_f64m1 (double *base, vuint32mf2_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, size_t vl) {
6964   return vsuxseg4ei32_v_f64m1(base, bindex, v0, v1, v2, v3, vl);
6965 }
6966 
6967 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_f64m1(
6968 // CHECK-RV64-NEXT:  entry:
6969 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], double* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6970 // CHECK-RV64-NEXT:    ret void
6971 //
// Verify vsuxseg5ei32_v_f64m1 (5-field indexed segment store, f64m1 data, 32-bit indices)
// lowers to the @llvm.riscv.vsuxseg5 call asserted in the CHECK lines above.
test_vsuxseg5ei32_v_f64m1(double * base,vuint32mf2_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,size_t vl)6972 void test_vsuxseg5ei32_v_f64m1 (double *base, vuint32mf2_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, size_t vl) {
6973   return vsuxseg5ei32_v_f64m1(base, bindex, v0, v1, v2, v3, v4, vl);
6974 }
6975 
6976 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_f64m1(
6977 // CHECK-RV64-NEXT:  entry:
6978 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], double* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6979 // CHECK-RV64-NEXT:    ret void
6980 //
// Verify vsuxseg6ei32_v_f64m1 (6-field indexed segment store, f64m1 data, 32-bit indices)
// lowers to the @llvm.riscv.vsuxseg6 call asserted in the CHECK lines above.
test_vsuxseg6ei32_v_f64m1(double * base,vuint32mf2_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,size_t vl)6981 void test_vsuxseg6ei32_v_f64m1 (double *base, vuint32mf2_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, size_t vl) {
6982   return vsuxseg6ei32_v_f64m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
6983 }
6984 
6985 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_f64m1(
6986 // CHECK-RV64-NEXT:  entry:
6987 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], <vscale x 1 x double> [[V6:%.*]], double* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6988 // CHECK-RV64-NEXT:    ret void
6989 //
// Verify vsuxseg7ei32_v_f64m1 (7-field indexed segment store, f64m1 data, 32-bit indices)
// lowers to the @llvm.riscv.vsuxseg7 call asserted in the CHECK lines above.
test_vsuxseg7ei32_v_f64m1(double * base,vuint32mf2_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,vfloat64m1_t v6,size_t vl)6990 void test_vsuxseg7ei32_v_f64m1 (double *base, vuint32mf2_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, vfloat64m1_t v6, size_t vl) {
6991   return vsuxseg7ei32_v_f64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
6992 }
6993 
6994 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_f64m1(
6995 // CHECK-RV64-NEXT:  entry:
6996 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], <vscale x 1 x double> [[V6:%.*]], <vscale x 1 x double> [[V7:%.*]], double* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
6997 // CHECK-RV64-NEXT:    ret void
6998 //
// Verify vsuxseg8ei32_v_f64m1 (8-field indexed segment store, f64m1 data, 32-bit indices)
// lowers to the @llvm.riscv.vsuxseg8 call asserted in the CHECK lines above.
test_vsuxseg8ei32_v_f64m1(double * base,vuint32mf2_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,vfloat64m1_t v6,vfloat64m1_t v7,size_t vl)6999 void test_vsuxseg8ei32_v_f64m1 (double *base, vuint32mf2_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, vfloat64m1_t v6, vfloat64m1_t v7, size_t vl) {
7000   return vsuxseg8ei32_v_f64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
7001 }
7002 
7003 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_f64m2(
7004 // CHECK-RV64-NEXT:  entry:
7005 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2f64.nxv2i32.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
7006 // CHECK-RV64-NEXT:    ret void
7007 //
// Verify vsuxseg2ei32_v_f64m2 (2-field indexed segment store, f64m2 data, 32-bit indices)
// lowers to the @llvm.riscv.vsuxseg2 call asserted in the CHECK lines above.
test_vsuxseg2ei32_v_f64m2(double * base,vuint32m1_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,size_t vl)7008 void test_vsuxseg2ei32_v_f64m2 (double *base, vuint32m1_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, size_t vl) {
7009   return vsuxseg2ei32_v_f64m2(base, bindex, v0, v1, vl);
7010 }
7011 
7012 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_f64m2(
7013 // CHECK-RV64-NEXT:  entry:
7014 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2f64.nxv2i32.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], <vscale x 2 x double> [[V2:%.*]], double* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
7015 // CHECK-RV64-NEXT:    ret void
7016 //
// Verify vsuxseg3ei32_v_f64m2 (3-field indexed segment store, f64m2 data, 32-bit indices)
// lowers to the @llvm.riscv.vsuxseg3 call asserted in the CHECK lines above.
test_vsuxseg3ei32_v_f64m2(double * base,vuint32m1_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,vfloat64m2_t v2,size_t vl)7017 void test_vsuxseg3ei32_v_f64m2 (double *base, vuint32m1_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, vfloat64m2_t v2, size_t vl) {
7018   return vsuxseg3ei32_v_f64m2(base, bindex, v0, v1, v2, vl);
7019 }
7020 
7021 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_f64m2(
7022 // CHECK-RV64-NEXT:  entry:
7023 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2f64.nxv2i32.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], <vscale x 2 x double> [[V2:%.*]], <vscale x 2 x double> [[V3:%.*]], double* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
7024 // CHECK-RV64-NEXT:    ret void
7025 //
// Verify vsuxseg4ei32_v_f64m2 (4-field indexed segment store, f64m2 data, 32-bit indices)
// lowers to the @llvm.riscv.vsuxseg4 call asserted in the CHECK lines above.
test_vsuxseg4ei32_v_f64m2(double * base,vuint32m1_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,vfloat64m2_t v2,vfloat64m2_t v3,size_t vl)7026 void test_vsuxseg4ei32_v_f64m2 (double *base, vuint32m1_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, vfloat64m2_t v2, vfloat64m2_t v3, size_t vl) {
7027   return vsuxseg4ei32_v_f64m2(base, bindex, v0, v1, v2, v3, vl);
7028 }
7029 
7030 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_f64m4(
7031 // CHECK-RV64-NEXT:  entry:
7032 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4f64.nxv4i32.i64(<vscale x 4 x double> [[V0:%.*]], <vscale x 4 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
7033 // CHECK-RV64-NEXT:    ret void
7034 //
// Verify vsuxseg2ei32_v_f64m4 (2-field indexed segment store, f64m4 data, 32-bit indices)
// lowers to the @llvm.riscv.vsuxseg2 call asserted in the CHECK lines above.
test_vsuxseg2ei32_v_f64m4(double * base,vuint32m2_t bindex,vfloat64m4_t v0,vfloat64m4_t v1,size_t vl)7035 void test_vsuxseg2ei32_v_f64m4 (double *base, vuint32m2_t bindex, vfloat64m4_t v0, vfloat64m4_t v1, size_t vl) {
7036   return vsuxseg2ei32_v_f64m4(base, bindex, v0, v1, vl);
7037 }
7038 
7039 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_f64m1(
7040 // CHECK-RV64-NEXT:  entry:
7041 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
7042 // CHECK-RV64-NEXT:    ret void
7043 //
// Verify vsuxseg2ei64_v_f64m1 (2-field indexed segment store, f64m1 data, 64-bit indices)
// lowers to the @llvm.riscv.vsuxseg2 call asserted in the CHECK lines above.
test_vsuxseg2ei64_v_f64m1(double * base,vuint64m1_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,size_t vl)7044 void test_vsuxseg2ei64_v_f64m1 (double *base, vuint64m1_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, size_t vl) {
7045   return vsuxseg2ei64_v_f64m1(base, bindex, v0, v1, vl);
7046 }
7047 
7048 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_f64m1(
7049 // CHECK-RV64-NEXT:  entry:
7050 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], double* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
7051 // CHECK-RV64-NEXT:    ret void
7052 //
// Verify vsuxseg3ei64_v_f64m1 (3-field indexed segment store, f64m1 data, 64-bit indices)
// lowers to the @llvm.riscv.vsuxseg3 call asserted in the CHECK lines above.
test_vsuxseg3ei64_v_f64m1(double * base,vuint64m1_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,size_t vl)7053 void test_vsuxseg3ei64_v_f64m1 (double *base, vuint64m1_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, size_t vl) {
7054   return vsuxseg3ei64_v_f64m1(base, bindex, v0, v1, v2, vl);
7055 }
7056 
7057 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_f64m1(
7058 // CHECK-RV64-NEXT:  entry:
7059 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], double* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
7060 // CHECK-RV64-NEXT:    ret void
7061 //
// Verify vsuxseg4ei64_v_f64m1 (4-field indexed segment store, f64m1 data, 64-bit indices)
// lowers to the @llvm.riscv.vsuxseg4 call asserted in the CHECK lines above.
test_vsuxseg4ei64_v_f64m1(double * base,vuint64m1_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,size_t vl)7062 void test_vsuxseg4ei64_v_f64m1 (double *base, vuint64m1_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, size_t vl) {
7063   return vsuxseg4ei64_v_f64m1(base, bindex, v0, v1, v2, v3, vl);
7064 }
7065 
7066 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_f64m1(
7067 // CHECK-RV64-NEXT:  entry:
7068 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], double* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
7069 // CHECK-RV64-NEXT:    ret void
7070 //
// Verify vsuxseg5ei64_v_f64m1 (5-field indexed segment store, f64m1 data, 64-bit indices)
// lowers to the @llvm.riscv.vsuxseg5 call asserted in the CHECK lines above.
test_vsuxseg5ei64_v_f64m1(double * base,vuint64m1_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,size_t vl)7071 void test_vsuxseg5ei64_v_f64m1 (double *base, vuint64m1_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, size_t vl) {
7072   return vsuxseg5ei64_v_f64m1(base, bindex, v0, v1, v2, v3, v4, vl);
7073 }
7074 
7075 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_f64m1(
7076 // CHECK-RV64-NEXT:  entry:
7077 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], double* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
7078 // CHECK-RV64-NEXT:    ret void
7079 //
// Verify vsuxseg6ei64_v_f64m1 (6-field indexed segment store, f64m1 data, 64-bit indices)
// lowers to the @llvm.riscv.vsuxseg6 call asserted in the CHECK lines above.
test_vsuxseg6ei64_v_f64m1(double * base,vuint64m1_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,size_t vl)7080 void test_vsuxseg6ei64_v_f64m1 (double *base, vuint64m1_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, size_t vl) {
7081   return vsuxseg6ei64_v_f64m1(base, bindex, v0, v1, v2, v3, v4, v5, vl);
7082 }
7083 
7084 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_f64m1(
7085 // CHECK-RV64-NEXT:  entry:
7086 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], <vscale x 1 x double> [[V6:%.*]], double* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
7087 // CHECK-RV64-NEXT:    ret void
7088 //
// Verify vsuxseg7ei64_v_f64m1 (7-field indexed segment store, f64m1 data, 64-bit indices)
// lowers to the @llvm.riscv.vsuxseg7 call asserted in the CHECK lines above.
test_vsuxseg7ei64_v_f64m1(double * base,vuint64m1_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,vfloat64m1_t v6,size_t vl)7089 void test_vsuxseg7ei64_v_f64m1 (double *base, vuint64m1_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, vfloat64m1_t v6, size_t vl) {
7090   return vsuxseg7ei64_v_f64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
7091 }
7092 
7093 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_f64m1(
7094 // CHECK-RV64-NEXT:  entry:
7095 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], <vscale x 1 x double> [[V6:%.*]], <vscale x 1 x double> [[V7:%.*]], double* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
7096 // CHECK-RV64-NEXT:    ret void
7097 //
// Verify vsuxseg8ei64_v_f64m1 (8-field indexed segment store, f64m1 data, 64-bit indices)
// lowers to the @llvm.riscv.vsuxseg8 call asserted in the CHECK lines above.
test_vsuxseg8ei64_v_f64m1(double * base,vuint64m1_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,vfloat64m1_t v6,vfloat64m1_t v7,size_t vl)7098 void test_vsuxseg8ei64_v_f64m1 (double *base, vuint64m1_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, vfloat64m1_t v6, vfloat64m1_t v7, size_t vl) {
7099   return vsuxseg8ei64_v_f64m1(base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
7100 }
7101 
7102 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_f64m2(
7103 // CHECK-RV64-NEXT:  entry:
7104 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv2f64.nxv2i64.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
7105 // CHECK-RV64-NEXT:    ret void
7106 //
// Verify vsuxseg2ei64_v_f64m2 (2-field indexed segment store, f64m2 data, 64-bit indices)
// lowers to the @llvm.riscv.vsuxseg2 call asserted in the CHECK lines above.
test_vsuxseg2ei64_v_f64m2(double * base,vuint64m2_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,size_t vl)7107 void test_vsuxseg2ei64_v_f64m2 (double *base, vuint64m2_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, size_t vl) {
7108   return vsuxseg2ei64_v_f64m2(base, bindex, v0, v1, vl);
7109 }
7110 
7111 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_f64m2(
7112 // CHECK-RV64-NEXT:  entry:
7113 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.nxv2f64.nxv2i64.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], <vscale x 2 x double> [[V2:%.*]], double* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
7114 // CHECK-RV64-NEXT:    ret void
7115 //
// Verify vsuxseg3ei64_v_f64m2 (3-field indexed segment store, f64m2 data, 64-bit indices)
// lowers to the @llvm.riscv.vsuxseg3 call asserted in the CHECK lines above.
test_vsuxseg3ei64_v_f64m2(double * base,vuint64m2_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,vfloat64m2_t v2,size_t vl)7116 void test_vsuxseg3ei64_v_f64m2 (double *base, vuint64m2_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, vfloat64m2_t v2, size_t vl) {
7117   return vsuxseg3ei64_v_f64m2(base, bindex, v0, v1, v2, vl);
7118 }
7119 
7120 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_f64m2(
7121 // CHECK-RV64-NEXT:  entry:
7122 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.nxv2f64.nxv2i64.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], <vscale x 2 x double> [[V2:%.*]], <vscale x 2 x double> [[V3:%.*]], double* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
7123 // CHECK-RV64-NEXT:    ret void
7124 //
// Verify vsuxseg4ei64_v_f64m2 (4-field indexed segment store, f64m2 data, 64-bit indices)
// lowers to the @llvm.riscv.vsuxseg4 call asserted in the CHECK lines above.
test_vsuxseg4ei64_v_f64m2(double * base,vuint64m2_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,vfloat64m2_t v2,vfloat64m2_t v3,size_t vl)7125 void test_vsuxseg4ei64_v_f64m2 (double *base, vuint64m2_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, vfloat64m2_t v2, vfloat64m2_t v3, size_t vl) {
7126   return vsuxseg4ei64_v_f64m2(base, bindex, v0, v1, v2, v3, vl);
7127 }
7128 
7129 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_f64m4(
7130 // CHECK-RV64-NEXT:  entry:
7131 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.nxv4f64.nxv4i64.i64(<vscale x 4 x double> [[V0:%.*]], <vscale x 4 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
7132 // CHECK-RV64-NEXT:    ret void
7133 //
// Verify vsuxseg2ei64_v_f64m4 (2-field indexed segment store, f64m4 data, 64-bit indices)
// lowers to the @llvm.riscv.vsuxseg2 call asserted in the CHECK lines above.
test_vsuxseg2ei64_v_f64m4(double * base,vuint64m4_t bindex,vfloat64m4_t v0,vfloat64m4_t v1,size_t vl)7134 void test_vsuxseg2ei64_v_f64m4 (double *base, vuint64m4_t bindex, vfloat64m4_t v0, vfloat64m4_t v1, size_t vl) {
7135   return vsuxseg2ei64_v_f64m4(base, bindex, v0, v1, vl);
7136 }
7137 
7138 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i8mf8_m(
7139 // CHECK-RV64-NEXT:  entry:
7140 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7141 // CHECK-RV64-NEXT:    ret void
7142 //
// Masked variant: verify vsuxseg2ei8_v_i8mf8_m (2-field indexed segment store, i8mf8 data,
// 8-bit indices) lowers to @llvm.riscv.vsuxseg2.mask with the vbool64_t mask as an
// <vscale x 1 x i1> operand, as asserted in the CHECK lines above.
test_vsuxseg2ei8_v_i8mf8_m(vbool64_t mask,int8_t * base,vuint8mf8_t bindex,vint8mf8_t v0,vint8mf8_t v1,size_t vl)7143 void test_vsuxseg2ei8_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint8mf8_t bindex, vint8mf8_t v0, vint8mf8_t v1, size_t vl) {
7144   return vsuxseg2ei8_v_i8mf8_m(mask, base, bindex, v0, v1, vl);
7145 }
7146 
7147 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i8mf8_m(
7148 // CHECK-RV64-NEXT:  entry:
7149 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7150 // CHECK-RV64-NEXT:    ret void
7151 //
// Masked variant: verify vsuxseg3ei8_v_i8mf8_m lowers to @llvm.riscv.vsuxseg3.mask
// (3-field indexed segment store, i8mf8 data, 8-bit indices; CHECK lines above).
test_vsuxseg3ei8_v_i8mf8_m(vbool64_t mask,int8_t * base,vuint8mf8_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,size_t vl)7152 void test_vsuxseg3ei8_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint8mf8_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, size_t vl) {
7153   return vsuxseg3ei8_v_i8mf8_m(mask, base, bindex, v0, v1, v2, vl);
7154 }
7155 
7156 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i8mf8_m(
7157 // CHECK-RV64-NEXT:  entry:
7158 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7159 // CHECK-RV64-NEXT:    ret void
7160 //
// Masked variant: verify vsuxseg4ei8_v_i8mf8_m lowers to @llvm.riscv.vsuxseg4.mask
// (4-field indexed segment store, i8mf8 data, 8-bit indices; CHECK lines above).
test_vsuxseg4ei8_v_i8mf8_m(vbool64_t mask,int8_t * base,vuint8mf8_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,size_t vl)7161 void test_vsuxseg4ei8_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint8mf8_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, size_t vl) {
7162   return vsuxseg4ei8_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, vl);
7163 }
7164 
7165 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i8mf8_m(
7166 // CHECK-RV64-NEXT:  entry:
7167 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7168 // CHECK-RV64-NEXT:    ret void
7169 //
// Masked variant: verify vsuxseg5ei8_v_i8mf8_m lowers to @llvm.riscv.vsuxseg5.mask
// (5-field indexed segment store, i8mf8 data, 8-bit indices; CHECK lines above).
test_vsuxseg5ei8_v_i8mf8_m(vbool64_t mask,int8_t * base,vuint8mf8_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,size_t vl)7170 void test_vsuxseg5ei8_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint8mf8_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, size_t vl) {
7171   return vsuxseg5ei8_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
7172 }
7173 
7174 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i8mf8_m(
7175 // CHECK-RV64-NEXT:  entry:
7176 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7177 // CHECK-RV64-NEXT:    ret void
7178 //
// Masked variant: verify vsuxseg6ei8_v_i8mf8_m lowers to @llvm.riscv.vsuxseg6.mask
// (6-field indexed segment store, i8mf8 data, 8-bit indices; CHECK lines above).
test_vsuxseg6ei8_v_i8mf8_m(vbool64_t mask,int8_t * base,vuint8mf8_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,vint8mf8_t v5,size_t vl)7179 void test_vsuxseg6ei8_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint8mf8_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, size_t vl) {
7180   return vsuxseg6ei8_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
7181 }
7182 
7183 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i8mf8_m(
7184 // CHECK-RV64-NEXT:  entry:
7185 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7186 // CHECK-RV64-NEXT:    ret void
7187 //
// Masked variant: verify vsuxseg7ei8_v_i8mf8_m lowers to @llvm.riscv.vsuxseg7.mask
// (7-field indexed segment store, i8mf8 data, 8-bit indices; CHECK lines above).
test_vsuxseg7ei8_v_i8mf8_m(vbool64_t mask,int8_t * base,vuint8mf8_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,vint8mf8_t v5,vint8mf8_t v6,size_t vl)7188 void test_vsuxseg7ei8_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint8mf8_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, vint8mf8_t v6, size_t vl) {
7189   return vsuxseg7ei8_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
7190 }
7191 
7192 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i8mf8_m(
7193 // CHECK-RV64-NEXT:  entry:
7194 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], <vscale x 1 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7195 // CHECK-RV64-NEXT:    ret void
7196 //
// Masked variant: verify vsuxseg8ei8_v_i8mf8_m lowers to @llvm.riscv.vsuxseg8.mask
// (8-field indexed segment store, i8mf8 data, 8-bit indices; CHECK lines above).
test_vsuxseg8ei8_v_i8mf8_m(vbool64_t mask,int8_t * base,vuint8mf8_t bindex,vint8mf8_t v0,vint8mf8_t v1,vint8mf8_t v2,vint8mf8_t v3,vint8mf8_t v4,vint8mf8_t v5,vint8mf8_t v6,vint8mf8_t v7,size_t vl)7197 void test_vsuxseg8ei8_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint8mf8_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, vint8mf8_t v6, vint8mf8_t v7, size_t vl) {
7198   return vsuxseg8ei8_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
7199 }
7200 
7201 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i8mf4_m(
7202 // CHECK-RV64-NEXT:  entry:
7203 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7204 // CHECK-RV64-NEXT:    ret void
7205 //
// Masked 2-field segment indexed store, i8mf4 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i8.i64.
void test_vsuxseg2ei8_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint8mf4_t bindex, vint8mf4_t v0, vint8mf4_t v1, size_t vl) {
  return vsuxseg2ei8_v_i8mf4_m(mask, base, bindex, v0, v1, vl);
}
7209 
7210 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i8mf4_m(
7211 // CHECK-RV64-NEXT:  entry:
7212 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7213 // CHECK-RV64-NEXT:    ret void
7214 //
// Masked 3-field segment indexed store, i8mf4 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i8.i64.
void test_vsuxseg3ei8_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint8mf4_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, size_t vl) {
  return vsuxseg3ei8_v_i8mf4_m(mask, base, bindex, v0, v1, v2, vl);
}
7218 
7219 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i8mf4_m(
7220 // CHECK-RV64-NEXT:  entry:
7221 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7222 // CHECK-RV64-NEXT:    ret void
7223 //
// Masked 4-field segment indexed store, i8mf4 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i8.i64.
void test_vsuxseg4ei8_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint8mf4_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, size_t vl) {
  return vsuxseg4ei8_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
7227 
7228 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i8mf4_m(
7229 // CHECK-RV64-NEXT:  entry:
7230 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7231 // CHECK-RV64-NEXT:    ret void
7232 //
// Masked 5-field segment indexed store, i8mf4 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i8.i64.
void test_vsuxseg5ei8_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint8mf4_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, size_t vl) {
  return vsuxseg5ei8_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
7236 
7237 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i8mf4_m(
7238 // CHECK-RV64-NEXT:  entry:
7239 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7240 // CHECK-RV64-NEXT:    ret void
7241 //
// Masked 6-field segment indexed store, i8mf4 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i8.i64.
void test_vsuxseg6ei8_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint8mf4_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, size_t vl) {
  return vsuxseg6ei8_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
7245 
7246 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i8mf4_m(
7247 // CHECK-RV64-NEXT:  entry:
7248 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7249 // CHECK-RV64-NEXT:    ret void
7250 //
// Masked 7-field segment indexed store, i8mf4 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg7.mask.nxv2i8.nxv2i8.i64.
void test_vsuxseg7ei8_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint8mf4_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, vint8mf4_t v6, size_t vl) {
  return vsuxseg7ei8_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
7254 
7255 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i8mf4_m(
7256 // CHECK-RV64-NEXT:  entry:
7257 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], <vscale x 2 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7258 // CHECK-RV64-NEXT:    ret void
7259 //
// Masked 8-field segment indexed store, i8mf4 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg8.mask.nxv2i8.nxv2i8.i64.
void test_vsuxseg8ei8_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint8mf4_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, vint8mf4_t v6, vint8mf4_t v7, size_t vl) {
  return vsuxseg8ei8_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
7263 
7264 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i8mf2_m(
7265 // CHECK-RV64-NEXT:  entry:
7266 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7267 // CHECK-RV64-NEXT:    ret void
7268 //
// Masked 2-field segment indexed store, i8mf2 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i8.i64.
void test_vsuxseg2ei8_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint8mf2_t bindex, vint8mf2_t v0, vint8mf2_t v1, size_t vl) {
  return vsuxseg2ei8_v_i8mf2_m(mask, base, bindex, v0, v1, vl);
}
7272 
7273 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i8mf2_m(
7274 // CHECK-RV64-NEXT:  entry:
7275 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7276 // CHECK-RV64-NEXT:    ret void
7277 //
// Masked 3-field segment indexed store, i8mf2 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i8.i64.
void test_vsuxseg3ei8_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint8mf2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, size_t vl) {
  return vsuxseg3ei8_v_i8mf2_m(mask, base, bindex, v0, v1, v2, vl);
}
7281 
7282 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i8mf2_m(
7283 // CHECK-RV64-NEXT:  entry:
7284 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7285 // CHECK-RV64-NEXT:    ret void
7286 //
// Masked 4-field segment indexed store, i8mf2 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i8.i64.
void test_vsuxseg4ei8_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint8mf2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, size_t vl) {
  return vsuxseg4ei8_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
7290 
7291 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i8mf2_m(
7292 // CHECK-RV64-NEXT:  entry:
7293 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7294 // CHECK-RV64-NEXT:    ret void
7295 //
// Masked 5-field segment indexed store, i8mf2 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i8.i64.
void test_vsuxseg5ei8_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint8mf2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, size_t vl) {
  return vsuxseg5ei8_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
7299 
7300 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i8mf2_m(
7301 // CHECK-RV64-NEXT:  entry:
7302 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7303 // CHECK-RV64-NEXT:    ret void
7304 //
// Masked 6-field segment indexed store, i8mf2 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg6.mask.nxv4i8.nxv4i8.i64.
void test_vsuxseg6ei8_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint8mf2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, size_t vl) {
  return vsuxseg6ei8_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
7308 
7309 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i8mf2_m(
7310 // CHECK-RV64-NEXT:  entry:
7311 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7312 // CHECK-RV64-NEXT:    ret void
7313 //
// Masked 7-field segment indexed store, i8mf2 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg7.mask.nxv4i8.nxv4i8.i64.
void test_vsuxseg7ei8_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint8mf2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, vint8mf2_t v6, size_t vl) {
  return vsuxseg7ei8_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
7317 
7318 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i8mf2_m(
7319 // CHECK-RV64-NEXT:  entry:
7320 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], <vscale x 4 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7321 // CHECK-RV64-NEXT:    ret void
7322 //
// Masked 8-field segment indexed store, i8mf2 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg8.mask.nxv4i8.nxv4i8.i64.
void test_vsuxseg8ei8_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint8mf2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, vint8mf2_t v6, vint8mf2_t v7, size_t vl) {
  return vsuxseg8ei8_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
7326 
7327 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i8m1_m(
7328 // CHECK-RV64-NEXT:  entry:
7329 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7330 // CHECK-RV64-NEXT:    ret void
7331 //
// Masked 2-field segment indexed store, i8m1 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i8.i64.
void test_vsuxseg2ei8_v_i8m1_m (vbool8_t mask, int8_t *base, vuint8m1_t bindex, vint8m1_t v0, vint8m1_t v1, size_t vl) {
  return vsuxseg2ei8_v_i8m1_m(mask, base, bindex, v0, v1, vl);
}
7335 
7336 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i8m1_m(
7337 // CHECK-RV64-NEXT:  entry:
7338 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7339 // CHECK-RV64-NEXT:    ret void
7340 //
// Masked 3-field segment indexed store, i8m1 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i8.i64.
void test_vsuxseg3ei8_v_i8m1_m (vbool8_t mask, int8_t *base, vuint8m1_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, size_t vl) {
  return vsuxseg3ei8_v_i8m1_m(mask, base, bindex, v0, v1, v2, vl);
}
7344 
7345 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i8m1_m(
7346 // CHECK-RV64-NEXT:  entry:
7347 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7348 // CHECK-RV64-NEXT:    ret void
7349 //
// Masked 4-field segment indexed store, i8m1 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i8.i64.
void test_vsuxseg4ei8_v_i8m1_m (vbool8_t mask, int8_t *base, vuint8m1_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, size_t vl) {
  return vsuxseg4ei8_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
7353 
7354 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i8m1_m(
7355 // CHECK-RV64-NEXT:  entry:
7356 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7357 // CHECK-RV64-NEXT:    ret void
7358 //
// Masked 5-field segment indexed store, i8m1 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i8.i64.
void test_vsuxseg5ei8_v_i8m1_m (vbool8_t mask, int8_t *base, vuint8m1_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, size_t vl) {
  return vsuxseg5ei8_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
7362 
7363 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i8m1_m(
7364 // CHECK-RV64-NEXT:  entry:
7365 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7366 // CHECK-RV64-NEXT:    ret void
7367 //
// Masked 6-field segment indexed store, i8m1 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i8.i64.
void test_vsuxseg6ei8_v_i8m1_m (vbool8_t mask, int8_t *base, vuint8m1_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, size_t vl) {
  return vsuxseg6ei8_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
7371 
7372 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i8m1_m(
7373 // CHECK-RV64-NEXT:  entry:
7374 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7375 // CHECK-RV64-NEXT:    ret void
7376 //
// Masked 7-field segment indexed store, i8m1 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i8.i64.
void test_vsuxseg7ei8_v_i8m1_m (vbool8_t mask, int8_t *base, vuint8m1_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, vint8m1_t v6, size_t vl) {
  return vsuxseg7ei8_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
7380 
7381 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i8m1_m(
7382 // CHECK-RV64-NEXT:  entry:
7383 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], <vscale x 8 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7384 // CHECK-RV64-NEXT:    ret void
7385 //
// Masked 8-field segment indexed store, i8m1 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i8.i64.
void test_vsuxseg8ei8_v_i8m1_m (vbool8_t mask, int8_t *base, vuint8m1_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, vint8m1_t v6, vint8m1_t v7, size_t vl) {
  return vsuxseg8ei8_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
7389 
7390 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i8m2_m(
7391 // CHECK-RV64-NEXT:  entry:
7392 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7393 // CHECK-RV64-NEXT:    ret void
7394 //
// Masked 2-field segment indexed store, i8m2 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i8.i64.
void test_vsuxseg2ei8_v_i8m2_m (vbool4_t mask, int8_t *base, vuint8m2_t bindex, vint8m2_t v0, vint8m2_t v1, size_t vl) {
  return vsuxseg2ei8_v_i8m2_m(mask, base, bindex, v0, v1, vl);
}
7398 
7399 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i8m2_m(
7400 // CHECK-RV64-NEXT:  entry:
7401 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7402 // CHECK-RV64-NEXT:    ret void
7403 //
// Masked 3-field segment indexed store, i8m2 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i8.i64.
void test_vsuxseg3ei8_v_i8m2_m (vbool4_t mask, int8_t *base, vuint8m2_t bindex, vint8m2_t v0, vint8m2_t v1, vint8m2_t v2, size_t vl) {
  return vsuxseg3ei8_v_i8m2_m(mask, base, bindex, v0, v1, v2, vl);
}
7407 
7408 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i8m2_m(
7409 // CHECK-RV64-NEXT:  entry:
7410 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], <vscale x 16 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7411 // CHECK-RV64-NEXT:    ret void
7412 //
// Masked 4-field segment indexed store, i8m2 data with 8-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i8.i64.
void test_vsuxseg4ei8_v_i8m2_m (vbool4_t mask, int8_t *base, vuint8m2_t bindex, vint8m2_t v0, vint8m2_t v1, vint8m2_t v2, vint8m2_t v3, size_t vl) {
  return vsuxseg4ei8_v_i8m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
7416 
7417 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i8m4_m(
7418 // CHECK-RV64-NEXT:  entry:
7419 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[V0:%.*]], <vscale x 32 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7420 // CHECK-RV64-NEXT:    ret void
7421 //
// Masked 2-field segment indexed store, i8m4 data with 8-bit indices (largest
// LMUL that still permits seg2); the CHECK lines above pin its lowering to
// @llvm.riscv.vsuxseg2.mask.nxv32i8.nxv32i8.i64.
void test_vsuxseg2ei8_v_i8m4_m (vbool2_t mask, int8_t *base, vuint8m4_t bindex, vint8m4_t v0, vint8m4_t v1, size_t vl) {
  return vsuxseg2ei8_v_i8m4_m(mask, base, bindex, v0, v1, vl);
}
7425 
7426 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i8mf8_m(
7427 // CHECK-RV64-NEXT:  entry:
7428 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7429 // CHECK-RV64-NEXT:    ret void
7430 //
// Masked 2-field segment indexed store, i8mf8 data with 16-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i16.i64.
void test_vsuxseg2ei16_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint16mf4_t bindex, vint8mf8_t v0, vint8mf8_t v1, size_t vl) {
  return vsuxseg2ei16_v_i8mf8_m(mask, base, bindex, v0, v1, vl);
}
7434 
7435 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i8mf8_m(
7436 // CHECK-RV64-NEXT:  entry:
7437 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7438 // CHECK-RV64-NEXT:    ret void
7439 //
// Masked 3-field segment indexed store, i8mf8 data with 16-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg3.mask.nxv1i8.nxv1i16.i64.
void test_vsuxseg3ei16_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint16mf4_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, size_t vl) {
  return vsuxseg3ei16_v_i8mf8_m(mask, base, bindex, v0, v1, v2, vl);
}
7443 
7444 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i8mf8_m(
7445 // CHECK-RV64-NEXT:  entry:
7446 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7447 // CHECK-RV64-NEXT:    ret void
7448 //
// Masked 4-field segment indexed store, i8mf8 data with 16-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg4.mask.nxv1i8.nxv1i16.i64.
void test_vsuxseg4ei16_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint16mf4_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, size_t vl) {
  return vsuxseg4ei16_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
7452 
7453 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i8mf8_m(
7454 // CHECK-RV64-NEXT:  entry:
7455 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7456 // CHECK-RV64-NEXT:    ret void
7457 //
// Masked 5-field segment indexed store, i8mf8 data with 16-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg5.mask.nxv1i8.nxv1i16.i64.
void test_vsuxseg5ei16_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint16mf4_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, size_t vl) {
  return vsuxseg5ei16_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
7461 
7462 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i8mf8_m(
7463 // CHECK-RV64-NEXT:  entry:
7464 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7465 // CHECK-RV64-NEXT:    ret void
7466 //
// Masked 6-field segment indexed store, i8mf8 data with 16-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg6.mask.nxv1i8.nxv1i16.i64.
void test_vsuxseg6ei16_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint16mf4_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, size_t vl) {
  return vsuxseg6ei16_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
7470 
7471 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i8mf8_m(
7472 // CHECK-RV64-NEXT:  entry:
7473 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7474 // CHECK-RV64-NEXT:    ret void
7475 //
// Masked 7-field segment indexed store, i8mf8 data with 16-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg7.mask.nxv1i8.nxv1i16.i64.
void test_vsuxseg7ei16_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint16mf4_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, vint8mf8_t v6, size_t vl) {
  return vsuxseg7ei16_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
7479 
7480 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i8mf8_m(
7481 // CHECK-RV64-NEXT:  entry:
7482 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], <vscale x 1 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7483 // CHECK-RV64-NEXT:    ret void
7484 //
// Masked 8-field segment indexed store, i8mf8 data with 16-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg8.mask.nxv1i8.nxv1i16.i64.
void test_vsuxseg8ei16_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint16mf4_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, vint8mf8_t v6, vint8mf8_t v7, size_t vl) {
  return vsuxseg8ei16_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
7488 
7489 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i8mf4_m(
7490 // CHECK-RV64-NEXT:  entry:
7491 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7492 // CHECK-RV64-NEXT:    ret void
7493 //
// Masked 2-field segment indexed store, i8mf4 data with 16-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i16.i64.
void test_vsuxseg2ei16_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint16mf2_t bindex, vint8mf4_t v0, vint8mf4_t v1, size_t vl) {
  return vsuxseg2ei16_v_i8mf4_m(mask, base, bindex, v0, v1, vl);
}
7497 
7498 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i8mf4_m(
7499 // CHECK-RV64-NEXT:  entry:
7500 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7501 // CHECK-RV64-NEXT:    ret void
7502 //
// Masked 3-field segment indexed store, i8mf4 data with 16-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i16.i64.
void test_vsuxseg3ei16_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint16mf2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, size_t vl) {
  return vsuxseg3ei16_v_i8mf4_m(mask, base, bindex, v0, v1, v2, vl);
}
7506 
7507 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i8mf4_m(
7508 // CHECK-RV64-NEXT:  entry:
7509 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7510 // CHECK-RV64-NEXT:    ret void
7511 //
// Masked 4-field segment indexed store, i8mf4 data with 16-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i16.i64.
void test_vsuxseg4ei16_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint16mf2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, size_t vl) {
  return vsuxseg4ei16_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
7515 
7516 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i8mf4_m(
7517 // CHECK-RV64-NEXT:  entry:
7518 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7519 // CHECK-RV64-NEXT:    ret void
7520 //
// Masked 5-field segment indexed store, i8mf4 data with 16-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i16.i64.
void test_vsuxseg5ei16_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint16mf2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, size_t vl) {
  return vsuxseg5ei16_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
7524 
7525 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i8mf4_m(
7526 // CHECK-RV64-NEXT:  entry:
7527 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7528 // CHECK-RV64-NEXT:    ret void
7529 //
// Masked 6-field segment indexed store, i8mf4 data with 16-bit indices; the
// CHECK lines above pin its lowering to @llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i16.i64.
void test_vsuxseg6ei16_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint16mf2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, size_t vl) {
  return vsuxseg6ei16_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
7533 
7534 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i8mf4_m(
7535 // CHECK-RV64-NEXT:  entry:
7536 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7537 // CHECK-RV64-NEXT:    ret void
7538 //
void test_vsuxseg7ei16_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint16mf2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, vint8mf4_t v6, size_t vl) {
  // Exercises the masked 7-segment indexed store intrinsic (i8mf4 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg7.mask.
  return vsuxseg7ei16_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
7542 
7543 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i8mf4_m(
7544 // CHECK-RV64-NEXT:  entry:
7545 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], <vscale x 2 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7546 // CHECK-RV64-NEXT:    ret void
7547 //
void test_vsuxseg8ei16_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint16mf2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, vint8mf4_t v6, vint8mf4_t v7, size_t vl) {
  // Exercises the masked 8-segment indexed store intrinsic (i8mf4 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg8.mask.
  return vsuxseg8ei16_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
7551 
7552 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i8mf2_m(
7553 // CHECK-RV64-NEXT:  entry:
7554 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7555 // CHECK-RV64-NEXT:    ret void
7556 //
void test_vsuxseg2ei16_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint16m1_t bindex, vint8mf2_t v0, vint8mf2_t v1, size_t vl) {
  // Exercises the masked 2-segment indexed store intrinsic (i8mf2 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg2.mask.
  return vsuxseg2ei16_v_i8mf2_m(mask, base, bindex, v0, v1, vl);
}
7560 
7561 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i8mf2_m(
7562 // CHECK-RV64-NEXT:  entry:
7563 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7564 // CHECK-RV64-NEXT:    ret void
7565 //
void test_vsuxseg3ei16_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint16m1_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, size_t vl) {
  // Exercises the masked 3-segment indexed store intrinsic (i8mf2 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg3.mask.
  return vsuxseg3ei16_v_i8mf2_m(mask, base, bindex, v0, v1, v2, vl);
}
7569 
7570 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i8mf2_m(
7571 // CHECK-RV64-NEXT:  entry:
7572 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7573 // CHECK-RV64-NEXT:    ret void
7574 //
void test_vsuxseg4ei16_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint16m1_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, size_t vl) {
  // Exercises the masked 4-segment indexed store intrinsic (i8mf2 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg4.mask.
  return vsuxseg4ei16_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
7578 
7579 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i8mf2_m(
7580 // CHECK-RV64-NEXT:  entry:
7581 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7582 // CHECK-RV64-NEXT:    ret void
7583 //
void test_vsuxseg5ei16_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint16m1_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, size_t vl) {
  // Exercises the masked 5-segment indexed store intrinsic (i8mf2 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg5.mask.
  return vsuxseg5ei16_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
7587 
7588 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i8mf2_m(
7589 // CHECK-RV64-NEXT:  entry:
7590 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7591 // CHECK-RV64-NEXT:    ret void
7592 //
void test_vsuxseg6ei16_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint16m1_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, size_t vl) {
  // Exercises the masked 6-segment indexed store intrinsic (i8mf2 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg6.mask.
  return vsuxseg6ei16_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
7596 
7597 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i8mf2_m(
7598 // CHECK-RV64-NEXT:  entry:
7599 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7600 // CHECK-RV64-NEXT:    ret void
7601 //
void test_vsuxseg7ei16_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint16m1_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, vint8mf2_t v6, size_t vl) {
  // Exercises the masked 7-segment indexed store intrinsic (i8mf2 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg7.mask.
  return vsuxseg7ei16_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
7605 
7606 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i8mf2_m(
7607 // CHECK-RV64-NEXT:  entry:
7608 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], <vscale x 4 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7609 // CHECK-RV64-NEXT:    ret void
7610 //
void test_vsuxseg8ei16_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint16m1_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, vint8mf2_t v6, vint8mf2_t v7, size_t vl) {
  // Exercises the masked 8-segment indexed store intrinsic (i8mf2 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg8.mask.
  return vsuxseg8ei16_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
7614 
7615 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i8m1_m(
7616 // CHECK-RV64-NEXT:  entry:
7617 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7618 // CHECK-RV64-NEXT:    ret void
7619 //
void test_vsuxseg2ei16_v_i8m1_m (vbool8_t mask, int8_t *base, vuint16m2_t bindex, vint8m1_t v0, vint8m1_t v1, size_t vl) {
  // Exercises the masked 2-segment indexed store intrinsic (i8m1 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg2.mask.
  return vsuxseg2ei16_v_i8m1_m(mask, base, bindex, v0, v1, vl);
}
7623 
7624 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i8m1_m(
7625 // CHECK-RV64-NEXT:  entry:
7626 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7627 // CHECK-RV64-NEXT:    ret void
7628 //
void test_vsuxseg3ei16_v_i8m1_m (vbool8_t mask, int8_t *base, vuint16m2_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, size_t vl) {
  // Exercises the masked 3-segment indexed store intrinsic (i8m1 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg3.mask.
  return vsuxseg3ei16_v_i8m1_m(mask, base, bindex, v0, v1, v2, vl);
}
7632 
7633 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i8m1_m(
7634 // CHECK-RV64-NEXT:  entry:
7635 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7636 // CHECK-RV64-NEXT:    ret void
7637 //
void test_vsuxseg4ei16_v_i8m1_m (vbool8_t mask, int8_t *base, vuint16m2_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, size_t vl) {
  // Exercises the masked 4-segment indexed store intrinsic (i8m1 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg4.mask.
  return vsuxseg4ei16_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
7641 
7642 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i8m1_m(
7643 // CHECK-RV64-NEXT:  entry:
7644 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7645 // CHECK-RV64-NEXT:    ret void
7646 //
void test_vsuxseg5ei16_v_i8m1_m (vbool8_t mask, int8_t *base, vuint16m2_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, size_t vl) {
  // Exercises the masked 5-segment indexed store intrinsic (i8m1 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg5.mask.
  return vsuxseg5ei16_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
7650 
7651 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i8m1_m(
7652 // CHECK-RV64-NEXT:  entry:
7653 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7654 // CHECK-RV64-NEXT:    ret void
7655 //
void test_vsuxseg6ei16_v_i8m1_m (vbool8_t mask, int8_t *base, vuint16m2_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, size_t vl) {
  // Exercises the masked 6-segment indexed store intrinsic (i8m1 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg6.mask.
  return vsuxseg6ei16_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
7659 
7660 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i8m1_m(
7661 // CHECK-RV64-NEXT:  entry:
7662 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7663 // CHECK-RV64-NEXT:    ret void
7664 //
void test_vsuxseg7ei16_v_i8m1_m (vbool8_t mask, int8_t *base, vuint16m2_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, vint8m1_t v6, size_t vl) {
  // Exercises the masked 7-segment indexed store intrinsic (i8m1 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg7.mask.
  return vsuxseg7ei16_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
7668 
7669 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i8m1_m(
7670 // CHECK-RV64-NEXT:  entry:
7671 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], <vscale x 8 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7672 // CHECK-RV64-NEXT:    ret void
7673 //
void test_vsuxseg8ei16_v_i8m1_m (vbool8_t mask, int8_t *base, vuint16m2_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, vint8m1_t v6, vint8m1_t v7, size_t vl) {
  // Exercises the masked 8-segment indexed store intrinsic (i8m1 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg8.mask.
  return vsuxseg8ei16_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
7677 
7678 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i8m2_m(
7679 // CHECK-RV64-NEXT:  entry:
7680 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7681 // CHECK-RV64-NEXT:    ret void
7682 //
void test_vsuxseg2ei16_v_i8m2_m (vbool4_t mask, int8_t *base, vuint16m4_t bindex, vint8m2_t v0, vint8m2_t v1, size_t vl) {
  // Exercises the masked 2-segment indexed store intrinsic (i8m2 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg2.mask.
  return vsuxseg2ei16_v_i8m2_m(mask, base, bindex, v0, v1, vl);
}
7686 
7687 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i8m2_m(
7688 // CHECK-RV64-NEXT:  entry:
7689 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7690 // CHECK-RV64-NEXT:    ret void
7691 //
void test_vsuxseg3ei16_v_i8m2_m (vbool4_t mask, int8_t *base, vuint16m4_t bindex, vint8m2_t v0, vint8m2_t v1, vint8m2_t v2, size_t vl) {
  // Exercises the masked 3-segment indexed store intrinsic (i8m2 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg3.mask.
  return vsuxseg3ei16_v_i8m2_m(mask, base, bindex, v0, v1, v2, vl);
}
7695 
7696 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i8m2_m(
7697 // CHECK-RV64-NEXT:  entry:
7698 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], <vscale x 16 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7699 // CHECK-RV64-NEXT:    ret void
7700 //
void test_vsuxseg4ei16_v_i8m2_m (vbool4_t mask, int8_t *base, vuint16m4_t bindex, vint8m2_t v0, vint8m2_t v1, vint8m2_t v2, vint8m2_t v3, size_t vl) {
  // Exercises the masked 4-segment indexed store intrinsic (i8m2 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg4.mask.
  return vsuxseg4ei16_v_i8m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
7704 
7705 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i8m4_m(
7706 // CHECK-RV64-NEXT:  entry:
7707 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[V0:%.*]], <vscale x 32 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7708 // CHECK-RV64-NEXT:    ret void
7709 //
void test_vsuxseg2ei16_v_i8m4_m (vbool2_t mask, int8_t *base, vuint16m8_t bindex, vint8m4_t v0, vint8m4_t v1, size_t vl) {
  // Exercises the masked 2-segment indexed store intrinsic (i8m4 data, 16-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg2.mask.
  return vsuxseg2ei16_v_i8m4_m(mask, base, bindex, v0, v1, vl);
}
7713 
7714 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i8mf8_m(
7715 // CHECK-RV64-NEXT:  entry:
7716 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7717 // CHECK-RV64-NEXT:    ret void
7718 //
void test_vsuxseg2ei32_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint32mf2_t bindex, vint8mf8_t v0, vint8mf8_t v1, size_t vl) {
  // Exercises the masked 2-segment indexed store intrinsic (i8mf8 data, 32-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg2.mask.
  return vsuxseg2ei32_v_i8mf8_m(mask, base, bindex, v0, v1, vl);
}
7722 
7723 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i8mf8_m(
7724 // CHECK-RV64-NEXT:  entry:
7725 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7726 // CHECK-RV64-NEXT:    ret void
7727 //
void test_vsuxseg3ei32_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint32mf2_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, size_t vl) {
  // Exercises the masked 3-segment indexed store intrinsic (i8mf8 data, 32-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg3.mask.
  return vsuxseg3ei32_v_i8mf8_m(mask, base, bindex, v0, v1, v2, vl);
}
7731 
7732 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i8mf8_m(
7733 // CHECK-RV64-NEXT:  entry:
7734 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7735 // CHECK-RV64-NEXT:    ret void
7736 //
void test_vsuxseg4ei32_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint32mf2_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, size_t vl) {
  // Exercises the masked 4-segment indexed store intrinsic (i8mf8 data, 32-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg4.mask.
  return vsuxseg4ei32_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
7740 
7741 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i8mf8_m(
7742 // CHECK-RV64-NEXT:  entry:
7743 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7744 // CHECK-RV64-NEXT:    ret void
7745 //
void test_vsuxseg5ei32_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint32mf2_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, size_t vl) {
  // Exercises the masked 5-segment indexed store intrinsic (i8mf8 data, 32-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg5.mask.
  return vsuxseg5ei32_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
7749 
7750 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i8mf8_m(
7751 // CHECK-RV64-NEXT:  entry:
7752 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7753 // CHECK-RV64-NEXT:    ret void
7754 //
void test_vsuxseg6ei32_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint32mf2_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, size_t vl) {
  // Exercises the masked 6-segment indexed store intrinsic (i8mf8 data, 32-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg6.mask.
  return vsuxseg6ei32_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
7758 
7759 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i8mf8_m(
7760 // CHECK-RV64-NEXT:  entry:
7761 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7762 // CHECK-RV64-NEXT:    ret void
7763 //
void test_vsuxseg7ei32_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint32mf2_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, vint8mf8_t v6, size_t vl) {
  // Exercises the masked 7-segment indexed store intrinsic (i8mf8 data, 32-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg7.mask.
  return vsuxseg7ei32_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
7767 
7768 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i8mf8_m(
7769 // CHECK-RV64-NEXT:  entry:
7770 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], <vscale x 1 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7771 // CHECK-RV64-NEXT:    ret void
7772 //
void test_vsuxseg8ei32_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint32mf2_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, vint8mf8_t v6, vint8mf8_t v7, size_t vl) {
  // Exercises the masked 8-segment indexed store intrinsic (i8mf8 data, 32-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg8.mask.
  return vsuxseg8ei32_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
7776 
7777 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i8mf4_m(
7778 // CHECK-RV64-NEXT:  entry:
7779 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7780 // CHECK-RV64-NEXT:    ret void
7781 //
void test_vsuxseg2ei32_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint32m1_t bindex, vint8mf4_t v0, vint8mf4_t v1, size_t vl) {
  // Exercises the masked 2-segment indexed store intrinsic (i8mf4 data, 32-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg2.mask.
  return vsuxseg2ei32_v_i8mf4_m(mask, base, bindex, v0, v1, vl);
}
7785 
7786 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i8mf4_m(
7787 // CHECK-RV64-NEXT:  entry:
7788 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7789 // CHECK-RV64-NEXT:    ret void
7790 //
void test_vsuxseg3ei32_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint32m1_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, size_t vl) {
  // Exercises the masked 3-segment indexed store intrinsic (i8mf4 data, 32-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg3.mask.
  return vsuxseg3ei32_v_i8mf4_m(mask, base, bindex, v0, v1, v2, vl);
}
7794 
7795 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i8mf4_m(
7796 // CHECK-RV64-NEXT:  entry:
7797 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7798 // CHECK-RV64-NEXT:    ret void
7799 //
void test_vsuxseg4ei32_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint32m1_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, size_t vl) {
  // Exercises the masked 4-segment indexed store intrinsic (i8mf4 data, 32-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg4.mask.
  return vsuxseg4ei32_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
7803 
7804 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i8mf4_m(
7805 // CHECK-RV64-NEXT:  entry:
7806 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7807 // CHECK-RV64-NEXT:    ret void
7808 //
void test_vsuxseg5ei32_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint32m1_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, size_t vl) {
  // Exercises the masked 5-segment indexed store intrinsic (i8mf4 data, 32-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg5.mask.
  return vsuxseg5ei32_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
7812 
7813 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i8mf4_m(
7814 // CHECK-RV64-NEXT:  entry:
7815 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7816 // CHECK-RV64-NEXT:    ret void
7817 //
void test_vsuxseg6ei32_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint32m1_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, size_t vl) {
  // Exercises the masked 6-segment indexed store intrinsic (i8mf4 data, 32-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg6.mask.
  return vsuxseg6ei32_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
7821 
7822 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i8mf4_m(
7823 // CHECK-RV64-NEXT:  entry:
7824 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7825 // CHECK-RV64-NEXT:    ret void
7826 //
void test_vsuxseg7ei32_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint32m1_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, vint8mf4_t v6, size_t vl) {
  // Exercises the masked 7-segment indexed store intrinsic (i8mf4 data, 32-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg7.mask.
  return vsuxseg7ei32_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
7830 
7831 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i8mf4_m(
7832 // CHECK-RV64-NEXT:  entry:
7833 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], <vscale x 2 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7834 // CHECK-RV64-NEXT:    ret void
7835 //
void test_vsuxseg8ei32_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint32m1_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, vint8mf4_t v6, vint8mf4_t v7, size_t vl) {
  // Exercises the masked 8-segment indexed store intrinsic (i8mf4 data, 32-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg8.mask.
  return vsuxseg8ei32_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
7839 
7840 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i8mf2_m(
7841 // CHECK-RV64-NEXT:  entry:
7842 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7843 // CHECK-RV64-NEXT:    ret void
7844 //
void test_vsuxseg2ei32_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint32m2_t bindex, vint8mf2_t v0, vint8mf2_t v1, size_t vl) {
  // Exercises the masked 2-segment indexed store intrinsic (i8mf2 data, 32-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg2.mask.
  return vsuxseg2ei32_v_i8mf2_m(mask, base, bindex, v0, v1, vl);
}
7848 
7849 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i8mf2_m(
7850 // CHECK-RV64-NEXT:  entry:
7851 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7852 // CHECK-RV64-NEXT:    ret void
7853 //
void test_vsuxseg3ei32_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint32m2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, size_t vl) {
  // Exercises the masked 3-segment indexed store intrinsic (i8mf2 data, 32-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg3.mask.
  return vsuxseg3ei32_v_i8mf2_m(mask, base, bindex, v0, v1, v2, vl);
}
7857 
7858 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i8mf2_m(
7859 // CHECK-RV64-NEXT:  entry:
7860 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7861 // CHECK-RV64-NEXT:    ret void
7862 //
void test_vsuxseg4ei32_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint32m2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, size_t vl) {
  // Exercises the masked 4-segment indexed store intrinsic (i8mf2 data, 32-bit
  // indices); CHECK lines above pin its lowering to @llvm.riscv.vsuxseg4.mask.
  return vsuxseg4ei32_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
7866 
7867 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i8mf2_m(
7868 // CHECK-RV64-NEXT:  entry:
7869 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7870 // CHECK-RV64-NEXT:    ret void
7871 //
void test_vsuxseg5ei32_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint32m2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, size_t vl) {
  // Masked 5-field indexed-unordered segment store (i8mf2 data, 32-bit indices).
  return vsuxseg5ei32_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
7875 
7876 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i8mf2_m(
7877 // CHECK-RV64-NEXT:  entry:
7878 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7879 // CHECK-RV64-NEXT:    ret void
7880 //
void test_vsuxseg6ei32_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint32m2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, size_t vl) {
  // Masked 6-field indexed-unordered segment store (i8mf2 data, 32-bit indices).
  return vsuxseg6ei32_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
7884 
7885 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i8mf2_m(
7886 // CHECK-RV64-NEXT:  entry:
7887 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7888 // CHECK-RV64-NEXT:    ret void
7889 //
void test_vsuxseg7ei32_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint32m2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, vint8mf2_t v6, size_t vl) {
  // Masked 7-field indexed-unordered segment store (i8mf2 data, 32-bit indices).
  return vsuxseg7ei32_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
7893 
7894 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i8mf2_m(
7895 // CHECK-RV64-NEXT:  entry:
7896 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], <vscale x 4 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7897 // CHECK-RV64-NEXT:    ret void
7898 //
void test_vsuxseg8ei32_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint32m2_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, vint8mf2_t v6, vint8mf2_t v7, size_t vl) {
  // Masked 8-field indexed-unordered segment store (i8mf2 data, 32-bit indices).
  return vsuxseg8ei32_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
7902 
7903 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i8m1_m(
7904 // CHECK-RV64-NEXT:  entry:
7905 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7906 // CHECK-RV64-NEXT:    ret void
7907 //
void test_vsuxseg2ei32_v_i8m1_m (vbool8_t mask, int8_t *base, vuint32m4_t bindex, vint8m1_t v0, vint8m1_t v1, size_t vl) {
  // Masked 2-field indexed-unordered segment store (i8m1 data, 32-bit indices).
  return vsuxseg2ei32_v_i8m1_m(mask, base, bindex, v0, v1, vl);
}
7911 
7912 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i8m1_m(
7913 // CHECK-RV64-NEXT:  entry:
7914 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7915 // CHECK-RV64-NEXT:    ret void
7916 //
void test_vsuxseg3ei32_v_i8m1_m (vbool8_t mask, int8_t *base, vuint32m4_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, size_t vl) {
  // Masked 3-field indexed-unordered segment store (i8m1 data, 32-bit indices).
  return vsuxseg3ei32_v_i8m1_m(mask, base, bindex, v0, v1, v2, vl);
}
7920 
7921 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i8m1_m(
7922 // CHECK-RV64-NEXT:  entry:
7923 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7924 // CHECK-RV64-NEXT:    ret void
7925 //
void test_vsuxseg4ei32_v_i8m1_m (vbool8_t mask, int8_t *base, vuint32m4_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, size_t vl) {
  // Masked 4-field indexed-unordered segment store (i8m1 data, 32-bit indices).
  return vsuxseg4ei32_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
7929 
7930 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i8m1_m(
7931 // CHECK-RV64-NEXT:  entry:
7932 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7933 // CHECK-RV64-NEXT:    ret void
7934 //
void test_vsuxseg5ei32_v_i8m1_m (vbool8_t mask, int8_t *base, vuint32m4_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, size_t vl) {
  // Masked 5-field indexed-unordered segment store (i8m1 data, 32-bit indices).
  return vsuxseg5ei32_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
7938 
7939 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i8m1_m(
7940 // CHECK-RV64-NEXT:  entry:
7941 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7942 // CHECK-RV64-NEXT:    ret void
7943 //
void test_vsuxseg6ei32_v_i8m1_m (vbool8_t mask, int8_t *base, vuint32m4_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, size_t vl) {
  // Masked 6-field indexed-unordered segment store (i8m1 data, 32-bit indices).
  return vsuxseg6ei32_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
7947 
7948 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i8m1_m(
7949 // CHECK-RV64-NEXT:  entry:
7950 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7951 // CHECK-RV64-NEXT:    ret void
7952 //
void test_vsuxseg7ei32_v_i8m1_m (vbool8_t mask, int8_t *base, vuint32m4_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, vint8m1_t v6, size_t vl) {
  // Masked 7-field indexed-unordered segment store (i8m1 data, 32-bit indices).
  return vsuxseg7ei32_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
7956 
7957 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i8m1_m(
7958 // CHECK-RV64-NEXT:  entry:
7959 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], <vscale x 8 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7960 // CHECK-RV64-NEXT:    ret void
7961 //
void test_vsuxseg8ei32_v_i8m1_m (vbool8_t mask, int8_t *base, vuint32m4_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, vint8m1_t v6, vint8m1_t v7, size_t vl) {
  // Masked 8-field indexed-unordered segment store (i8m1 data, 32-bit indices).
  return vsuxseg8ei32_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
7965 
7966 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i8m2_m(
7967 // CHECK-RV64-NEXT:  entry:
7968 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7969 // CHECK-RV64-NEXT:    ret void
7970 //
void test_vsuxseg2ei32_v_i8m2_m (vbool4_t mask, int8_t *base, vuint32m8_t bindex, vint8m2_t v0, vint8m2_t v1, size_t vl) {
  // Masked 2-field indexed-unordered segment store (i8m2 data, 32-bit indices).
  return vsuxseg2ei32_v_i8m2_m(mask, base, bindex, v0, v1, vl);
}
7974 
7975 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i8m2_m(
7976 // CHECK-RV64-NEXT:  entry:
7977 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7978 // CHECK-RV64-NEXT:    ret void
7979 //
void test_vsuxseg3ei32_v_i8m2_m (vbool4_t mask, int8_t *base, vuint32m8_t bindex, vint8m2_t v0, vint8m2_t v1, vint8m2_t v2, size_t vl) {
  // Masked 3-field indexed-unordered segment store (i8m2 data, 32-bit indices).
  return vsuxseg3ei32_v_i8m2_m(mask, base, bindex, v0, v1, v2, vl);
}
7983 
7984 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i8m2_m(
7985 // CHECK-RV64-NEXT:  entry:
7986 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], <vscale x 16 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7987 // CHECK-RV64-NEXT:    ret void
7988 //
void test_vsuxseg4ei32_v_i8m2_m (vbool4_t mask, int8_t *base, vuint32m8_t bindex, vint8m2_t v0, vint8m2_t v1, vint8m2_t v2, vint8m2_t v3, size_t vl) {
  // Masked 4-field indexed-unordered segment store (i8m2 data, 32-bit indices).
  // seg4 is the largest field count for i8m2 (NFIELDS * LMUL must be <= 8).
  return vsuxseg4ei32_v_i8m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
7992 
7993 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i8mf8_m(
7994 // CHECK-RV64-NEXT:  entry:
7995 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
7996 // CHECK-RV64-NEXT:    ret void
7997 //
void test_vsuxseg2ei64_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint64m1_t bindex, vint8mf8_t v0, vint8mf8_t v1, size_t vl) {
  // Masked 2-field indexed-unordered segment store (i8mf8 data, 64-bit indices);
  // must lower to the llvm.riscv.vsuxseg2.mask call checked above.
  return vsuxseg2ei64_v_i8mf8_m(mask, base, bindex, v0, v1, vl);
}
8001 
8002 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i8mf8_m(
8003 // CHECK-RV64-NEXT:  entry:
8004 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8005 // CHECK-RV64-NEXT:    ret void
8006 //
void test_vsuxseg3ei64_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint64m1_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, size_t vl) {
  // Masked 3-field indexed-unordered segment store (i8mf8 data, 64-bit indices).
  return vsuxseg3ei64_v_i8mf8_m(mask, base, bindex, v0, v1, v2, vl);
}
8010 
8011 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i8mf8_m(
8012 // CHECK-RV64-NEXT:  entry:
8013 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8014 // CHECK-RV64-NEXT:    ret void
8015 //
void test_vsuxseg4ei64_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint64m1_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, size_t vl) {
  // Masked 4-field indexed-unordered segment store (i8mf8 data, 64-bit indices).
  return vsuxseg4ei64_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
8019 
8020 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i8mf8_m(
8021 // CHECK-RV64-NEXT:  entry:
8022 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8023 // CHECK-RV64-NEXT:    ret void
8024 //
void test_vsuxseg5ei64_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint64m1_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, size_t vl) {
  // Masked 5-field indexed-unordered segment store (i8mf8 data, 64-bit indices).
  return vsuxseg5ei64_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
8028 
8029 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i8mf8_m(
8030 // CHECK-RV64-NEXT:  entry:
8031 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8032 // CHECK-RV64-NEXT:    ret void
8033 //
void test_vsuxseg6ei64_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint64m1_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, size_t vl) {
  // Masked 6-field indexed-unordered segment store (i8mf8 data, 64-bit indices).
  return vsuxseg6ei64_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
8037 
8038 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i8mf8_m(
8039 // CHECK-RV64-NEXT:  entry:
8040 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8041 // CHECK-RV64-NEXT:    ret void
8042 //
void test_vsuxseg7ei64_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint64m1_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, vint8mf8_t v6, size_t vl) {
  // Masked 7-field indexed-unordered segment store (i8mf8 data, 64-bit indices).
  return vsuxseg7ei64_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
8046 
8047 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i8mf8_m(
8048 // CHECK-RV64-NEXT:  entry:
8049 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], <vscale x 1 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8050 // CHECK-RV64-NEXT:    ret void
8051 //
void test_vsuxseg8ei64_v_i8mf8_m (vbool64_t mask, int8_t *base, vuint64m1_t bindex, vint8mf8_t v0, vint8mf8_t v1, vint8mf8_t v2, vint8mf8_t v3, vint8mf8_t v4, vint8mf8_t v5, vint8mf8_t v6, vint8mf8_t v7, size_t vl) {
  // Masked 8-field indexed-unordered segment store (i8mf8 data, 64-bit indices).
  return vsuxseg8ei64_v_i8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
8055 
8056 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i8mf4_m(
8057 // CHECK-RV64-NEXT:  entry:
8058 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8059 // CHECK-RV64-NEXT:    ret void
8060 //
void test_vsuxseg2ei64_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint64m2_t bindex, vint8mf4_t v0, vint8mf4_t v1, size_t vl) {
  // Masked 2-field indexed-unordered segment store (i8mf4 data, 64-bit indices).
  return vsuxseg2ei64_v_i8mf4_m(mask, base, bindex, v0, v1, vl);
}
8064 
8065 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i8mf4_m(
8066 // CHECK-RV64-NEXT:  entry:
8067 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8068 // CHECK-RV64-NEXT:    ret void
8069 //
void test_vsuxseg3ei64_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint64m2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, size_t vl) {
  // Masked 3-field indexed-unordered segment store (i8mf4 data, 64-bit indices).
  return vsuxseg3ei64_v_i8mf4_m(mask, base, bindex, v0, v1, v2, vl);
}
8073 
8074 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i8mf4_m(
8075 // CHECK-RV64-NEXT:  entry:
8076 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8077 // CHECK-RV64-NEXT:    ret void
8078 //
void test_vsuxseg4ei64_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint64m2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, size_t vl) {
  // Masked 4-field indexed-unordered segment store (i8mf4 data, 64-bit indices).
  return vsuxseg4ei64_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
8082 
8083 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i8mf4_m(
8084 // CHECK-RV64-NEXT:  entry:
8085 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8086 // CHECK-RV64-NEXT:    ret void
8087 //
void test_vsuxseg5ei64_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint64m2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, size_t vl) {
  // Masked 5-field indexed-unordered segment store (i8mf4 data, 64-bit indices).
  return vsuxseg5ei64_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
8091 
8092 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i8mf4_m(
8093 // CHECK-RV64-NEXT:  entry:
8094 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8095 // CHECK-RV64-NEXT:    ret void
8096 //
void test_vsuxseg6ei64_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint64m2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, size_t vl) {
  // Masked 6-field indexed-unordered segment store (i8mf4 data, 64-bit indices).
  return vsuxseg6ei64_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
8100 
8101 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i8mf4_m(
8102 // CHECK-RV64-NEXT:  entry:
8103 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8104 // CHECK-RV64-NEXT:    ret void
8105 //
void test_vsuxseg7ei64_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint64m2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, vint8mf4_t v6, size_t vl) {
  // Masked 7-field indexed-unordered segment store (i8mf4 data, 64-bit indices).
  return vsuxseg7ei64_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
8109 
8110 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i8mf4_m(
8111 // CHECK-RV64-NEXT:  entry:
8112 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], <vscale x 2 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8113 // CHECK-RV64-NEXT:    ret void
8114 //
void test_vsuxseg8ei64_v_i8mf4_m (vbool32_t mask, int8_t *base, vuint64m2_t bindex, vint8mf4_t v0, vint8mf4_t v1, vint8mf4_t v2, vint8mf4_t v3, vint8mf4_t v4, vint8mf4_t v5, vint8mf4_t v6, vint8mf4_t v7, size_t vl) {
  // Masked 8-field indexed-unordered segment store (i8mf4 data, 64-bit indices).
  return vsuxseg8ei64_v_i8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
8118 
8119 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i8mf2_m(
8120 // CHECK-RV64-NEXT:  entry:
8121 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8122 // CHECK-RV64-NEXT:    ret void
8123 //
void test_vsuxseg2ei64_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint64m4_t bindex, vint8mf2_t v0, vint8mf2_t v1, size_t vl) {
  // Masked 2-field indexed-unordered segment store (i8mf2 data, 64-bit indices).
  return vsuxseg2ei64_v_i8mf2_m(mask, base, bindex, v0, v1, vl);
}
8127 
8128 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i8mf2_m(
8129 // CHECK-RV64-NEXT:  entry:
8130 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8131 // CHECK-RV64-NEXT:    ret void
8132 //
void test_vsuxseg3ei64_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint64m4_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, size_t vl) {
  // Masked 3-field indexed-unordered segment store (i8mf2 data, 64-bit indices).
  return vsuxseg3ei64_v_i8mf2_m(mask, base, bindex, v0, v1, v2, vl);
}
8136 
8137 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i8mf2_m(
8138 // CHECK-RV64-NEXT:  entry:
8139 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8140 // CHECK-RV64-NEXT:    ret void
8141 //
void test_vsuxseg4ei64_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint64m4_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, size_t vl) {
  // Masked 4-field indexed-unordered segment store (i8mf2 data, 64-bit indices).
  return vsuxseg4ei64_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
8145 
8146 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i8mf2_m(
8147 // CHECK-RV64-NEXT:  entry:
8148 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8149 // CHECK-RV64-NEXT:    ret void
8150 //
void test_vsuxseg5ei64_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint64m4_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, size_t vl) {
  // Masked 5-field indexed-unordered segment store (i8mf2 data, 64-bit indices).
  return vsuxseg5ei64_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
8154 
8155 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i8mf2_m(
8156 // CHECK-RV64-NEXT:  entry:
8157 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8158 // CHECK-RV64-NEXT:    ret void
8159 //
void test_vsuxseg6ei64_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint64m4_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, size_t vl) {
  // Masked 6-field indexed-unordered segment store (i8mf2 data, 64-bit indices).
  return vsuxseg6ei64_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
8163 
8164 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i8mf2_m(
8165 // CHECK-RV64-NEXT:  entry:
8166 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8167 // CHECK-RV64-NEXT:    ret void
8168 //
void test_vsuxseg7ei64_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint64m4_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, vint8mf2_t v6, size_t vl) {
  // Masked 7-field indexed-unordered segment store (i8mf2 data, 64-bit indices).
  return vsuxseg7ei64_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
8172 
8173 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i8mf2_m(
8174 // CHECK-RV64-NEXT:  entry:
8175 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], <vscale x 4 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8176 // CHECK-RV64-NEXT:    ret void
8177 //
void test_vsuxseg8ei64_v_i8mf2_m (vbool16_t mask, int8_t *base, vuint64m4_t bindex, vint8mf2_t v0, vint8mf2_t v1, vint8mf2_t v2, vint8mf2_t v3, vint8mf2_t v4, vint8mf2_t v5, vint8mf2_t v6, vint8mf2_t v7, size_t vl) {
  // Masked 8-field indexed-unordered segment store (i8mf2 data, 64-bit indices).
  return vsuxseg8ei64_v_i8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
8181 
8182 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i8m1_m(
8183 // CHECK-RV64-NEXT:  entry:
8184 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8185 // CHECK-RV64-NEXT:    ret void
8186 //
void test_vsuxseg2ei64_v_i8m1_m (vbool8_t mask, int8_t *base, vuint64m8_t bindex, vint8m1_t v0, vint8m1_t v1, size_t vl) {
  // Masked 2-field indexed-unordered segment store (i8m1 data, 64-bit indices).
  return vsuxseg2ei64_v_i8m1_m(mask, base, bindex, v0, v1, vl);
}
8190 
8191 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i8m1_m(
8192 // CHECK-RV64-NEXT:  entry:
8193 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8194 // CHECK-RV64-NEXT:    ret void
8195 //
void test_vsuxseg3ei64_v_i8m1_m (vbool8_t mask, int8_t *base, vuint64m8_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, size_t vl) {
  // Exercises the masked vsuxseg3ei64 intrinsic (3 i8m1 segment fields,
  // 64-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg3ei64_v_i8m1_m(mask, base, bindex, v0, v1, v2, vl);
}
8199 
8200 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i8m1_m(
8201 // CHECK-RV64-NEXT:  entry:
8202 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8203 // CHECK-RV64-NEXT:    ret void
8204 //
void test_vsuxseg4ei64_v_i8m1_m (vbool8_t mask, int8_t *base, vuint64m8_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, size_t vl) {
  // Exercises the masked vsuxseg4ei64 intrinsic (4 i8m1 segment fields,
  // 64-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg4ei64_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
8208 
8209 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i8m1_m(
8210 // CHECK-RV64-NEXT:  entry:
8211 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8212 // CHECK-RV64-NEXT:    ret void
8213 //
void test_vsuxseg5ei64_v_i8m1_m (vbool8_t mask, int8_t *base, vuint64m8_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, size_t vl) {
  // Exercises the masked vsuxseg5ei64 intrinsic (5 i8m1 segment fields,
  // 64-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg5ei64_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
8217 
8218 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i8m1_m(
8219 // CHECK-RV64-NEXT:  entry:
8220 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8221 // CHECK-RV64-NEXT:    ret void
8222 //
void test_vsuxseg6ei64_v_i8m1_m (vbool8_t mask, int8_t *base, vuint64m8_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, size_t vl) {
  // Exercises the masked vsuxseg6ei64 intrinsic (6 i8m1 segment fields,
  // 64-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg6ei64_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
8226 
8227 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i8m1_m(
8228 // CHECK-RV64-NEXT:  entry:
8229 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8230 // CHECK-RV64-NEXT:    ret void
8231 //
void test_vsuxseg7ei64_v_i8m1_m (vbool8_t mask, int8_t *base, vuint64m8_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, vint8m1_t v6, size_t vl) {
  // Exercises the masked vsuxseg7ei64 intrinsic (7 i8m1 segment fields,
  // 64-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg7ei64_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
8235 
8236 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i8m1_m(
8237 // CHECK-RV64-NEXT:  entry:
8238 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], <vscale x 8 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8239 // CHECK-RV64-NEXT:    ret void
8240 //
void test_vsuxseg8ei64_v_i8m1_m (vbool8_t mask, int8_t *base, vuint64m8_t bindex, vint8m1_t v0, vint8m1_t v1, vint8m1_t v2, vint8m1_t v3, vint8m1_t v4, vint8m1_t v5, vint8m1_t v6, vint8m1_t v7, size_t vl) {
  // Exercises the masked vsuxseg8ei64 intrinsic (8 i8m1 segment fields,
  // 64-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg8ei64_v_i8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
8244 
8245 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i16mf4_m(
8246 // CHECK-RV64-NEXT:  entry:
8247 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8248 // CHECK-RV64-NEXT:    ret void
8249 //
void test_vsuxseg2ei8_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint8mf8_t bindex, vint16mf4_t v0, vint16mf4_t v1, size_t vl) {
  // Exercises the masked vsuxseg2ei8 intrinsic (2 i16mf4 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg2ei8_v_i16mf4_m(mask, base, bindex, v0, v1, vl);
}
8253 
8254 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i16mf4_m(
8255 // CHECK-RV64-NEXT:  entry:
8256 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8257 // CHECK-RV64-NEXT:    ret void
8258 //
void test_vsuxseg3ei8_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint8mf8_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, size_t vl) {
  // Exercises the masked vsuxseg3ei8 intrinsic (3 i16mf4 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg3ei8_v_i16mf4_m(mask, base, bindex, v0, v1, v2, vl);
}
8262 
8263 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i16mf4_m(
8264 // CHECK-RV64-NEXT:  entry:
8265 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8266 // CHECK-RV64-NEXT:    ret void
8267 //
void test_vsuxseg4ei8_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint8mf8_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, size_t vl) {
  // Exercises the masked vsuxseg4ei8 intrinsic (4 i16mf4 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg4ei8_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
8271 
8272 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i16mf4_m(
8273 // CHECK-RV64-NEXT:  entry:
8274 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8275 // CHECK-RV64-NEXT:    ret void
8276 //
void test_vsuxseg5ei8_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint8mf8_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, size_t vl) {
  // Exercises the masked vsuxseg5ei8 intrinsic (5 i16mf4 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg5ei8_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
8280 
8281 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i16mf4_m(
8282 // CHECK-RV64-NEXT:  entry:
8283 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8284 // CHECK-RV64-NEXT:    ret void
8285 //
void test_vsuxseg6ei8_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint8mf8_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, size_t vl) {
  // Exercises the masked vsuxseg6ei8 intrinsic (6 i16mf4 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg6ei8_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
8289 
8290 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i16mf4_m(
8291 // CHECK-RV64-NEXT:  entry:
8292 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8293 // CHECK-RV64-NEXT:    ret void
8294 //
void test_vsuxseg7ei8_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint8mf8_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, vint16mf4_t v6, size_t vl) {
  // Exercises the masked vsuxseg7ei8 intrinsic (7 i16mf4 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg7ei8_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
8298 
8299 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i16mf4_m(
8300 // CHECK-RV64-NEXT:  entry:
8301 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], <vscale x 1 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8302 // CHECK-RV64-NEXT:    ret void
8303 //
void test_vsuxseg8ei8_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint8mf8_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, vint16mf4_t v6, vint16mf4_t v7, size_t vl) {
  // Exercises the masked vsuxseg8ei8 intrinsic (8 i16mf4 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg8ei8_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
8307 
8308 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i16mf2_m(
8309 // CHECK-RV64-NEXT:  entry:
8310 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8311 // CHECK-RV64-NEXT:    ret void
8312 //
void test_vsuxseg2ei8_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint8mf4_t bindex, vint16mf2_t v0, vint16mf2_t v1, size_t vl) {
  // Exercises the masked vsuxseg2ei8 intrinsic (2 i16mf2 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg2ei8_v_i16mf2_m(mask, base, bindex, v0, v1, vl);
}
8316 
8317 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i16mf2_m(
8318 // CHECK-RV64-NEXT:  entry:
8319 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8320 // CHECK-RV64-NEXT:    ret void
8321 //
void test_vsuxseg3ei8_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint8mf4_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, size_t vl) {
  // Exercises the masked vsuxseg3ei8 intrinsic (3 i16mf2 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg3ei8_v_i16mf2_m(mask, base, bindex, v0, v1, v2, vl);
}
8325 
8326 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i16mf2_m(
8327 // CHECK-RV64-NEXT:  entry:
8328 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8329 // CHECK-RV64-NEXT:    ret void
8330 //
void test_vsuxseg4ei8_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint8mf4_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, size_t vl) {
  // Exercises the masked vsuxseg4ei8 intrinsic (4 i16mf2 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg4ei8_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
8334 
8335 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i16mf2_m(
8336 // CHECK-RV64-NEXT:  entry:
8337 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8338 // CHECK-RV64-NEXT:    ret void
8339 //
void test_vsuxseg5ei8_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint8mf4_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, size_t vl) {
  // Exercises the masked vsuxseg5ei8 intrinsic (5 i16mf2 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg5ei8_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
8343 
8344 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i16mf2_m(
8345 // CHECK-RV64-NEXT:  entry:
8346 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8347 // CHECK-RV64-NEXT:    ret void
8348 //
void test_vsuxseg6ei8_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint8mf4_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, size_t vl) {
  // Exercises the masked vsuxseg6ei8 intrinsic (6 i16mf2 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg6ei8_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
8352 
8353 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i16mf2_m(
8354 // CHECK-RV64-NEXT:  entry:
8355 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8356 // CHECK-RV64-NEXT:    ret void
8357 //
void test_vsuxseg7ei8_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint8mf4_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, vint16mf2_t v6, size_t vl) {
  // Exercises the masked vsuxseg7ei8 intrinsic (7 i16mf2 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg7ei8_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
8361 
8362 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i16mf2_m(
8363 // CHECK-RV64-NEXT:  entry:
8364 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], <vscale x 2 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8365 // CHECK-RV64-NEXT:    ret void
8366 //
void test_vsuxseg8ei8_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint8mf4_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, vint16mf2_t v6, vint16mf2_t v7, size_t vl) {
  // Exercises the masked vsuxseg8ei8 intrinsic (8 i16mf2 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg8ei8_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
8370 
8371 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i16m1_m(
8372 // CHECK-RV64-NEXT:  entry:
8373 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8374 // CHECK-RV64-NEXT:    ret void
8375 //
void test_vsuxseg2ei8_v_i16m1_m (vbool16_t mask, int16_t *base, vuint8mf2_t bindex, vint16m1_t v0, vint16m1_t v1, size_t vl) {
  // Exercises the masked vsuxseg2ei8 intrinsic (2 i16m1 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg2ei8_v_i16m1_m(mask, base, bindex, v0, v1, vl);
}
8379 
8380 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i16m1_m(
8381 // CHECK-RV64-NEXT:  entry:
8382 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8383 // CHECK-RV64-NEXT:    ret void
8384 //
void test_vsuxseg3ei8_v_i16m1_m (vbool16_t mask, int16_t *base, vuint8mf2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, size_t vl) {
  // Exercises the masked vsuxseg3ei8 intrinsic (3 i16m1 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg3ei8_v_i16m1_m(mask, base, bindex, v0, v1, v2, vl);
}
8388 
8389 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i16m1_m(
8390 // CHECK-RV64-NEXT:  entry:
8391 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8392 // CHECK-RV64-NEXT:    ret void
8393 //
void test_vsuxseg4ei8_v_i16m1_m (vbool16_t mask, int16_t *base, vuint8mf2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, size_t vl) {
  // Exercises the masked vsuxseg4ei8 intrinsic (4 i16m1 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg4ei8_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
8397 
8398 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i16m1_m(
8399 // CHECK-RV64-NEXT:  entry:
8400 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8401 // CHECK-RV64-NEXT:    ret void
8402 //
void test_vsuxseg5ei8_v_i16m1_m (vbool16_t mask, int16_t *base, vuint8mf2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, size_t vl) {
  // Exercises the masked vsuxseg5ei8 intrinsic (5 i16m1 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg5ei8_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
8406 
8407 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i16m1_m(
8408 // CHECK-RV64-NEXT:  entry:
8409 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8410 // CHECK-RV64-NEXT:    ret void
8411 //
void test_vsuxseg6ei8_v_i16m1_m (vbool16_t mask, int16_t *base, vuint8mf2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, size_t vl) {
  // Exercises the masked vsuxseg6ei8 intrinsic (6 i16m1 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg6ei8_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
8415 
8416 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i16m1_m(
8417 // CHECK-RV64-NEXT:  entry:
8418 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8419 // CHECK-RV64-NEXT:    ret void
8420 //
void test_vsuxseg7ei8_v_i16m1_m (vbool16_t mask, int16_t *base, vuint8mf2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, vint16m1_t v6, size_t vl) {
  // Exercises the masked vsuxseg7ei8 intrinsic (7 i16m1 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg7ei8_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
8424 
8425 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i16m1_m(
8426 // CHECK-RV64-NEXT:  entry:
8427 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], <vscale x 4 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8428 // CHECK-RV64-NEXT:    ret void
8429 //
void test_vsuxseg8ei8_v_i16m1_m (vbool16_t mask, int16_t *base, vuint8mf2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, vint16m1_t v6, vint16m1_t v7, size_t vl) {
  // Exercises the masked vsuxseg8ei8 intrinsic (8 i16m1 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg8ei8_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
8433 
8434 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i16m2_m(
8435 // CHECK-RV64-NEXT:  entry:
8436 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8437 // CHECK-RV64-NEXT:    ret void
8438 //
void test_vsuxseg2ei8_v_i16m2_m (vbool8_t mask, int16_t *base, vuint8m1_t bindex, vint16m2_t v0, vint16m2_t v1, size_t vl) {
  // Exercises the masked vsuxseg2ei8 intrinsic (2 i16m2 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg2ei8_v_i16m2_m(mask, base, bindex, v0, v1, vl);
}
8442 
8443 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i16m2_m(
8444 // CHECK-RV64-NEXT:  entry:
8445 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8446 // CHECK-RV64-NEXT:    ret void
8447 //
void test_vsuxseg3ei8_v_i16m2_m (vbool8_t mask, int16_t *base, vuint8m1_t bindex, vint16m2_t v0, vint16m2_t v1, vint16m2_t v2, size_t vl) {
  // Exercises the masked vsuxseg3ei8 intrinsic (3 i16m2 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg3ei8_v_i16m2_m(mask, base, bindex, v0, v1, v2, vl);
}
8451 
8452 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i16m2_m(
8453 // CHECK-RV64-NEXT:  entry:
8454 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], <vscale x 8 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8455 // CHECK-RV64-NEXT:    ret void
8456 //
void test_vsuxseg4ei8_v_i16m2_m (vbool8_t mask, int16_t *base, vuint8m1_t bindex, vint16m2_t v0, vint16m2_t v1, vint16m2_t v2, vint16m2_t v3, size_t vl) {
  // Exercises the masked vsuxseg4ei8 intrinsic (4 i16m2 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg4ei8_v_i16m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
8460 
8461 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i16m4_m(
8462 // CHECK-RV64-NEXT:  entry:
8463 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i8.i64(<vscale x 16 x i16> [[V0:%.*]], <vscale x 16 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8464 // CHECK-RV64-NEXT:    ret void
8465 //
void test_vsuxseg2ei8_v_i16m4_m (vbool4_t mask, int16_t *base, vuint8m2_t bindex, vint16m4_t v0, vint16m4_t v1, size_t vl) {
  // Exercises the masked vsuxseg2ei8 intrinsic (2 i16m4 segment fields,
  // 8-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg2ei8_v_i16m4_m(mask, base, bindex, v0, v1, vl);
}
8469 
8470 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i16mf4_m(
8471 // CHECK-RV64-NEXT:  entry:
8472 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8473 // CHECK-RV64-NEXT:    ret void
8474 //
void test_vsuxseg2ei16_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint16mf4_t bindex, vint16mf4_t v0, vint16mf4_t v1, size_t vl) {
  // Exercises the masked vsuxseg2ei16 intrinsic (2 i16mf4 segment fields,
  // 16-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg2ei16_v_i16mf4_m(mask, base, bindex, v0, v1, vl);
}
8478 
8479 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i16mf4_m(
8480 // CHECK-RV64-NEXT:  entry:
8481 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8482 // CHECK-RV64-NEXT:    ret void
8483 //
void test_vsuxseg3ei16_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint16mf4_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, size_t vl) {
  // Exercises the masked vsuxseg3ei16 intrinsic (3 i16mf4 segment fields,
  // 16-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg3ei16_v_i16mf4_m(mask, base, bindex, v0, v1, v2, vl);
}
8487 
8488 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i16mf4_m(
8489 // CHECK-RV64-NEXT:  entry:
8490 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8491 // CHECK-RV64-NEXT:    ret void
8492 //
void test_vsuxseg4ei16_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint16mf4_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, size_t vl) {
  // Exercises the masked vsuxseg4ei16 intrinsic (4 i16mf4 segment fields,
  // 16-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg4ei16_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
8496 
8497 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i16mf4_m(
8498 // CHECK-RV64-NEXT:  entry:
8499 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8500 // CHECK-RV64-NEXT:    ret void
8501 //
void test_vsuxseg5ei16_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint16mf4_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, size_t vl) {
  // Exercises the masked vsuxseg5ei16 intrinsic (5 i16mf4 segment fields,
  // 16-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg5ei16_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
8505 
8506 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i16mf4_m(
8507 // CHECK-RV64-NEXT:  entry:
8508 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8509 // CHECK-RV64-NEXT:    ret void
8510 //
void test_vsuxseg6ei16_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint16mf4_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, size_t vl) {
  // Exercises the masked vsuxseg6ei16 intrinsic (6 i16mf4 segment fields,
  // 16-bit indices); lowering is verified by the CHECK-RV64 lines above.
  return vsuxseg6ei16_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
8514 
8515 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i16mf4_m(
8516 // CHECK-RV64-NEXT:  entry:
8517 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8518 // CHECK-RV64-NEXT:    ret void
8519 //
test_vsuxseg7ei16_v_i16mf4_m(vbool64_t mask,int16_t * base,vuint16mf4_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,vint16mf4_t v5,vint16mf4_t v6,size_t vl)8520 void test_vsuxseg7ei16_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint16mf4_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, vint16mf4_t v6, size_t vl) {
8521   return vsuxseg7ei16_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
8522 }
8523 
8524 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i16mf4_m(
8525 // CHECK-RV64-NEXT:  entry:
8526 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], <vscale x 1 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8527 // CHECK-RV64-NEXT:    ret void
8528 //
test_vsuxseg8ei16_v_i16mf4_m(vbool64_t mask,int16_t * base,vuint16mf4_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,vint16mf4_t v5,vint16mf4_t v6,vint16mf4_t v7,size_t vl)8529 void test_vsuxseg8ei16_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint16mf4_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, vint16mf4_t v6, vint16mf4_t v7, size_t vl) {
8530   return vsuxseg8ei16_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
8531 }
8532 
8533 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i16mf2_m(
8534 // CHECK-RV64-NEXT:  entry:
8535 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8536 // CHECK-RV64-NEXT:    ret void
8537 //
test_vsuxseg2ei16_v_i16mf2_m(vbool32_t mask,int16_t * base,vuint16mf2_t bindex,vint16mf2_t v0,vint16mf2_t v1,size_t vl)8538 void test_vsuxseg2ei16_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint16mf2_t bindex, vint16mf2_t v0, vint16mf2_t v1, size_t vl) {
8539   return vsuxseg2ei16_v_i16mf2_m(mask, base, bindex, v0, v1, vl);
8540 }
8541 
8542 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i16mf2_m(
8543 // CHECK-RV64-NEXT:  entry:
8544 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8545 // CHECK-RV64-NEXT:    ret void
8546 //
test_vsuxseg3ei16_v_i16mf2_m(vbool32_t mask,int16_t * base,vuint16mf2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,size_t vl)8547 void test_vsuxseg3ei16_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint16mf2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, size_t vl) {
8548   return vsuxseg3ei16_v_i16mf2_m(mask, base, bindex, v0, v1, v2, vl);
8549 }
8550 
8551 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i16mf2_m(
8552 // CHECK-RV64-NEXT:  entry:
8553 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8554 // CHECK-RV64-NEXT:    ret void
8555 //
test_vsuxseg4ei16_v_i16mf2_m(vbool32_t mask,int16_t * base,vuint16mf2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,size_t vl)8556 void test_vsuxseg4ei16_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint16mf2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, size_t vl) {
8557   return vsuxseg4ei16_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
8558 }
8559 
8560 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i16mf2_m(
8561 // CHECK-RV64-NEXT:  entry:
8562 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8563 // CHECK-RV64-NEXT:    ret void
8564 //
test_vsuxseg5ei16_v_i16mf2_m(vbool32_t mask,int16_t * base,vuint16mf2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,size_t vl)8565 void test_vsuxseg5ei16_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint16mf2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, size_t vl) {
8566   return vsuxseg5ei16_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
8567 }
8568 
8569 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i16mf2_m(
8570 // CHECK-RV64-NEXT:  entry:
8571 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8572 // CHECK-RV64-NEXT:    ret void
8573 //
test_vsuxseg6ei16_v_i16mf2_m(vbool32_t mask,int16_t * base,vuint16mf2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,size_t vl)8574 void test_vsuxseg6ei16_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint16mf2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, size_t vl) {
8575   return vsuxseg6ei16_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
8576 }
8577 
8578 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i16mf2_m(
8579 // CHECK-RV64-NEXT:  entry:
8580 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8581 // CHECK-RV64-NEXT:    ret void
8582 //
test_vsuxseg7ei16_v_i16mf2_m(vbool32_t mask,int16_t * base,vuint16mf2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,vint16mf2_t v6,size_t vl)8583 void test_vsuxseg7ei16_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint16mf2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, vint16mf2_t v6, size_t vl) {
8584   return vsuxseg7ei16_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
8585 }
8586 
8587 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i16mf2_m(
8588 // CHECK-RV64-NEXT:  entry:
8589 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], <vscale x 2 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8590 // CHECK-RV64-NEXT:    ret void
8591 //
test_vsuxseg8ei16_v_i16mf2_m(vbool32_t mask,int16_t * base,vuint16mf2_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,vint16mf2_t v6,vint16mf2_t v7,size_t vl)8592 void test_vsuxseg8ei16_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint16mf2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, vint16mf2_t v6, vint16mf2_t v7, size_t vl) {
8593   return vsuxseg8ei16_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
8594 }
8595 
8596 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i16m1_m(
8597 // CHECK-RV64-NEXT:  entry:
8598 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8599 // CHECK-RV64-NEXT:    ret void
8600 //
test_vsuxseg2ei16_v_i16m1_m(vbool16_t mask,int16_t * base,vuint16m1_t bindex,vint16m1_t v0,vint16m1_t v1,size_t vl)8601 void test_vsuxseg2ei16_v_i16m1_m (vbool16_t mask, int16_t *base, vuint16m1_t bindex, vint16m1_t v0, vint16m1_t v1, size_t vl) {
8602   return vsuxseg2ei16_v_i16m1_m(mask, base, bindex, v0, v1, vl);
8603 }
8604 
8605 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i16m1_m(
8606 // CHECK-RV64-NEXT:  entry:
8607 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8608 // CHECK-RV64-NEXT:    ret void
8609 //
test_vsuxseg3ei16_v_i16m1_m(vbool16_t mask,int16_t * base,vuint16m1_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,size_t vl)8610 void test_vsuxseg3ei16_v_i16m1_m (vbool16_t mask, int16_t *base, vuint16m1_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, size_t vl) {
8611   return vsuxseg3ei16_v_i16m1_m(mask, base, bindex, v0, v1, v2, vl);
8612 }
8613 
8614 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i16m1_m(
8615 // CHECK-RV64-NEXT:  entry:
8616 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8617 // CHECK-RV64-NEXT:    ret void
8618 //
test_vsuxseg4ei16_v_i16m1_m(vbool16_t mask,int16_t * base,vuint16m1_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,size_t vl)8619 void test_vsuxseg4ei16_v_i16m1_m (vbool16_t mask, int16_t *base, vuint16m1_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, size_t vl) {
8620   return vsuxseg4ei16_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
8621 }
8622 
8623 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i16m1_m(
8624 // CHECK-RV64-NEXT:  entry:
8625 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8626 // CHECK-RV64-NEXT:    ret void
8627 //
test_vsuxseg5ei16_v_i16m1_m(vbool16_t mask,int16_t * base,vuint16m1_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,size_t vl)8628 void test_vsuxseg5ei16_v_i16m1_m (vbool16_t mask, int16_t *base, vuint16m1_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, size_t vl) {
8629   return vsuxseg5ei16_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
8630 }
8631 
8632 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i16m1_m(
8633 // CHECK-RV64-NEXT:  entry:
8634 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8635 // CHECK-RV64-NEXT:    ret void
8636 //
test_vsuxseg6ei16_v_i16m1_m(vbool16_t mask,int16_t * base,vuint16m1_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,vint16m1_t v5,size_t vl)8637 void test_vsuxseg6ei16_v_i16m1_m (vbool16_t mask, int16_t *base, vuint16m1_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, size_t vl) {
8638   return vsuxseg6ei16_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
8639 }
8640 
8641 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i16m1_m(
8642 // CHECK-RV64-NEXT:  entry:
8643 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8644 // CHECK-RV64-NEXT:    ret void
8645 //
test_vsuxseg7ei16_v_i16m1_m(vbool16_t mask,int16_t * base,vuint16m1_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,vint16m1_t v5,vint16m1_t v6,size_t vl)8646 void test_vsuxseg7ei16_v_i16m1_m (vbool16_t mask, int16_t *base, vuint16m1_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, vint16m1_t v6, size_t vl) {
8647   return vsuxseg7ei16_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
8648 }
8649 
8650 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i16m1_m(
8651 // CHECK-RV64-NEXT:  entry:
8652 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], <vscale x 4 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8653 // CHECK-RV64-NEXT:    ret void
8654 //
test_vsuxseg8ei16_v_i16m1_m(vbool16_t mask,int16_t * base,vuint16m1_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,vint16m1_t v3,vint16m1_t v4,vint16m1_t v5,vint16m1_t v6,vint16m1_t v7,size_t vl)8655 void test_vsuxseg8ei16_v_i16m1_m (vbool16_t mask, int16_t *base, vuint16m1_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, vint16m1_t v6, vint16m1_t v7, size_t vl) {
8656   return vsuxseg8ei16_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
8657 }
8658 
8659 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i16m2_m(
8660 // CHECK-RV64-NEXT:  entry:
8661 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8662 // CHECK-RV64-NEXT:    ret void
8663 //
test_vsuxseg2ei16_v_i16m2_m(vbool8_t mask,int16_t * base,vuint16m2_t bindex,vint16m2_t v0,vint16m2_t v1,size_t vl)8664 void test_vsuxseg2ei16_v_i16m2_m (vbool8_t mask, int16_t *base, vuint16m2_t bindex, vint16m2_t v0, vint16m2_t v1, size_t vl) {
8665   return vsuxseg2ei16_v_i16m2_m(mask, base, bindex, v0, v1, vl);
8666 }
8667 
8668 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i16m2_m(
8669 // CHECK-RV64-NEXT:  entry:
8670 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8671 // CHECK-RV64-NEXT:    ret void
8672 //
test_vsuxseg3ei16_v_i16m2_m(vbool8_t mask,int16_t * base,vuint16m2_t bindex,vint16m2_t v0,vint16m2_t v1,vint16m2_t v2,size_t vl)8673 void test_vsuxseg3ei16_v_i16m2_m (vbool8_t mask, int16_t *base, vuint16m2_t bindex, vint16m2_t v0, vint16m2_t v1, vint16m2_t v2, size_t vl) {
8674   return vsuxseg3ei16_v_i16m2_m(mask, base, bindex, v0, v1, v2, vl);
8675 }
8676 
8677 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i16m2_m(
8678 // CHECK-RV64-NEXT:  entry:
8679 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], <vscale x 8 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8680 // CHECK-RV64-NEXT:    ret void
8681 //
test_vsuxseg4ei16_v_i16m2_m(vbool8_t mask,int16_t * base,vuint16m2_t bindex,vint16m2_t v0,vint16m2_t v1,vint16m2_t v2,vint16m2_t v3,size_t vl)8682 void test_vsuxseg4ei16_v_i16m2_m (vbool8_t mask, int16_t *base, vuint16m2_t bindex, vint16m2_t v0, vint16m2_t v1, vint16m2_t v2, vint16m2_t v3, size_t vl) {
8683   return vsuxseg4ei16_v_i16m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
8684 }
8685 
8686 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i16m4_m(
8687 // CHECK-RV64-NEXT:  entry:
8688 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[V0:%.*]], <vscale x 16 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8689 // CHECK-RV64-NEXT:    ret void
8690 //
test_vsuxseg2ei16_v_i16m4_m(vbool4_t mask,int16_t * base,vuint16m4_t bindex,vint16m4_t v0,vint16m4_t v1,size_t vl)8691 void test_vsuxseg2ei16_v_i16m4_m (vbool4_t mask, int16_t *base, vuint16m4_t bindex, vint16m4_t v0, vint16m4_t v1, size_t vl) {
8692   return vsuxseg2ei16_v_i16m4_m(mask, base, bindex, v0, v1, vl);
8693 }
8694 
8695 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i16mf4_m(
8696 // CHECK-RV64-NEXT:  entry:
8697 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8698 // CHECK-RV64-NEXT:    ret void
8699 //
test_vsuxseg2ei32_v_i16mf4_m(vbool64_t mask,int16_t * base,vuint32mf2_t bindex,vint16mf4_t v0,vint16mf4_t v1,size_t vl)8700 void test_vsuxseg2ei32_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint32mf2_t bindex, vint16mf4_t v0, vint16mf4_t v1, size_t vl) {
8701   return vsuxseg2ei32_v_i16mf4_m(mask, base, bindex, v0, v1, vl);
8702 }
8703 
8704 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i16mf4_m(
8705 // CHECK-RV64-NEXT:  entry:
8706 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8707 // CHECK-RV64-NEXT:    ret void
8708 //
test_vsuxseg3ei32_v_i16mf4_m(vbool64_t mask,int16_t * base,vuint32mf2_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,size_t vl)8709 void test_vsuxseg3ei32_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint32mf2_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, size_t vl) {
8710   return vsuxseg3ei32_v_i16mf4_m(mask, base, bindex, v0, v1, v2, vl);
8711 }
8712 
8713 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i16mf4_m(
8714 // CHECK-RV64-NEXT:  entry:
8715 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8716 // CHECK-RV64-NEXT:    ret void
8717 //
test_vsuxseg4ei32_v_i16mf4_m(vbool64_t mask,int16_t * base,vuint32mf2_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,size_t vl)8718 void test_vsuxseg4ei32_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint32mf2_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, size_t vl) {
8719   return vsuxseg4ei32_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, vl);
8720 }
8721 
8722 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i16mf4_m(
8723 // CHECK-RV64-NEXT:  entry:
8724 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8725 // CHECK-RV64-NEXT:    ret void
8726 //
test_vsuxseg5ei32_v_i16mf4_m(vbool64_t mask,int16_t * base,vuint32mf2_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,size_t vl)8727 void test_vsuxseg5ei32_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint32mf2_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, size_t vl) {
8728   return vsuxseg5ei32_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
8729 }
8730 
8731 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i16mf4_m(
8732 // CHECK-RV64-NEXT:  entry:
8733 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8734 // CHECK-RV64-NEXT:    ret void
8735 //
test_vsuxseg6ei32_v_i16mf4_m(vbool64_t mask,int16_t * base,vuint32mf2_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,vint16mf4_t v5,size_t vl)8736 void test_vsuxseg6ei32_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint32mf2_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, size_t vl) {
8737   return vsuxseg6ei32_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
8738 }
8739 
8740 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i16mf4_m(
8741 // CHECK-RV64-NEXT:  entry:
8742 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8743 // CHECK-RV64-NEXT:    ret void
8744 //
test_vsuxseg7ei32_v_i16mf4_m(vbool64_t mask,int16_t * base,vuint32mf2_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,vint16mf4_t v5,vint16mf4_t v6,size_t vl)8745 void test_vsuxseg7ei32_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint32mf2_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, vint16mf4_t v6, size_t vl) {
8746   return vsuxseg7ei32_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
8747 }
8748 
8749 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i16mf4_m(
8750 // CHECK-RV64-NEXT:  entry:
8751 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], <vscale x 1 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8752 // CHECK-RV64-NEXT:    ret void
8753 //
test_vsuxseg8ei32_v_i16mf4_m(vbool64_t mask,int16_t * base,vuint32mf2_t bindex,vint16mf4_t v0,vint16mf4_t v1,vint16mf4_t v2,vint16mf4_t v3,vint16mf4_t v4,vint16mf4_t v5,vint16mf4_t v6,vint16mf4_t v7,size_t vl)8754 void test_vsuxseg8ei32_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint32mf2_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, vint16mf4_t v6, vint16mf4_t v7, size_t vl) {
8755   return vsuxseg8ei32_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
8756 }
8757 
8758 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i16mf2_m(
8759 // CHECK-RV64-NEXT:  entry:
8760 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8761 // CHECK-RV64-NEXT:    ret void
8762 //
test_vsuxseg2ei32_v_i16mf2_m(vbool32_t mask,int16_t * base,vuint32m1_t bindex,vint16mf2_t v0,vint16mf2_t v1,size_t vl)8763 void test_vsuxseg2ei32_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint32m1_t bindex, vint16mf2_t v0, vint16mf2_t v1, size_t vl) {
8764   return vsuxseg2ei32_v_i16mf2_m(mask, base, bindex, v0, v1, vl);
8765 }
8766 
8767 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i16mf2_m(
8768 // CHECK-RV64-NEXT:  entry:
8769 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8770 // CHECK-RV64-NEXT:    ret void
8771 //
test_vsuxseg3ei32_v_i16mf2_m(vbool32_t mask,int16_t * base,vuint32m1_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,size_t vl)8772 void test_vsuxseg3ei32_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint32m1_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, size_t vl) {
8773   return vsuxseg3ei32_v_i16mf2_m(mask, base, bindex, v0, v1, v2, vl);
8774 }
8775 
8776 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i16mf2_m(
8777 // CHECK-RV64-NEXT:  entry:
8778 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8779 // CHECK-RV64-NEXT:    ret void
8780 //
test_vsuxseg4ei32_v_i16mf2_m(vbool32_t mask,int16_t * base,vuint32m1_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,size_t vl)8781 void test_vsuxseg4ei32_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint32m1_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, size_t vl) {
8782   return vsuxseg4ei32_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
8783 }
8784 
8785 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i16mf2_m(
8786 // CHECK-RV64-NEXT:  entry:
8787 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8788 // CHECK-RV64-NEXT:    ret void
8789 //
test_vsuxseg5ei32_v_i16mf2_m(vbool32_t mask,int16_t * base,vuint32m1_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,size_t vl)8790 void test_vsuxseg5ei32_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint32m1_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, size_t vl) {
8791   return vsuxseg5ei32_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
8792 }
8793 
8794 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i16mf2_m(
8795 // CHECK-RV64-NEXT:  entry:
8796 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8797 // CHECK-RV64-NEXT:    ret void
8798 //
test_vsuxseg6ei32_v_i16mf2_m(vbool32_t mask,int16_t * base,vuint32m1_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,size_t vl)8799 void test_vsuxseg6ei32_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint32m1_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, size_t vl) {
8800   return vsuxseg6ei32_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
8801 }
8802 
8803 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i16mf2_m(
8804 // CHECK-RV64-NEXT:  entry:
8805 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8806 // CHECK-RV64-NEXT:    ret void
8807 //
test_vsuxseg7ei32_v_i16mf2_m(vbool32_t mask,int16_t * base,vuint32m1_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,vint16mf2_t v6,size_t vl)8808 void test_vsuxseg7ei32_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint32m1_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, vint16mf2_t v6, size_t vl) {
8809   return vsuxseg7ei32_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
8810 }
8811 
8812 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i16mf2_m(
8813 // CHECK-RV64-NEXT:  entry:
8814 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], <vscale x 2 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8815 // CHECK-RV64-NEXT:    ret void
8816 //
test_vsuxseg8ei32_v_i16mf2_m(vbool32_t mask,int16_t * base,vuint32m1_t bindex,vint16mf2_t v0,vint16mf2_t v1,vint16mf2_t v2,vint16mf2_t v3,vint16mf2_t v4,vint16mf2_t v5,vint16mf2_t v6,vint16mf2_t v7,size_t vl)8817 void test_vsuxseg8ei32_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint32m1_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, vint16mf2_t v6, vint16mf2_t v7, size_t vl) {
8818   return vsuxseg8ei32_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
8819 }
8820 
8821 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i16m1_m(
8822 // CHECK-RV64-NEXT:  entry:
8823 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8824 // CHECK-RV64-NEXT:    ret void
8825 //
test_vsuxseg2ei32_v_i16m1_m(vbool16_t mask,int16_t * base,vuint32m2_t bindex,vint16m1_t v0,vint16m1_t v1,size_t vl)8826 void test_vsuxseg2ei32_v_i16m1_m (vbool16_t mask, int16_t *base, vuint32m2_t bindex, vint16m1_t v0, vint16m1_t v1, size_t vl) {
8827   return vsuxseg2ei32_v_i16m1_m(mask, base, bindex, v0, v1, vl);
8828 }
8829 
8830 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i16m1_m(
8831 // CHECK-RV64-NEXT:  entry:
8832 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8833 // CHECK-RV64-NEXT:    ret void
8834 //
test_vsuxseg3ei32_v_i16m1_m(vbool16_t mask,int16_t * base,vuint32m2_t bindex,vint16m1_t v0,vint16m1_t v1,vint16m1_t v2,size_t vl)8835 void test_vsuxseg3ei32_v_i16m1_m (vbool16_t mask, int16_t *base, vuint32m2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, size_t vl) {
8836   return vsuxseg3ei32_v_i16m1_m(mask, base, bindex, v0, v1, v2, vl);
8837 }
8838 
8839 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i16m1_m(
8840 // CHECK-RV64-NEXT:  entry:
8841 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8842 // CHECK-RV64-NEXT:    ret void
8843 //
// Masked vsuxseg4ei32 store: 4 i16m1 segment fields, 32-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg4.mask.
void test_vsuxseg4ei32_v_i16m1_m (vbool16_t mask, int16_t *base, vuint32m2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, size_t vl) {
  return vsuxseg4ei32_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
8847 
8848 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i16m1_m(
8849 // CHECK-RV64-NEXT:  entry:
8850 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8851 // CHECK-RV64-NEXT:    ret void
8852 //
// Masked vsuxseg5ei32 store: 5 i16m1 segment fields, 32-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg5.mask.
void test_vsuxseg5ei32_v_i16m1_m (vbool16_t mask, int16_t *base, vuint32m2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, size_t vl) {
  return vsuxseg5ei32_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
8856 
8857 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i16m1_m(
8858 // CHECK-RV64-NEXT:  entry:
8859 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8860 // CHECK-RV64-NEXT:    ret void
8861 //
// Masked vsuxseg6ei32 store: 6 i16m1 segment fields, 32-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg6.mask.
void test_vsuxseg6ei32_v_i16m1_m (vbool16_t mask, int16_t *base, vuint32m2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, size_t vl) {
  return vsuxseg6ei32_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
8865 
8866 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i16m1_m(
8867 // CHECK-RV64-NEXT:  entry:
8868 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8869 // CHECK-RV64-NEXT:    ret void
8870 //
// Masked vsuxseg7ei32 store: 7 i16m1 segment fields, 32-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg7.mask.
void test_vsuxseg7ei32_v_i16m1_m (vbool16_t mask, int16_t *base, vuint32m2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, vint16m1_t v6, size_t vl) {
  return vsuxseg7ei32_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
8874 
8875 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i16m1_m(
8876 // CHECK-RV64-NEXT:  entry:
8877 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], <vscale x 4 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8878 // CHECK-RV64-NEXT:    ret void
8879 //
// Masked vsuxseg8ei32 store: 8 i16m1 segment fields, 32-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg8.mask.
void test_vsuxseg8ei32_v_i16m1_m (vbool16_t mask, int16_t *base, vuint32m2_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, vint16m1_t v6, vint16m1_t v7, size_t vl) {
  return vsuxseg8ei32_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
8883 
8884 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i16m2_m(
8885 // CHECK-RV64-NEXT:  entry:
8886 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8887 // CHECK-RV64-NEXT:    ret void
8888 //
// Masked vsuxseg2ei32 store: 2 i16m2 segment fields, 32-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg2.mask.
void test_vsuxseg2ei32_v_i16m2_m (vbool8_t mask, int16_t *base, vuint32m4_t bindex, vint16m2_t v0, vint16m2_t v1, size_t vl) {
  return vsuxseg2ei32_v_i16m2_m(mask, base, bindex, v0, v1, vl);
}
8892 
8893 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i16m2_m(
8894 // CHECK-RV64-NEXT:  entry:
8895 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8896 // CHECK-RV64-NEXT:    ret void
8897 //
// Masked vsuxseg3ei32 store: 3 i16m2 segment fields, 32-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg3.mask.
void test_vsuxseg3ei32_v_i16m2_m (vbool8_t mask, int16_t *base, vuint32m4_t bindex, vint16m2_t v0, vint16m2_t v1, vint16m2_t v2, size_t vl) {
  return vsuxseg3ei32_v_i16m2_m(mask, base, bindex, v0, v1, v2, vl);
}
8901 
8902 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i16m2_m(
8903 // CHECK-RV64-NEXT:  entry:
8904 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], <vscale x 8 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8905 // CHECK-RV64-NEXT:    ret void
8906 //
// Masked vsuxseg4ei32 store: 4 i16m2 segment fields, 32-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg4.mask.
void test_vsuxseg4ei32_v_i16m2_m (vbool8_t mask, int16_t *base, vuint32m4_t bindex, vint16m2_t v0, vint16m2_t v1, vint16m2_t v2, vint16m2_t v3, size_t vl) {
  return vsuxseg4ei32_v_i16m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
8910 
8911 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i16m4_m(
8912 // CHECK-RV64-NEXT:  entry:
8913 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i32.i64(<vscale x 16 x i16> [[V0:%.*]], <vscale x 16 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8914 // CHECK-RV64-NEXT:    ret void
8915 //
// Masked vsuxseg2ei32 store: 2 i16m4 segment fields, 32-bit indices (the
// widest LMUL with a seg2 form here); assertions above pin the lowering.
void test_vsuxseg2ei32_v_i16m4_m (vbool4_t mask, int16_t *base, vuint32m8_t bindex, vint16m4_t v0, vint16m4_t v1, size_t vl) {
  return vsuxseg2ei32_v_i16m4_m(mask, base, bindex, v0, v1, vl);
}
8919 
8920 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i16mf4_m(
8921 // CHECK-RV64-NEXT:  entry:
8922 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8923 // CHECK-RV64-NEXT:    ret void
8924 //
// Masked vsuxseg2ei64 store: 2 i16mf4 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg2.mask.
void test_vsuxseg2ei64_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint64m1_t bindex, vint16mf4_t v0, vint16mf4_t v1, size_t vl) {
  return vsuxseg2ei64_v_i16mf4_m(mask, base, bindex, v0, v1, vl);
}
8928 
8929 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i16mf4_m(
8930 // CHECK-RV64-NEXT:  entry:
8931 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8932 // CHECK-RV64-NEXT:    ret void
8933 //
// Masked vsuxseg3ei64 store: 3 i16mf4 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg3.mask.
void test_vsuxseg3ei64_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint64m1_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, size_t vl) {
  return vsuxseg3ei64_v_i16mf4_m(mask, base, bindex, v0, v1, v2, vl);
}
8937 
8938 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i16mf4_m(
8939 // CHECK-RV64-NEXT:  entry:
8940 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8941 // CHECK-RV64-NEXT:    ret void
8942 //
// Masked vsuxseg4ei64 store: 4 i16mf4 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg4.mask.
void test_vsuxseg4ei64_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint64m1_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, size_t vl) {
  return vsuxseg4ei64_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
8946 
8947 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i16mf4_m(
8948 // CHECK-RV64-NEXT:  entry:
8949 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8950 // CHECK-RV64-NEXT:    ret void
8951 //
// Masked vsuxseg5ei64 store: 5 i16mf4 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg5.mask.
void test_vsuxseg5ei64_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint64m1_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, size_t vl) {
  return vsuxseg5ei64_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
8955 
8956 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i16mf4_m(
8957 // CHECK-RV64-NEXT:  entry:
8958 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8959 // CHECK-RV64-NEXT:    ret void
8960 //
// Masked vsuxseg6ei64 store: 6 i16mf4 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg6.mask.
void test_vsuxseg6ei64_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint64m1_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, size_t vl) {
  return vsuxseg6ei64_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
8964 
8965 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i16mf4_m(
8966 // CHECK-RV64-NEXT:  entry:
8967 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8968 // CHECK-RV64-NEXT:    ret void
8969 //
// Masked vsuxseg7ei64 store: 7 i16mf4 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg7.mask.
void test_vsuxseg7ei64_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint64m1_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, vint16mf4_t v6, size_t vl) {
  return vsuxseg7ei64_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
8973 
8974 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i16mf4_m(
8975 // CHECK-RV64-NEXT:  entry:
8976 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], <vscale x 1 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8977 // CHECK-RV64-NEXT:    ret void
8978 //
// Masked vsuxseg8ei64 store: 8 i16mf4 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg8.mask.
void test_vsuxseg8ei64_v_i16mf4_m (vbool64_t mask, int16_t *base, vuint64m1_t bindex, vint16mf4_t v0, vint16mf4_t v1, vint16mf4_t v2, vint16mf4_t v3, vint16mf4_t v4, vint16mf4_t v5, vint16mf4_t v6, vint16mf4_t v7, size_t vl) {
  return vsuxseg8ei64_v_i16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
8982 
8983 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i16mf2_m(
8984 // CHECK-RV64-NEXT:  entry:
8985 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8986 // CHECK-RV64-NEXT:    ret void
8987 //
// Masked vsuxseg2ei64 store: 2 i16mf2 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg2.mask.
void test_vsuxseg2ei64_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint64m2_t bindex, vint16mf2_t v0, vint16mf2_t v1, size_t vl) {
  return vsuxseg2ei64_v_i16mf2_m(mask, base, bindex, v0, v1, vl);
}
8991 
8992 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i16mf2_m(
8993 // CHECK-RV64-NEXT:  entry:
8994 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
8995 // CHECK-RV64-NEXT:    ret void
8996 //
// Masked vsuxseg3ei64 store: 3 i16mf2 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg3.mask.
void test_vsuxseg3ei64_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint64m2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, size_t vl) {
  return vsuxseg3ei64_v_i16mf2_m(mask, base, bindex, v0, v1, v2, vl);
}
9000 
9001 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i16mf2_m(
9002 // CHECK-RV64-NEXT:  entry:
9003 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9004 // CHECK-RV64-NEXT:    ret void
9005 //
// Masked vsuxseg4ei64 store: 4 i16mf2 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg4.mask.
void test_vsuxseg4ei64_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint64m2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, size_t vl) {
  return vsuxseg4ei64_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
9009 
9010 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i16mf2_m(
9011 // CHECK-RV64-NEXT:  entry:
9012 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9013 // CHECK-RV64-NEXT:    ret void
9014 //
// Masked vsuxseg5ei64 store: 5 i16mf2 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg5.mask.
void test_vsuxseg5ei64_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint64m2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, size_t vl) {
  return vsuxseg5ei64_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
9018 
9019 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i16mf2_m(
9020 // CHECK-RV64-NEXT:  entry:
9021 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9022 // CHECK-RV64-NEXT:    ret void
9023 //
// Masked vsuxseg6ei64 store: 6 i16mf2 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg6.mask.
void test_vsuxseg6ei64_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint64m2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, size_t vl) {
  return vsuxseg6ei64_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
9027 
9028 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i16mf2_m(
9029 // CHECK-RV64-NEXT:  entry:
9030 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9031 // CHECK-RV64-NEXT:    ret void
9032 //
// Masked vsuxseg7ei64 store: 7 i16mf2 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg7.mask.
void test_vsuxseg7ei64_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint64m2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, vint16mf2_t v6, size_t vl) {
  return vsuxseg7ei64_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
9036 
9037 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i16mf2_m(
9038 // CHECK-RV64-NEXT:  entry:
9039 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], <vscale x 2 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9040 // CHECK-RV64-NEXT:    ret void
9041 //
// Masked vsuxseg8ei64 store: 8 i16mf2 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg8.mask.
void test_vsuxseg8ei64_v_i16mf2_m (vbool32_t mask, int16_t *base, vuint64m2_t bindex, vint16mf2_t v0, vint16mf2_t v1, vint16mf2_t v2, vint16mf2_t v3, vint16mf2_t v4, vint16mf2_t v5, vint16mf2_t v6, vint16mf2_t v7, size_t vl) {
  return vsuxseg8ei64_v_i16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
9045 
9046 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i16m1_m(
9047 // CHECK-RV64-NEXT:  entry:
9048 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9049 // CHECK-RV64-NEXT:    ret void
9050 //
// Masked vsuxseg2ei64 store: 2 i16m1 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg2.mask.
void test_vsuxseg2ei64_v_i16m1_m (vbool16_t mask, int16_t *base, vuint64m4_t bindex, vint16m1_t v0, vint16m1_t v1, size_t vl) {
  return vsuxseg2ei64_v_i16m1_m(mask, base, bindex, v0, v1, vl);
}
9054 
9055 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i16m1_m(
9056 // CHECK-RV64-NEXT:  entry:
9057 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9058 // CHECK-RV64-NEXT:    ret void
9059 //
// Masked vsuxseg3ei64 store: 3 i16m1 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg3.mask.
void test_vsuxseg3ei64_v_i16m1_m (vbool16_t mask, int16_t *base, vuint64m4_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, size_t vl) {
  return vsuxseg3ei64_v_i16m1_m(mask, base, bindex, v0, v1, v2, vl);
}
9063 
9064 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i16m1_m(
9065 // CHECK-RV64-NEXT:  entry:
9066 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9067 // CHECK-RV64-NEXT:    ret void
9068 //
// Masked vsuxseg4ei64 store: 4 i16m1 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg4.mask.
void test_vsuxseg4ei64_v_i16m1_m (vbool16_t mask, int16_t *base, vuint64m4_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, size_t vl) {
  return vsuxseg4ei64_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
9072 
9073 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i16m1_m(
9074 // CHECK-RV64-NEXT:  entry:
9075 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9076 // CHECK-RV64-NEXT:    ret void
9077 //
// Masked vsuxseg5ei64 store: 5 i16m1 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg5.mask.
void test_vsuxseg5ei64_v_i16m1_m (vbool16_t mask, int16_t *base, vuint64m4_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, size_t vl) {
  return vsuxseg5ei64_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
9081 
9082 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i16m1_m(
9083 // CHECK-RV64-NEXT:  entry:
9084 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9085 // CHECK-RV64-NEXT:    ret void
9086 //
// Masked vsuxseg6ei64 store: 6 i16m1 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg6.mask.
void test_vsuxseg6ei64_v_i16m1_m (vbool16_t mask, int16_t *base, vuint64m4_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, size_t vl) {
  return vsuxseg6ei64_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
9090 
9091 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i16m1_m(
9092 // CHECK-RV64-NEXT:  entry:
9093 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9094 // CHECK-RV64-NEXT:    ret void
9095 //
// Masked vsuxseg7ei64 store: 7 i16m1 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg7.mask.
void test_vsuxseg7ei64_v_i16m1_m (vbool16_t mask, int16_t *base, vuint64m4_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, vint16m1_t v6, size_t vl) {
  return vsuxseg7ei64_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
9099 
9100 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i16m1_m(
9101 // CHECK-RV64-NEXT:  entry:
9102 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], <vscale x 4 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9103 // CHECK-RV64-NEXT:    ret void
9104 //
// Masked vsuxseg8ei64 store: 8 i16m1 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg8.mask.
void test_vsuxseg8ei64_v_i16m1_m (vbool16_t mask, int16_t *base, vuint64m4_t bindex, vint16m1_t v0, vint16m1_t v1, vint16m1_t v2, vint16m1_t v3, vint16m1_t v4, vint16m1_t v5, vint16m1_t v6, vint16m1_t v7, size_t vl) {
  return vsuxseg8ei64_v_i16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
9108 
9109 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i16m2_m(
9110 // CHECK-RV64-NEXT:  entry:
9111 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9112 // CHECK-RV64-NEXT:    ret void
9113 //
// Masked vsuxseg2ei64 store: 2 i16m2 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg2.mask.
void test_vsuxseg2ei64_v_i16m2_m (vbool8_t mask, int16_t *base, vuint64m8_t bindex, vint16m2_t v0, vint16m2_t v1, size_t vl) {
  return vsuxseg2ei64_v_i16m2_m(mask, base, bindex, v0, v1, vl);
}
9117 
9118 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i16m2_m(
9119 // CHECK-RV64-NEXT:  entry:
9120 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9121 // CHECK-RV64-NEXT:    ret void
9122 //
// Masked vsuxseg3ei64 store: 3 i16m2 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg3.mask.
void test_vsuxseg3ei64_v_i16m2_m (vbool8_t mask, int16_t *base, vuint64m8_t bindex, vint16m2_t v0, vint16m2_t v1, vint16m2_t v2, size_t vl) {
  return vsuxseg3ei64_v_i16m2_m(mask, base, bindex, v0, v1, v2, vl);
}
9126 
9127 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i16m2_m(
9128 // CHECK-RV64-NEXT:  entry:
9129 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], <vscale x 8 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9130 // CHECK-RV64-NEXT:    ret void
9131 //
// Masked vsuxseg4ei64 store: 4 i16m2 segment fields, 64-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg4.mask.
void test_vsuxseg4ei64_v_i16m2_m (vbool8_t mask, int16_t *base, vuint64m8_t bindex, vint16m2_t v0, vint16m2_t v1, vint16m2_t v2, vint16m2_t v3, size_t vl) {
  return vsuxseg4ei64_v_i16m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
9135 
9136 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i32mf2_m(
9137 // CHECK-RV64-NEXT:  entry:
9138 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9139 // CHECK-RV64-NEXT:    ret void
9140 //
// Masked vsuxseg2ei8 store: 2 i32mf2 segment fields, 8-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg2.mask.
void test_vsuxseg2ei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t v0, vint32mf2_t v1, size_t vl) {
  return vsuxseg2ei8_v_i32mf2_m(mask, base, bindex, v0, v1, vl);
}
9144 
9145 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i32mf2_m(
9146 // CHECK-RV64-NEXT:  entry:
9147 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9148 // CHECK-RV64-NEXT:    ret void
9149 //
// Masked vsuxseg3ei8 store: 3 i32mf2 segment fields, 8-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg3.mask.
void test_vsuxseg3ei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, size_t vl) {
  return vsuxseg3ei8_v_i32mf2_m(mask, base, bindex, v0, v1, v2, vl);
}
9153 
9154 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i32mf2_m(
9155 // CHECK-RV64-NEXT:  entry:
9156 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9157 // CHECK-RV64-NEXT:    ret void
9158 //
// Masked vsuxseg4ei8 store: 4 i32mf2 segment fields, 8-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg4.mask.
void test_vsuxseg4ei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, size_t vl) {
  return vsuxseg4ei8_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
9162 
9163 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i32mf2_m(
9164 // CHECK-RV64-NEXT:  entry:
9165 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9166 // CHECK-RV64-NEXT:    ret void
9167 //
// Masked vsuxseg5ei8 store: 5 i32mf2 segment fields, 8-bit indices; the
// autogenerated assertions above pin lowering to @llvm.riscv.vsuxseg5.mask.
void test_vsuxseg5ei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, size_t vl) {
  return vsuxseg5ei8_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
9171 
9172 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i32mf2_m(
9173 // CHECK-RV64-NEXT:  entry:
9174 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9175 // CHECK-RV64-NEXT:    ret void
9176 //
test_vsuxseg6ei8_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint8mf8_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,size_t vl)9177 void test_vsuxseg6ei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, size_t vl) {
9178   return vsuxseg6ei8_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
9179 }
9180 
9181 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i32mf2_m(
9182 // CHECK-RV64-NEXT:  entry:
9183 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9184 // CHECK-RV64-NEXT:    ret void
9185 //
test_vsuxseg7ei8_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint8mf8_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,vint32mf2_t v6,size_t vl)9186 void test_vsuxseg7ei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, vint32mf2_t v6, size_t vl) {
9187   return vsuxseg7ei8_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
9188 }
9189 
9190 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i32mf2_m(
9191 // CHECK-RV64-NEXT:  entry:
9192 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], <vscale x 1 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9193 // CHECK-RV64-NEXT:    ret void
9194 //
test_vsuxseg8ei8_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint8mf8_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,vint32mf2_t v6,vint32mf2_t v7,size_t vl)9195 void test_vsuxseg8ei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, vint32mf2_t v6, vint32mf2_t v7, size_t vl) {
9196   return vsuxseg8ei8_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
9197 }
9198 
9199 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i32m1_m(
9200 // CHECK-RV64-NEXT:  entry:
9201 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9202 // CHECK-RV64-NEXT:    ret void
9203 //
test_vsuxseg2ei8_v_i32m1_m(vbool32_t mask,int32_t * base,vuint8mf4_t bindex,vint32m1_t v0,vint32m1_t v1,size_t vl)9204 void test_vsuxseg2ei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t v0, vint32m1_t v1, size_t vl) {
9205   return vsuxseg2ei8_v_i32m1_m(mask, base, bindex, v0, v1, vl);
9206 }
9207 
9208 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i32m1_m(
9209 // CHECK-RV64-NEXT:  entry:
9210 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9211 // CHECK-RV64-NEXT:    ret void
9212 //
test_vsuxseg3ei8_v_i32m1_m(vbool32_t mask,int32_t * base,vuint8mf4_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,size_t vl)9213 void test_vsuxseg3ei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, size_t vl) {
9214   return vsuxseg3ei8_v_i32m1_m(mask, base, bindex, v0, v1, v2, vl);
9215 }
9216 
9217 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i32m1_m(
9218 // CHECK-RV64-NEXT:  entry:
9219 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9220 // CHECK-RV64-NEXT:    ret void
9221 //
test_vsuxseg4ei8_v_i32m1_m(vbool32_t mask,int32_t * base,vuint8mf4_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,size_t vl)9222 void test_vsuxseg4ei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, size_t vl) {
9223   return vsuxseg4ei8_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
9224 }
9225 
9226 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i32m1_m(
9227 // CHECK-RV64-NEXT:  entry:
9228 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9229 // CHECK-RV64-NEXT:    ret void
9230 //
test_vsuxseg5ei8_v_i32m1_m(vbool32_t mask,int32_t * base,vuint8mf4_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,size_t vl)9231 void test_vsuxseg5ei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, size_t vl) {
9232   return vsuxseg5ei8_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
9233 }
9234 
9235 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i32m1_m(
9236 // CHECK-RV64-NEXT:  entry:
9237 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9238 // CHECK-RV64-NEXT:    ret void
9239 //
test_vsuxseg6ei8_v_i32m1_m(vbool32_t mask,int32_t * base,vuint8mf4_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,size_t vl)9240 void test_vsuxseg6ei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, size_t vl) {
9241   return vsuxseg6ei8_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
9242 }
9243 
9244 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i32m1_m(
9245 // CHECK-RV64-NEXT:  entry:
9246 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9247 // CHECK-RV64-NEXT:    ret void
9248 //
test_vsuxseg7ei8_v_i32m1_m(vbool32_t mask,int32_t * base,vuint8mf4_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,vint32m1_t v6,size_t vl)9249 void test_vsuxseg7ei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, vint32m1_t v6, size_t vl) {
9250   return vsuxseg7ei8_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
9251 }
9252 
9253 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i32m1_m(
9254 // CHECK-RV64-NEXT:  entry:
9255 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], <vscale x 2 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9256 // CHECK-RV64-NEXT:    ret void
9257 //
test_vsuxseg8ei8_v_i32m1_m(vbool32_t mask,int32_t * base,vuint8mf4_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,vint32m1_t v6,vint32m1_t v7,size_t vl)9258 void test_vsuxseg8ei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, vint32m1_t v6, vint32m1_t v7, size_t vl) {
9259   return vsuxseg8ei8_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
9260 }
9261 
9262 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i32m2_m(
9263 // CHECK-RV64-NEXT:  entry:
9264 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9265 // CHECK-RV64-NEXT:    ret void
9266 //
test_vsuxseg2ei8_v_i32m2_m(vbool16_t mask,int32_t * base,vuint8mf2_t bindex,vint32m2_t v0,vint32m2_t v1,size_t vl)9267 void test_vsuxseg2ei8_v_i32m2_m (vbool16_t mask, int32_t *base, vuint8mf2_t bindex, vint32m2_t v0, vint32m2_t v1, size_t vl) {
9268   return vsuxseg2ei8_v_i32m2_m(mask, base, bindex, v0, v1, vl);
9269 }
9270 
9271 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i32m2_m(
9272 // CHECK-RV64-NEXT:  entry:
9273 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9274 // CHECK-RV64-NEXT:    ret void
9275 //
test_vsuxseg3ei8_v_i32m2_m(vbool16_t mask,int32_t * base,vuint8mf2_t bindex,vint32m2_t v0,vint32m2_t v1,vint32m2_t v2,size_t vl)9276 void test_vsuxseg3ei8_v_i32m2_m (vbool16_t mask, int32_t *base, vuint8mf2_t bindex, vint32m2_t v0, vint32m2_t v1, vint32m2_t v2, size_t vl) {
9277   return vsuxseg3ei8_v_i32m2_m(mask, base, bindex, v0, v1, v2, vl);
9278 }
9279 
9280 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i32m2_m(
9281 // CHECK-RV64-NEXT:  entry:
9282 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], <vscale x 4 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9283 // CHECK-RV64-NEXT:    ret void
9284 //
test_vsuxseg4ei8_v_i32m2_m(vbool16_t mask,int32_t * base,vuint8mf2_t bindex,vint32m2_t v0,vint32m2_t v1,vint32m2_t v2,vint32m2_t v3,size_t vl)9285 void test_vsuxseg4ei8_v_i32m2_m (vbool16_t mask, int32_t *base, vuint8mf2_t bindex, vint32m2_t v0, vint32m2_t v1, vint32m2_t v2, vint32m2_t v3, size_t vl) {
9286   return vsuxseg4ei8_v_i32m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
9287 }
9288 
9289 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i32m4_m(
9290 // CHECK-RV64-NEXT:  entry:
9291 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i8.i64(<vscale x 8 x i32> [[V0:%.*]], <vscale x 8 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9292 // CHECK-RV64-NEXT:    ret void
9293 //
test_vsuxseg2ei8_v_i32m4_m(vbool8_t mask,int32_t * base,vuint8m1_t bindex,vint32m4_t v0,vint32m4_t v1,size_t vl)9294 void test_vsuxseg2ei8_v_i32m4_m (vbool8_t mask, int32_t *base, vuint8m1_t bindex, vint32m4_t v0, vint32m4_t v1, size_t vl) {
9295   return vsuxseg2ei8_v_i32m4_m(mask, base, bindex, v0, v1, vl);
9296 }
9297 
9298 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i32mf2_m(
9299 // CHECK-RV64-NEXT:  entry:
9300 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9301 // CHECK-RV64-NEXT:    ret void
9302 //
test_vsuxseg2ei16_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint16mf4_t bindex,vint32mf2_t v0,vint32mf2_t v1,size_t vl)9303 void test_vsuxseg2ei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t v0, vint32mf2_t v1, size_t vl) {
9304   return vsuxseg2ei16_v_i32mf2_m(mask, base, bindex, v0, v1, vl);
9305 }
9306 
9307 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i32mf2_m(
9308 // CHECK-RV64-NEXT:  entry:
9309 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9310 // CHECK-RV64-NEXT:    ret void
9311 //
test_vsuxseg3ei16_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint16mf4_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,size_t vl)9312 void test_vsuxseg3ei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, size_t vl) {
9313   return vsuxseg3ei16_v_i32mf2_m(mask, base, bindex, v0, v1, v2, vl);
9314 }
9315 
9316 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i32mf2_m(
9317 // CHECK-RV64-NEXT:  entry:
9318 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9319 // CHECK-RV64-NEXT:    ret void
9320 //
test_vsuxseg4ei16_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint16mf4_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,size_t vl)9321 void test_vsuxseg4ei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, size_t vl) {
9322   return vsuxseg4ei16_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
9323 }
9324 
9325 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i32mf2_m(
9326 // CHECK-RV64-NEXT:  entry:
9327 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9328 // CHECK-RV64-NEXT:    ret void
9329 //
test_vsuxseg5ei16_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint16mf4_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,size_t vl)9330 void test_vsuxseg5ei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, size_t vl) {
9331   return vsuxseg5ei16_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
9332 }
9333 
9334 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i32mf2_m(
9335 // CHECK-RV64-NEXT:  entry:
9336 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9337 // CHECK-RV64-NEXT:    ret void
9338 //
test_vsuxseg6ei16_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint16mf4_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,size_t vl)9339 void test_vsuxseg6ei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, size_t vl) {
9340   return vsuxseg6ei16_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
9341 }
9342 
9343 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i32mf2_m(
9344 // CHECK-RV64-NEXT:  entry:
9345 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9346 // CHECK-RV64-NEXT:    ret void
9347 //
test_vsuxseg7ei16_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint16mf4_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,vint32mf2_t v6,size_t vl)9348 void test_vsuxseg7ei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, vint32mf2_t v6, size_t vl) {
9349   return vsuxseg7ei16_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
9350 }
9351 
9352 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i32mf2_m(
9353 // CHECK-RV64-NEXT:  entry:
9354 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], <vscale x 1 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9355 // CHECK-RV64-NEXT:    ret void
9356 //
test_vsuxseg8ei16_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint16mf4_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,vint32mf2_t v6,vint32mf2_t v7,size_t vl)9357 void test_vsuxseg8ei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, vint32mf2_t v6, vint32mf2_t v7, size_t vl) {
9358   return vsuxseg8ei16_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
9359 }
9360 
9361 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i32m1_m(
9362 // CHECK-RV64-NEXT:  entry:
9363 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9364 // CHECK-RV64-NEXT:    ret void
9365 //
test_vsuxseg2ei16_v_i32m1_m(vbool32_t mask,int32_t * base,vuint16mf2_t bindex,vint32m1_t v0,vint32m1_t v1,size_t vl)9366 void test_vsuxseg2ei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t v0, vint32m1_t v1, size_t vl) {
9367   return vsuxseg2ei16_v_i32m1_m(mask, base, bindex, v0, v1, vl);
9368 }
9369 
9370 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i32m1_m(
9371 // CHECK-RV64-NEXT:  entry:
9372 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9373 // CHECK-RV64-NEXT:    ret void
9374 //
test_vsuxseg3ei16_v_i32m1_m(vbool32_t mask,int32_t * base,vuint16mf2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,size_t vl)9375 void test_vsuxseg3ei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, size_t vl) {
9376   return vsuxseg3ei16_v_i32m1_m(mask, base, bindex, v0, v1, v2, vl);
9377 }
9378 
9379 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i32m1_m(
9380 // CHECK-RV64-NEXT:  entry:
9381 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9382 // CHECK-RV64-NEXT:    ret void
9383 //
test_vsuxseg4ei16_v_i32m1_m(vbool32_t mask,int32_t * base,vuint16mf2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,size_t vl)9384 void test_vsuxseg4ei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, size_t vl) {
9385   return vsuxseg4ei16_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
9386 }
9387 
9388 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i32m1_m(
9389 // CHECK-RV64-NEXT:  entry:
9390 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9391 // CHECK-RV64-NEXT:    ret void
9392 //
test_vsuxseg5ei16_v_i32m1_m(vbool32_t mask,int32_t * base,vuint16mf2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,size_t vl)9393 void test_vsuxseg5ei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, size_t vl) {
9394   return vsuxseg5ei16_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
9395 }
9396 
9397 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i32m1_m(
9398 // CHECK-RV64-NEXT:  entry:
9399 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9400 // CHECK-RV64-NEXT:    ret void
9401 //
test_vsuxseg6ei16_v_i32m1_m(vbool32_t mask,int32_t * base,vuint16mf2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,size_t vl)9402 void test_vsuxseg6ei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, size_t vl) {
9403   return vsuxseg6ei16_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
9404 }
9405 
9406 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i32m1_m(
9407 // CHECK-RV64-NEXT:  entry:
9408 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9409 // CHECK-RV64-NEXT:    ret void
9410 //
test_vsuxseg7ei16_v_i32m1_m(vbool32_t mask,int32_t * base,vuint16mf2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,vint32m1_t v6,size_t vl)9411 void test_vsuxseg7ei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, vint32m1_t v6, size_t vl) {
9412   return vsuxseg7ei16_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
9413 }
9414 
9415 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i32m1_m(
9416 // CHECK-RV64-NEXT:  entry:
9417 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], <vscale x 2 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9418 // CHECK-RV64-NEXT:    ret void
9419 //
test_vsuxseg8ei16_v_i32m1_m(vbool32_t mask,int32_t * base,vuint16mf2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,vint32m1_t v6,vint32m1_t v7,size_t vl)9420 void test_vsuxseg8ei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, vint32m1_t v6, vint32m1_t v7, size_t vl) {
9421   return vsuxseg8ei16_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
9422 }
9423 
9424 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i32m2_m(
9425 // CHECK-RV64-NEXT:  entry:
9426 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9427 // CHECK-RV64-NEXT:    ret void
9428 //
test_vsuxseg2ei16_v_i32m2_m(vbool16_t mask,int32_t * base,vuint16m1_t bindex,vint32m2_t v0,vint32m2_t v1,size_t vl)9429 void test_vsuxseg2ei16_v_i32m2_m (vbool16_t mask, int32_t *base, vuint16m1_t bindex, vint32m2_t v0, vint32m2_t v1, size_t vl) {
9430   return vsuxseg2ei16_v_i32m2_m(mask, base, bindex, v0, v1, vl);
9431 }
9432 
9433 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i32m2_m(
9434 // CHECK-RV64-NEXT:  entry:
9435 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9436 // CHECK-RV64-NEXT:    ret void
9437 //
test_vsuxseg3ei16_v_i32m2_m(vbool16_t mask,int32_t * base,vuint16m1_t bindex,vint32m2_t v0,vint32m2_t v1,vint32m2_t v2,size_t vl)9438 void test_vsuxseg3ei16_v_i32m2_m (vbool16_t mask, int32_t *base, vuint16m1_t bindex, vint32m2_t v0, vint32m2_t v1, vint32m2_t v2, size_t vl) {
9439   return vsuxseg3ei16_v_i32m2_m(mask, base, bindex, v0, v1, v2, vl);
9440 }
9441 
9442 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i32m2_m(
9443 // CHECK-RV64-NEXT:  entry:
9444 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], <vscale x 4 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9445 // CHECK-RV64-NEXT:    ret void
9446 //
test_vsuxseg4ei16_v_i32m2_m(vbool16_t mask,int32_t * base,vuint16m1_t bindex,vint32m2_t v0,vint32m2_t v1,vint32m2_t v2,vint32m2_t v3,size_t vl)9447 void test_vsuxseg4ei16_v_i32m2_m (vbool16_t mask, int32_t *base, vuint16m1_t bindex, vint32m2_t v0, vint32m2_t v1, vint32m2_t v2, vint32m2_t v3, size_t vl) {
9448   return vsuxseg4ei16_v_i32m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
9449 }
9450 
9451 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i32m4_m(
9452 // CHECK-RV64-NEXT:  entry:
9453 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[V0:%.*]], <vscale x 8 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9454 // CHECK-RV64-NEXT:    ret void
9455 //
test_vsuxseg2ei16_v_i32m4_m(vbool8_t mask,int32_t * base,vuint16m2_t bindex,vint32m4_t v0,vint32m4_t v1,size_t vl)9456 void test_vsuxseg2ei16_v_i32m4_m (vbool8_t mask, int32_t *base, vuint16m2_t bindex, vint32m4_t v0, vint32m4_t v1, size_t vl) {
9457   return vsuxseg2ei16_v_i32m4_m(mask, base, bindex, v0, v1, vl);
9458 }
9459 
9460 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i32mf2_m(
9461 // CHECK-RV64-NEXT:  entry:
9462 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9463 // CHECK-RV64-NEXT:    ret void
9464 //
test_vsuxseg2ei32_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint32mf2_t bindex,vint32mf2_t v0,vint32mf2_t v1,size_t vl)9465 void test_vsuxseg2ei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t v0, vint32mf2_t v1, size_t vl) {
9466   return vsuxseg2ei32_v_i32mf2_m(mask, base, bindex, v0, v1, vl);
9467 }
9468 
9469 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i32mf2_m(
9470 // CHECK-RV64-NEXT:  entry:
9471 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9472 // CHECK-RV64-NEXT:    ret void
9473 //
test_vsuxseg3ei32_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint32mf2_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,size_t vl)9474 void test_vsuxseg3ei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, size_t vl) {
9475   return vsuxseg3ei32_v_i32mf2_m(mask, base, bindex, v0, v1, v2, vl);
9476 }
9477 
9478 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i32mf2_m(
9479 // CHECK-RV64-NEXT:  entry:
9480 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9481 // CHECK-RV64-NEXT:    ret void
9482 //
test_vsuxseg4ei32_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint32mf2_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,size_t vl)9483 void test_vsuxseg4ei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, size_t vl) {
9484   return vsuxseg4ei32_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
9485 }
9486 
9487 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i32mf2_m(
9488 // CHECK-RV64-NEXT:  entry:
9489 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9490 // CHECK-RV64-NEXT:    ret void
9491 //
test_vsuxseg5ei32_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint32mf2_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,size_t vl)9492 void test_vsuxseg5ei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, size_t vl) {
9493   return vsuxseg5ei32_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
9494 }
9495 
9496 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i32mf2_m(
9497 // CHECK-RV64-NEXT:  entry:
9498 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9499 // CHECK-RV64-NEXT:    ret void
9500 //
test_vsuxseg6ei32_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint32mf2_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,size_t vl)9501 void test_vsuxseg6ei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, size_t vl) {
9502   return vsuxseg6ei32_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
9503 }
9504 
9505 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i32mf2_m(
9506 // CHECK-RV64-NEXT:  entry:
9507 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9508 // CHECK-RV64-NEXT:    ret void
9509 //
test_vsuxseg7ei32_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint32mf2_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,vint32mf2_t v6,size_t vl)9510 void test_vsuxseg7ei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, vint32mf2_t v6, size_t vl) {
9511   return vsuxseg7ei32_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
9512 }
9513 
9514 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i32mf2_m(
9515 // CHECK-RV64-NEXT:  entry:
9516 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], <vscale x 1 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9517 // CHECK-RV64-NEXT:    ret void
9518 //
test_vsuxseg8ei32_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint32mf2_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,vint32mf2_t v6,vint32mf2_t v7,size_t vl)9519 void test_vsuxseg8ei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, vint32mf2_t v6, vint32mf2_t v7, size_t vl) {
9520   return vsuxseg8ei32_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
9521 }
9522 
9523 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i32m1_m(
9524 // CHECK-RV64-NEXT:  entry:
9525 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9526 // CHECK-RV64-NEXT:    ret void
9527 //
test_vsuxseg2ei32_v_i32m1_m(vbool32_t mask,int32_t * base,vuint32m1_t bindex,vint32m1_t v0,vint32m1_t v1,size_t vl)9528 void test_vsuxseg2ei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t v0, vint32m1_t v1, size_t vl) {
9529   return vsuxseg2ei32_v_i32m1_m(mask, base, bindex, v0, v1, vl);
9530 }
9531 
9532 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i32m1_m(
9533 // CHECK-RV64-NEXT:  entry:
9534 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9535 // CHECK-RV64-NEXT:    ret void
9536 //
test_vsuxseg3ei32_v_i32m1_m(vbool32_t mask,int32_t * base,vuint32m1_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,size_t vl)9537 void test_vsuxseg3ei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, size_t vl) {
9538   return vsuxseg3ei32_v_i32m1_m(mask, base, bindex, v0, v1, v2, vl);
9539 }
9540 
9541 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i32m1_m(
9542 // CHECK-RV64-NEXT:  entry:
9543 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9544 // CHECK-RV64-NEXT:    ret void
9545 //
test_vsuxseg4ei32_v_i32m1_m(vbool32_t mask,int32_t * base,vuint32m1_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,size_t vl)9546 void test_vsuxseg4ei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, size_t vl) {
9547   return vsuxseg4ei32_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
9548 }
9549 
9550 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i32m1_m(
9551 // CHECK-RV64-NEXT:  entry:
9552 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9553 // CHECK-RV64-NEXT:    ret void
9554 //
test_vsuxseg5ei32_v_i32m1_m(vbool32_t mask,int32_t * base,vuint32m1_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,size_t vl)9555 void test_vsuxseg5ei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, size_t vl) {
9556   return vsuxseg5ei32_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
9557 }
9558 
9559 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i32m1_m(
9560 // CHECK-RV64-NEXT:  entry:
9561 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9562 // CHECK-RV64-NEXT:    ret void
9563 //
test_vsuxseg6ei32_v_i32m1_m(vbool32_t mask,int32_t * base,vuint32m1_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,size_t vl)9564 void test_vsuxseg6ei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, size_t vl) {
9565   return vsuxseg6ei32_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
9566 }
9567 
9568 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i32m1_m(
9569 // CHECK-RV64-NEXT:  entry:
9570 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9571 // CHECK-RV64-NEXT:    ret void
9572 //
test_vsuxseg7ei32_v_i32m1_m(vbool32_t mask,int32_t * base,vuint32m1_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,vint32m1_t v6,size_t vl)9573 void test_vsuxseg7ei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, vint32m1_t v6, size_t vl) {
9574   return vsuxseg7ei32_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
9575 }
9576 
9577 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i32m1_m(
9578 // CHECK-RV64-NEXT:  entry:
9579 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], <vscale x 2 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9580 // CHECK-RV64-NEXT:    ret void
9581 //
test_vsuxseg8ei32_v_i32m1_m(vbool32_t mask,int32_t * base,vuint32m1_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,vint32m1_t v6,vint32m1_t v7,size_t vl)9582 void test_vsuxseg8ei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, vint32m1_t v6, vint32m1_t v7, size_t vl) {
9583   return vsuxseg8ei32_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
9584 }
9585 
9586 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i32m2_m(
9587 // CHECK-RV64-NEXT:  entry:
9588 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9589 // CHECK-RV64-NEXT:    ret void
9590 //
test_vsuxseg2ei32_v_i32m2_m(vbool16_t mask,int32_t * base,vuint32m2_t bindex,vint32m2_t v0,vint32m2_t v1,size_t vl)9591 void test_vsuxseg2ei32_v_i32m2_m (vbool16_t mask, int32_t *base, vuint32m2_t bindex, vint32m2_t v0, vint32m2_t v1, size_t vl) {
9592   return vsuxseg2ei32_v_i32m2_m(mask, base, bindex, v0, v1, vl);
9593 }
9594 
9595 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i32m2_m(
9596 // CHECK-RV64-NEXT:  entry:
9597 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9598 // CHECK-RV64-NEXT:    ret void
9599 //
test_vsuxseg3ei32_v_i32m2_m(vbool16_t mask,int32_t * base,vuint32m2_t bindex,vint32m2_t v0,vint32m2_t v1,vint32m2_t v2,size_t vl)9600 void test_vsuxseg3ei32_v_i32m2_m (vbool16_t mask, int32_t *base, vuint32m2_t bindex, vint32m2_t v0, vint32m2_t v1, vint32m2_t v2, size_t vl) {
9601   return vsuxseg3ei32_v_i32m2_m(mask, base, bindex, v0, v1, v2, vl);
9602 }
9603 
9604 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i32m2_m(
9605 // CHECK-RV64-NEXT:  entry:
9606 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], <vscale x 4 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9607 // CHECK-RV64-NEXT:    ret void
9608 //
test_vsuxseg4ei32_v_i32m2_m(vbool16_t mask,int32_t * base,vuint32m2_t bindex,vint32m2_t v0,vint32m2_t v1,vint32m2_t v2,vint32m2_t v3,size_t vl)9609 void test_vsuxseg4ei32_v_i32m2_m (vbool16_t mask, int32_t *base, vuint32m2_t bindex, vint32m2_t v0, vint32m2_t v1, vint32m2_t v2, vint32m2_t v3, size_t vl) {
9610   return vsuxseg4ei32_v_i32m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
9611 }
9612 
9613 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i32m4_m(
9614 // CHECK-RV64-NEXT:  entry:
9615 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[V0:%.*]], <vscale x 8 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9616 // CHECK-RV64-NEXT:    ret void
9617 //
test_vsuxseg2ei32_v_i32m4_m(vbool8_t mask,int32_t * base,vuint32m4_t bindex,vint32m4_t v0,vint32m4_t v1,size_t vl)9618 void test_vsuxseg2ei32_v_i32m4_m (vbool8_t mask, int32_t *base, vuint32m4_t bindex, vint32m4_t v0, vint32m4_t v1, size_t vl) {
9619   return vsuxseg2ei32_v_i32m4_m(mask, base, bindex, v0, v1, vl);
9620 }
9621 
9622 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i32mf2_m(
9623 // CHECK-RV64-NEXT:  entry:
9624 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9625 // CHECK-RV64-NEXT:    ret void
9626 //
test_vsuxseg2ei64_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint64m1_t bindex,vint32mf2_t v0,vint32mf2_t v1,size_t vl)9627 void test_vsuxseg2ei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t v0, vint32mf2_t v1, size_t vl) {
9628   return vsuxseg2ei64_v_i32mf2_m(mask, base, bindex, v0, v1, vl);
9629 }
9630 
9631 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i32mf2_m(
9632 // CHECK-RV64-NEXT:  entry:
9633 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9634 // CHECK-RV64-NEXT:    ret void
9635 //
test_vsuxseg3ei64_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint64m1_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,size_t vl)9636 void test_vsuxseg3ei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, size_t vl) {
9637   return vsuxseg3ei64_v_i32mf2_m(mask, base, bindex, v0, v1, v2, vl);
9638 }
9639 
9640 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i32mf2_m(
9641 // CHECK-RV64-NEXT:  entry:
9642 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9643 // CHECK-RV64-NEXT:    ret void
9644 //
test_vsuxseg4ei64_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint64m1_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,size_t vl)9645 void test_vsuxseg4ei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, size_t vl) {
9646   return vsuxseg4ei64_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
9647 }
9648 
9649 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i32mf2_m(
9650 // CHECK-RV64-NEXT:  entry:
9651 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9652 // CHECK-RV64-NEXT:    ret void
9653 //
test_vsuxseg5ei64_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint64m1_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,size_t vl)9654 void test_vsuxseg5ei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, size_t vl) {
9655   return vsuxseg5ei64_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
9656 }
9657 
9658 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i32mf2_m(
9659 // CHECK-RV64-NEXT:  entry:
9660 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9661 // CHECK-RV64-NEXT:    ret void
9662 //
test_vsuxseg6ei64_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint64m1_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,size_t vl)9663 void test_vsuxseg6ei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, size_t vl) {
9664   return vsuxseg6ei64_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
9665 }
9666 
9667 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i32mf2_m(
9668 // CHECK-RV64-NEXT:  entry:
9669 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9670 // CHECK-RV64-NEXT:    ret void
9671 //
test_vsuxseg7ei64_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint64m1_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,vint32mf2_t v6,size_t vl)9672 void test_vsuxseg7ei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, vint32mf2_t v6, size_t vl) {
9673   return vsuxseg7ei64_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
9674 }
9675 
9676 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i32mf2_m(
9677 // CHECK-RV64-NEXT:  entry:
9678 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], <vscale x 1 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9679 // CHECK-RV64-NEXT:    ret void
9680 //
test_vsuxseg8ei64_v_i32mf2_m(vbool64_t mask,int32_t * base,vuint64m1_t bindex,vint32mf2_t v0,vint32mf2_t v1,vint32mf2_t v2,vint32mf2_t v3,vint32mf2_t v4,vint32mf2_t v5,vint32mf2_t v6,vint32mf2_t v7,size_t vl)9681 void test_vsuxseg8ei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t v0, vint32mf2_t v1, vint32mf2_t v2, vint32mf2_t v3, vint32mf2_t v4, vint32mf2_t v5, vint32mf2_t v6, vint32mf2_t v7, size_t vl) {
9682   return vsuxseg8ei64_v_i32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
9683 }
9684 
9685 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i32m1_m(
9686 // CHECK-RV64-NEXT:  entry:
9687 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9688 // CHECK-RV64-NEXT:    ret void
9689 //
test_vsuxseg2ei64_v_i32m1_m(vbool32_t mask,int32_t * base,vuint64m2_t bindex,vint32m1_t v0,vint32m1_t v1,size_t vl)9690 void test_vsuxseg2ei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t v0, vint32m1_t v1, size_t vl) {
9691   return vsuxseg2ei64_v_i32m1_m(mask, base, bindex, v0, v1, vl);
9692 }
9693 
9694 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i32m1_m(
9695 // CHECK-RV64-NEXT:  entry:
9696 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9697 // CHECK-RV64-NEXT:    ret void
9698 //
test_vsuxseg3ei64_v_i32m1_m(vbool32_t mask,int32_t * base,vuint64m2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,size_t vl)9699 void test_vsuxseg3ei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, size_t vl) {
9700   return vsuxseg3ei64_v_i32m1_m(mask, base, bindex, v0, v1, v2, vl);
9701 }
9702 
9703 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i32m1_m(
9704 // CHECK-RV64-NEXT:  entry:
9705 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9706 // CHECK-RV64-NEXT:    ret void
9707 //
test_vsuxseg4ei64_v_i32m1_m(vbool32_t mask,int32_t * base,vuint64m2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,size_t vl)9708 void test_vsuxseg4ei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, size_t vl) {
9709   return vsuxseg4ei64_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
9710 }
9711 
9712 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i32m1_m(
9713 // CHECK-RV64-NEXT:  entry:
9714 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9715 // CHECK-RV64-NEXT:    ret void
9716 //
test_vsuxseg5ei64_v_i32m1_m(vbool32_t mask,int32_t * base,vuint64m2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,size_t vl)9717 void test_vsuxseg5ei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, size_t vl) {
9718   return vsuxseg5ei64_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
9719 }
9720 
9721 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i32m1_m(
9722 // CHECK-RV64-NEXT:  entry:
9723 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9724 // CHECK-RV64-NEXT:    ret void
9725 //
test_vsuxseg6ei64_v_i32m1_m(vbool32_t mask,int32_t * base,vuint64m2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,size_t vl)9726 void test_vsuxseg6ei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, size_t vl) {
9727   return vsuxseg6ei64_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
9728 }
9729 
9730 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i32m1_m(
9731 // CHECK-RV64-NEXT:  entry:
9732 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9733 // CHECK-RV64-NEXT:    ret void
9734 //
test_vsuxseg7ei64_v_i32m1_m(vbool32_t mask,int32_t * base,vuint64m2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,vint32m1_t v6,size_t vl)9735 void test_vsuxseg7ei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, vint32m1_t v6, size_t vl) {
9736   return vsuxseg7ei64_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
9737 }
9738 
9739 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i32m1_m(
9740 // CHECK-RV64-NEXT:  entry:
9741 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], <vscale x 2 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9742 // CHECK-RV64-NEXT:    ret void
9743 //
test_vsuxseg8ei64_v_i32m1_m(vbool32_t mask,int32_t * base,vuint64m2_t bindex,vint32m1_t v0,vint32m1_t v1,vint32m1_t v2,vint32m1_t v3,vint32m1_t v4,vint32m1_t v5,vint32m1_t v6,vint32m1_t v7,size_t vl)9744 void test_vsuxseg8ei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t v0, vint32m1_t v1, vint32m1_t v2, vint32m1_t v3, vint32m1_t v4, vint32m1_t v5, vint32m1_t v6, vint32m1_t v7, size_t vl) {
9745   return vsuxseg8ei64_v_i32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
9746 }
9747 
9748 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i32m2_m(
9749 // CHECK-RV64-NEXT:  entry:
9750 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9751 // CHECK-RV64-NEXT:    ret void
9752 //
test_vsuxseg2ei64_v_i32m2_m(vbool16_t mask,int32_t * base,vuint64m4_t bindex,vint32m2_t v0,vint32m2_t v1,size_t vl)9753 void test_vsuxseg2ei64_v_i32m2_m (vbool16_t mask, int32_t *base, vuint64m4_t bindex, vint32m2_t v0, vint32m2_t v1, size_t vl) {
9754   return vsuxseg2ei64_v_i32m2_m(mask, base, bindex, v0, v1, vl);
9755 }
9756 
9757 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i32m2_m(
9758 // CHECK-RV64-NEXT:  entry:
9759 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9760 // CHECK-RV64-NEXT:    ret void
9761 //
test_vsuxseg3ei64_v_i32m2_m(vbool16_t mask,int32_t * base,vuint64m4_t bindex,vint32m2_t v0,vint32m2_t v1,vint32m2_t v2,size_t vl)9762 void test_vsuxseg3ei64_v_i32m2_m (vbool16_t mask, int32_t *base, vuint64m4_t bindex, vint32m2_t v0, vint32m2_t v1, vint32m2_t v2, size_t vl) {
9763   return vsuxseg3ei64_v_i32m2_m(mask, base, bindex, v0, v1, v2, vl);
9764 }
9765 
9766 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i32m2_m(
9767 // CHECK-RV64-NEXT:  entry:
9768 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], <vscale x 4 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9769 // CHECK-RV64-NEXT:    ret void
9770 //
test_vsuxseg4ei64_v_i32m2_m(vbool16_t mask,int32_t * base,vuint64m4_t bindex,vint32m2_t v0,vint32m2_t v1,vint32m2_t v2,vint32m2_t v3,size_t vl)9771 void test_vsuxseg4ei64_v_i32m2_m (vbool16_t mask, int32_t *base, vuint64m4_t bindex, vint32m2_t v0, vint32m2_t v1, vint32m2_t v2, vint32m2_t v3, size_t vl) {
9772   return vsuxseg4ei64_v_i32m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
9773 }
9774 
9775 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i32m4_m(
9776 // CHECK-RV64-NEXT:  entry:
9777 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[V0:%.*]], <vscale x 8 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9778 // CHECK-RV64-NEXT:    ret void
9779 //
test_vsuxseg2ei64_v_i32m4_m(vbool8_t mask,int32_t * base,vuint64m8_t bindex,vint32m4_t v0,vint32m4_t v1,size_t vl)9780 void test_vsuxseg2ei64_v_i32m4_m (vbool8_t mask, int32_t *base, vuint64m8_t bindex, vint32m4_t v0, vint32m4_t v1, size_t vl) {
9781   return vsuxseg2ei64_v_i32m4_m(mask, base, bindex, v0, v1, vl);
9782 }
9783 
9784 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i64m1_m(
9785 // CHECK-RV64-NEXT:  entry:
9786 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9787 // CHECK-RV64-NEXT:    ret void
9788 //
test_vsuxseg2ei8_v_i64m1_m(vbool64_t mask,int64_t * base,vuint8mf8_t bindex,vint64m1_t v0,vint64m1_t v1,size_t vl)9789 void test_vsuxseg2ei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t v0, vint64m1_t v1, size_t vl) {
9790   return vsuxseg2ei8_v_i64m1_m(mask, base, bindex, v0, v1, vl);
9791 }
9792 
9793 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i64m1_m(
9794 // CHECK-RV64-NEXT:  entry:
9795 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9796 // CHECK-RV64-NEXT:    ret void
9797 //
test_vsuxseg3ei8_v_i64m1_m(vbool64_t mask,int64_t * base,vuint8mf8_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,size_t vl)9798 void test_vsuxseg3ei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, size_t vl) {
9799   return vsuxseg3ei8_v_i64m1_m(mask, base, bindex, v0, v1, v2, vl);
9800 }
9801 
9802 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i64m1_m(
9803 // CHECK-RV64-NEXT:  entry:
9804 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9805 // CHECK-RV64-NEXT:    ret void
9806 //
test_vsuxseg4ei8_v_i64m1_m(vbool64_t mask,int64_t * base,vuint8mf8_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,size_t vl)9807 void test_vsuxseg4ei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, size_t vl) {
9808   return vsuxseg4ei8_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
9809 }
9810 
9811 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_i64m1_m(
9812 // CHECK-RV64-NEXT:  entry:
9813 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9814 // CHECK-RV64-NEXT:    ret void
9815 //
test_vsuxseg5ei8_v_i64m1_m(vbool64_t mask,int64_t * base,vuint8mf8_t bindex,vint64m1_t v0,vint64m1_t v1,vint64m1_t v2,vint64m1_t v3,vint64m1_t v4,size_t vl)9816 void test_vsuxseg5ei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, size_t vl) {
9817   return vsuxseg5ei8_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
9818 }
9819 
9820 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_i64m1_m(
9821 // CHECK-RV64-NEXT:  entry:
9822 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9823 // CHECK-RV64-NEXT:    ret void
9824 //
// Forwards to vsuxseg6ei8_v_i64m1_m; expected llvm.riscv.vsuxseg6.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg6ei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, size_t vl) {
  return vsuxseg6ei8_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
9828 
9829 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_i64m1_m(
9830 // CHECK-RV64-NEXT:  entry:
9831 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9832 // CHECK-RV64-NEXT:    ret void
9833 //
// Forwards to vsuxseg7ei8_v_i64m1_m; expected llvm.riscv.vsuxseg7.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg7ei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, vint64m1_t v6, size_t vl) {
  return vsuxseg7ei8_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
9837 
9838 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_i64m1_m(
9839 // CHECK-RV64-NEXT:  entry:
9840 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], <vscale x 1 x i64> [[V7:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9841 // CHECK-RV64-NEXT:    ret void
9842 //
// Forwards to vsuxseg8ei8_v_i64m1_m; expected llvm.riscv.vsuxseg8.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg8ei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, vint64m1_t v6, vint64m1_t v7, size_t vl) {
  return vsuxseg8ei8_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
9846 
9847 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i64m2_m(
9848 // CHECK-RV64-NEXT:  entry:
9849 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9850 // CHECK-RV64-NEXT:    ret void
9851 //
// Forwards to vsuxseg2ei8_v_i64m2_m; expected llvm.riscv.vsuxseg2.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg2ei8_v_i64m2_m (vbool32_t mask, int64_t *base, vuint8mf4_t bindex, vint64m2_t v0, vint64m2_t v1, size_t vl) {
  return vsuxseg2ei8_v_i64m2_m(mask, base, bindex, v0, v1, vl);
}
9855 
9856 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_i64m2_m(
9857 // CHECK-RV64-NEXT:  entry:
9858 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9859 // CHECK-RV64-NEXT:    ret void
9860 //
// Forwards to vsuxseg3ei8_v_i64m2_m; expected llvm.riscv.vsuxseg3.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg3ei8_v_i64m2_m (vbool32_t mask, int64_t *base, vuint8mf4_t bindex, vint64m2_t v0, vint64m2_t v1, vint64m2_t v2, size_t vl) {
  return vsuxseg3ei8_v_i64m2_m(mask, base, bindex, v0, v1, v2, vl);
}
9864 
9865 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_i64m2_m(
9866 // CHECK-RV64-NEXT:  entry:
9867 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], <vscale x 2 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9868 // CHECK-RV64-NEXT:    ret void
9869 //
// Forwards to vsuxseg4ei8_v_i64m2_m; expected llvm.riscv.vsuxseg4.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg4ei8_v_i64m2_m (vbool32_t mask, int64_t *base, vuint8mf4_t bindex, vint64m2_t v0, vint64m2_t v1, vint64m2_t v2, vint64m2_t v3, size_t vl) {
  return vsuxseg4ei8_v_i64m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
9873 
9874 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_i64m4_m(
9875 // CHECK-RV64-NEXT:  entry:
9876 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[V0:%.*]], <vscale x 4 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9877 // CHECK-RV64-NEXT:    ret void
9878 //
// Forwards to vsuxseg2ei8_v_i64m4_m; expected llvm.riscv.vsuxseg2.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg2ei8_v_i64m4_m (vbool16_t mask, int64_t *base, vuint8mf2_t bindex, vint64m4_t v0, vint64m4_t v1, size_t vl) {
  return vsuxseg2ei8_v_i64m4_m(mask, base, bindex, v0, v1, vl);
}
9882 
9883 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i64m1_m(
9884 // CHECK-RV64-NEXT:  entry:
9885 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9886 // CHECK-RV64-NEXT:    ret void
9887 //
// Forwards to vsuxseg2ei16_v_i64m1_m; expected llvm.riscv.vsuxseg2.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg2ei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t v0, vint64m1_t v1, size_t vl) {
  return vsuxseg2ei16_v_i64m1_m(mask, base, bindex, v0, v1, vl);
}
9891 
9892 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i64m1_m(
9893 // CHECK-RV64-NEXT:  entry:
9894 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9895 // CHECK-RV64-NEXT:    ret void
9896 //
// Forwards to vsuxseg3ei16_v_i64m1_m; expected llvm.riscv.vsuxseg3.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg3ei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, size_t vl) {
  return vsuxseg3ei16_v_i64m1_m(mask, base, bindex, v0, v1, v2, vl);
}
9900 
9901 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i64m1_m(
9902 // CHECK-RV64-NEXT:  entry:
9903 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9904 // CHECK-RV64-NEXT:    ret void
9905 //
// Forwards to vsuxseg4ei16_v_i64m1_m; expected llvm.riscv.vsuxseg4.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg4ei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, size_t vl) {
  return vsuxseg4ei16_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
9909 
9910 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_i64m1_m(
9911 // CHECK-RV64-NEXT:  entry:
9912 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9913 // CHECK-RV64-NEXT:    ret void
9914 //
// Forwards to vsuxseg5ei16_v_i64m1_m; expected llvm.riscv.vsuxseg5.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg5ei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, size_t vl) {
  return vsuxseg5ei16_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
9918 
9919 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_i64m1_m(
9920 // CHECK-RV64-NEXT:  entry:
9921 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9922 // CHECK-RV64-NEXT:    ret void
9923 //
// Forwards to vsuxseg6ei16_v_i64m1_m; expected llvm.riscv.vsuxseg6.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg6ei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, size_t vl) {
  return vsuxseg6ei16_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
9927 
9928 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_i64m1_m(
9929 // CHECK-RV64-NEXT:  entry:
9930 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9931 // CHECK-RV64-NEXT:    ret void
9932 //
// Forwards to vsuxseg7ei16_v_i64m1_m; expected llvm.riscv.vsuxseg7.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg7ei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, vint64m1_t v6, size_t vl) {
  return vsuxseg7ei16_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
9936 
9937 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_i64m1_m(
9938 // CHECK-RV64-NEXT:  entry:
9939 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], <vscale x 1 x i64> [[V7:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9940 // CHECK-RV64-NEXT:    ret void
9941 //
// Forwards to vsuxseg8ei16_v_i64m1_m; expected llvm.riscv.vsuxseg8.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg8ei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, vint64m1_t v6, vint64m1_t v7, size_t vl) {
  return vsuxseg8ei16_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
9945 
9946 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i64m2_m(
9947 // CHECK-RV64-NEXT:  entry:
9948 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9949 // CHECK-RV64-NEXT:    ret void
9950 //
// Forwards to vsuxseg2ei16_v_i64m2_m; expected llvm.riscv.vsuxseg2.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg2ei16_v_i64m2_m (vbool32_t mask, int64_t *base, vuint16mf2_t bindex, vint64m2_t v0, vint64m2_t v1, size_t vl) {
  return vsuxseg2ei16_v_i64m2_m(mask, base, bindex, v0, v1, vl);
}
9954 
9955 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_i64m2_m(
9956 // CHECK-RV64-NEXT:  entry:
9957 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9958 // CHECK-RV64-NEXT:    ret void
9959 //
// Forwards to vsuxseg3ei16_v_i64m2_m; expected llvm.riscv.vsuxseg3.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg3ei16_v_i64m2_m (vbool32_t mask, int64_t *base, vuint16mf2_t bindex, vint64m2_t v0, vint64m2_t v1, vint64m2_t v2, size_t vl) {
  return vsuxseg3ei16_v_i64m2_m(mask, base, bindex, v0, v1, v2, vl);
}
9963 
9964 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_i64m2_m(
9965 // CHECK-RV64-NEXT:  entry:
9966 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], <vscale x 2 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9967 // CHECK-RV64-NEXT:    ret void
9968 //
// Forwards to vsuxseg4ei16_v_i64m2_m; expected llvm.riscv.vsuxseg4.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg4ei16_v_i64m2_m (vbool32_t mask, int64_t *base, vuint16mf2_t bindex, vint64m2_t v0, vint64m2_t v1, vint64m2_t v2, vint64m2_t v3, size_t vl) {
  return vsuxseg4ei16_v_i64m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
9972 
9973 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_i64m4_m(
9974 // CHECK-RV64-NEXT:  entry:
9975 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[V0:%.*]], <vscale x 4 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9976 // CHECK-RV64-NEXT:    ret void
9977 //
// Forwards to vsuxseg2ei16_v_i64m4_m; expected llvm.riscv.vsuxseg2.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg2ei16_v_i64m4_m (vbool16_t mask, int64_t *base, vuint16m1_t bindex, vint64m4_t v0, vint64m4_t v1, size_t vl) {
  return vsuxseg2ei16_v_i64m4_m(mask, base, bindex, v0, v1, vl);
}
9981 
9982 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i64m1_m(
9983 // CHECK-RV64-NEXT:  entry:
9984 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9985 // CHECK-RV64-NEXT:    ret void
9986 //
// Forwards to vsuxseg2ei32_v_i64m1_m; expected llvm.riscv.vsuxseg2.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg2ei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t v0, vint64m1_t v1, size_t vl) {
  return vsuxseg2ei32_v_i64m1_m(mask, base, bindex, v0, v1, vl);
}
9990 
9991 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i64m1_m(
9992 // CHECK-RV64-NEXT:  entry:
9993 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
9994 // CHECK-RV64-NEXT:    ret void
9995 //
// Forwards to vsuxseg3ei32_v_i64m1_m; expected llvm.riscv.vsuxseg3.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg3ei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, size_t vl) {
  return vsuxseg3ei32_v_i64m1_m(mask, base, bindex, v0, v1, v2, vl);
}
9999 
10000 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i64m1_m(
10001 // CHECK-RV64-NEXT:  entry:
10002 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10003 // CHECK-RV64-NEXT:    ret void
10004 //
// Forwards to vsuxseg4ei32_v_i64m1_m; expected llvm.riscv.vsuxseg4.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg4ei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, size_t vl) {
  return vsuxseg4ei32_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
10008 
10009 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_i64m1_m(
10010 // CHECK-RV64-NEXT:  entry:
10011 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10012 // CHECK-RV64-NEXT:    ret void
10013 //
// Forwards to vsuxseg5ei32_v_i64m1_m; expected llvm.riscv.vsuxseg5.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg5ei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, size_t vl) {
  return vsuxseg5ei32_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
10017 
10018 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_i64m1_m(
10019 // CHECK-RV64-NEXT:  entry:
10020 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10021 // CHECK-RV64-NEXT:    ret void
10022 //
// Forwards to vsuxseg6ei32_v_i64m1_m; expected llvm.riscv.vsuxseg6.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg6ei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, size_t vl) {
  return vsuxseg6ei32_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
10026 
10027 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_i64m1_m(
10028 // CHECK-RV64-NEXT:  entry:
10029 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10030 // CHECK-RV64-NEXT:    ret void
10031 //
// Forwards to vsuxseg7ei32_v_i64m1_m; expected llvm.riscv.vsuxseg7.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg7ei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, vint64m1_t v6, size_t vl) {
  return vsuxseg7ei32_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
10035 
10036 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_i64m1_m(
10037 // CHECK-RV64-NEXT:  entry:
10038 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], <vscale x 1 x i64> [[V7:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10039 // CHECK-RV64-NEXT:    ret void
10040 //
// Forwards to vsuxseg8ei32_v_i64m1_m; expected llvm.riscv.vsuxseg8.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg8ei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, vint64m1_t v6, vint64m1_t v7, size_t vl) {
  return vsuxseg8ei32_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
10044 
10045 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i64m2_m(
10046 // CHECK-RV64-NEXT:  entry:
10047 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10048 // CHECK-RV64-NEXT:    ret void
10049 //
// Forwards to vsuxseg2ei32_v_i64m2_m; expected llvm.riscv.vsuxseg2.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg2ei32_v_i64m2_m (vbool32_t mask, int64_t *base, vuint32m1_t bindex, vint64m2_t v0, vint64m2_t v1, size_t vl) {
  return vsuxseg2ei32_v_i64m2_m(mask, base, bindex, v0, v1, vl);
}
10053 
10054 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_i64m2_m(
10055 // CHECK-RV64-NEXT:  entry:
10056 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10057 // CHECK-RV64-NEXT:    ret void
10058 //
// Forwards to vsuxseg3ei32_v_i64m2_m; expected llvm.riscv.vsuxseg3.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg3ei32_v_i64m2_m (vbool32_t mask, int64_t *base, vuint32m1_t bindex, vint64m2_t v0, vint64m2_t v1, vint64m2_t v2, size_t vl) {
  return vsuxseg3ei32_v_i64m2_m(mask, base, bindex, v0, v1, v2, vl);
}
10062 
10063 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_i64m2_m(
10064 // CHECK-RV64-NEXT:  entry:
10065 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], <vscale x 2 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10066 // CHECK-RV64-NEXT:    ret void
10067 //
// Forwards to vsuxseg4ei32_v_i64m2_m; expected llvm.riscv.vsuxseg4.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg4ei32_v_i64m2_m (vbool32_t mask, int64_t *base, vuint32m1_t bindex, vint64m2_t v0, vint64m2_t v1, vint64m2_t v2, vint64m2_t v3, size_t vl) {
  return vsuxseg4ei32_v_i64m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
10071 
10072 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_i64m4_m(
10073 // CHECK-RV64-NEXT:  entry:
10074 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[V0:%.*]], <vscale x 4 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10075 // CHECK-RV64-NEXT:    ret void
10076 //
// Forwards to vsuxseg2ei32_v_i64m4_m; expected llvm.riscv.vsuxseg2.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg2ei32_v_i64m4_m (vbool16_t mask, int64_t *base, vuint32m2_t bindex, vint64m4_t v0, vint64m4_t v1, size_t vl) {
  return vsuxseg2ei32_v_i64m4_m(mask, base, bindex, v0, v1, vl);
}
10080 
10081 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i64m1_m(
10082 // CHECK-RV64-NEXT:  entry:
10083 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10084 // CHECK-RV64-NEXT:    ret void
10085 //
// Forwards to vsuxseg2ei64_v_i64m1_m; expected llvm.riscv.vsuxseg2.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg2ei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t v0, vint64m1_t v1, size_t vl) {
  return vsuxseg2ei64_v_i64m1_m(mask, base, bindex, v0, v1, vl);
}
10089 
10090 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i64m1_m(
10091 // CHECK-RV64-NEXT:  entry:
10092 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10093 // CHECK-RV64-NEXT:    ret void
10094 //
// Forwards to vsuxseg3ei64_v_i64m1_m; expected llvm.riscv.vsuxseg3.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg3ei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, size_t vl) {
  return vsuxseg3ei64_v_i64m1_m(mask, base, bindex, v0, v1, v2, vl);
}
10098 
10099 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i64m1_m(
10100 // CHECK-RV64-NEXT:  entry:
10101 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10102 // CHECK-RV64-NEXT:    ret void
10103 //
// Forwards to vsuxseg4ei64_v_i64m1_m; expected llvm.riscv.vsuxseg4.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg4ei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, size_t vl) {
  return vsuxseg4ei64_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
10107 
10108 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_i64m1_m(
10109 // CHECK-RV64-NEXT:  entry:
10110 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10111 // CHECK-RV64-NEXT:    ret void
10112 //
// Forwards to vsuxseg5ei64_v_i64m1_m; expected llvm.riscv.vsuxseg5.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg5ei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, size_t vl) {
  return vsuxseg5ei64_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
10116 
10117 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_i64m1_m(
10118 // CHECK-RV64-NEXT:  entry:
10119 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10120 // CHECK-RV64-NEXT:    ret void
10121 //
// Forwards to vsuxseg6ei64_v_i64m1_m; expected llvm.riscv.vsuxseg6.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg6ei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, size_t vl) {
  return vsuxseg6ei64_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
10125 
10126 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_i64m1_m(
10127 // CHECK-RV64-NEXT:  entry:
10128 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10129 // CHECK-RV64-NEXT:    ret void
10130 //
// Forwards to vsuxseg7ei64_v_i64m1_m; expected llvm.riscv.vsuxseg7.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg7ei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, vint64m1_t v6, size_t vl) {
  return vsuxseg7ei64_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
10134 
10135 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_i64m1_m(
10136 // CHECK-RV64-NEXT:  entry:
10137 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], <vscale x 1 x i64> [[V7:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10138 // CHECK-RV64-NEXT:    ret void
10139 //
// Forwards to vsuxseg8ei64_v_i64m1_m; expected llvm.riscv.vsuxseg8.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg8ei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t v0, vint64m1_t v1, vint64m1_t v2, vint64m1_t v3, vint64m1_t v4, vint64m1_t v5, vint64m1_t v6, vint64m1_t v7, size_t vl) {
  return vsuxseg8ei64_v_i64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
10143 
10144 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i64m2_m(
10145 // CHECK-RV64-NEXT:  entry:
10146 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10147 // CHECK-RV64-NEXT:    ret void
10148 //
// Forwards to vsuxseg2ei64_v_i64m2_m; expected llvm.riscv.vsuxseg2.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg2ei64_v_i64m2_m (vbool32_t mask, int64_t *base, vuint64m2_t bindex, vint64m2_t v0, vint64m2_t v1, size_t vl) {
  return vsuxseg2ei64_v_i64m2_m(mask, base, bindex, v0, v1, vl);
}
10152 
10153 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_i64m2_m(
10154 // CHECK-RV64-NEXT:  entry:
10155 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10156 // CHECK-RV64-NEXT:    ret void
10157 //
// Forwards to vsuxseg3ei64_v_i64m2_m; expected llvm.riscv.vsuxseg3.mask IR is
// pinned by the autogenerated CHECK-RV64 lines above.
void test_vsuxseg3ei64_v_i64m2_m (vbool32_t mask, int64_t *base, vuint64m2_t bindex, vint64m2_t v0, vint64m2_t v1, vint64m2_t v2, size_t vl) {
  return vsuxseg3ei64_v_i64m2_m(mask, base, bindex, v0, v1, v2, vl);
}
10161 
10162 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_i64m2_m(
10163 // CHECK-RV64-NEXT:  entry:
10164 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], <vscale x 2 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10165 // CHECK-RV64-NEXT:    ret void
10166 //
test_vsuxseg4ei64_v_i64m2_m(vbool32_t mask,int64_t * base,vuint64m2_t bindex,vint64m2_t v0,vint64m2_t v1,vint64m2_t v2,vint64m2_t v3,size_t vl)10167 void test_vsuxseg4ei64_v_i64m2_m (vbool32_t mask, int64_t *base, vuint64m2_t bindex, vint64m2_t v0, vint64m2_t v1, vint64m2_t v2, vint64m2_t v3, size_t vl) {
10168   return vsuxseg4ei64_v_i64m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
10169 }
10170 
10171 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_i64m4_m(
10172 // CHECK-RV64-NEXT:  entry:
10173 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[V0:%.*]], <vscale x 4 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10174 // CHECK-RV64-NEXT:    ret void
10175 //
test_vsuxseg2ei64_v_i64m4_m(vbool16_t mask,int64_t * base,vuint64m4_t bindex,vint64m4_t v0,vint64m4_t v1,size_t vl)10176 void test_vsuxseg2ei64_v_i64m4_m (vbool16_t mask, int64_t *base, vuint64m4_t bindex, vint64m4_t v0, vint64m4_t v1, size_t vl) {
10177   return vsuxseg2ei64_v_i64m4_m(mask, base, bindex, v0, v1, vl);
10178 }
10179 
10180 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u8mf8_m(
10181 // CHECK-RV64-NEXT:  entry:
10182 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10183 // CHECK-RV64-NEXT:    ret void
10184 //
test_vsuxseg2ei8_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint8mf8_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,size_t vl)10185 void test_vsuxseg2ei8_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, size_t vl) {
10186   return vsuxseg2ei8_v_u8mf8_m(mask, base, bindex, v0, v1, vl);
10187 }
10188 
10189 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u8mf8_m(
10190 // CHECK-RV64-NEXT:  entry:
10191 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10192 // CHECK-RV64-NEXT:    ret void
10193 //
test_vsuxseg3ei8_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint8mf8_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,size_t vl)10194 void test_vsuxseg3ei8_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, size_t vl) {
10195   return vsuxseg3ei8_v_u8mf8_m(mask, base, bindex, v0, v1, v2, vl);
10196 }
10197 
10198 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u8mf8_m(
10199 // CHECK-RV64-NEXT:  entry:
10200 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10201 // CHECK-RV64-NEXT:    ret void
10202 //
test_vsuxseg4ei8_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint8mf8_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,size_t vl)10203 void test_vsuxseg4ei8_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, size_t vl) {
10204   return vsuxseg4ei8_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, vl);
10205 }
10206 
10207 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u8mf8_m(
10208 // CHECK-RV64-NEXT:  entry:
10209 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10210 // CHECK-RV64-NEXT:    ret void
10211 //
test_vsuxseg5ei8_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint8mf8_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,size_t vl)10212 void test_vsuxseg5ei8_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, size_t vl) {
10213   return vsuxseg5ei8_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
10214 }
10215 
10216 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u8mf8_m(
10217 // CHECK-RV64-NEXT:  entry:
10218 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10219 // CHECK-RV64-NEXT:    ret void
10220 //
test_vsuxseg6ei8_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint8mf8_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,vuint8mf8_t v5,size_t vl)10221 void test_vsuxseg6ei8_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, size_t vl) {
10222   return vsuxseg6ei8_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
10223 }
10224 
10225 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u8mf8_m(
10226 // CHECK-RV64-NEXT:  entry:
10227 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10228 // CHECK-RV64-NEXT:    ret void
10229 //
test_vsuxseg7ei8_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint8mf8_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,vuint8mf8_t v5,vuint8mf8_t v6,size_t vl)10230 void test_vsuxseg7ei8_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, vuint8mf8_t v6, size_t vl) {
10231   return vsuxseg7ei8_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
10232 }
10233 
10234 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u8mf8_m(
10235 // CHECK-RV64-NEXT:  entry:
10236 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], <vscale x 1 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10237 // CHECK-RV64-NEXT:    ret void
10238 //
test_vsuxseg8ei8_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint8mf8_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,vuint8mf8_t v5,vuint8mf8_t v6,vuint8mf8_t v7,size_t vl)10239 void test_vsuxseg8ei8_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, vuint8mf8_t v6, vuint8mf8_t v7, size_t vl) {
10240   return vsuxseg8ei8_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
10241 }
10242 
10243 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u8mf4_m(
10244 // CHECK-RV64-NEXT:  entry:
10245 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10246 // CHECK-RV64-NEXT:    ret void
10247 //
test_vsuxseg2ei8_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint8mf4_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,size_t vl)10248 void test_vsuxseg2ei8_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, size_t vl) {
10249   return vsuxseg2ei8_v_u8mf4_m(mask, base, bindex, v0, v1, vl);
10250 }
10251 
10252 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u8mf4_m(
10253 // CHECK-RV64-NEXT:  entry:
10254 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10255 // CHECK-RV64-NEXT:    ret void
10256 //
test_vsuxseg3ei8_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint8mf4_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,size_t vl)10257 void test_vsuxseg3ei8_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, size_t vl) {
10258   return vsuxseg3ei8_v_u8mf4_m(mask, base, bindex, v0, v1, v2, vl);
10259 }
10260 
10261 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u8mf4_m(
10262 // CHECK-RV64-NEXT:  entry:
10263 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10264 // CHECK-RV64-NEXT:    ret void
10265 //
test_vsuxseg4ei8_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint8mf4_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,size_t vl)10266 void test_vsuxseg4ei8_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, size_t vl) {
10267   return vsuxseg4ei8_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, vl);
10268 }
10269 
10270 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u8mf4_m(
10271 // CHECK-RV64-NEXT:  entry:
10272 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10273 // CHECK-RV64-NEXT:    ret void
10274 //
test_vsuxseg5ei8_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint8mf4_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,size_t vl)10275 void test_vsuxseg5ei8_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, size_t vl) {
10276   return vsuxseg5ei8_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
10277 }
10278 
10279 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u8mf4_m(
10280 // CHECK-RV64-NEXT:  entry:
10281 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10282 // CHECK-RV64-NEXT:    ret void
10283 //
test_vsuxseg6ei8_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint8mf4_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,vuint8mf4_t v5,size_t vl)10284 void test_vsuxseg6ei8_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, size_t vl) {
10285   return vsuxseg6ei8_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
10286 }
10287 
10288 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u8mf4_m(
10289 // CHECK-RV64-NEXT:  entry:
10290 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10291 // CHECK-RV64-NEXT:    ret void
10292 //
test_vsuxseg7ei8_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint8mf4_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,vuint8mf4_t v5,vuint8mf4_t v6,size_t vl)10293 void test_vsuxseg7ei8_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, vuint8mf4_t v6, size_t vl) {
10294   return vsuxseg7ei8_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
10295 }
10296 
10297 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u8mf4_m(
10298 // CHECK-RV64-NEXT:  entry:
10299 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], <vscale x 2 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10300 // CHECK-RV64-NEXT:    ret void
10301 //
test_vsuxseg8ei8_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint8mf4_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,vuint8mf4_t v5,vuint8mf4_t v6,vuint8mf4_t v7,size_t vl)10302 void test_vsuxseg8ei8_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, vuint8mf4_t v6, vuint8mf4_t v7, size_t vl) {
10303   return vsuxseg8ei8_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
10304 }
10305 
10306 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u8mf2_m(
10307 // CHECK-RV64-NEXT:  entry:
10308 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10309 // CHECK-RV64-NEXT:    ret void
10310 //
test_vsuxseg2ei8_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint8mf2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,size_t vl)10311 void test_vsuxseg2ei8_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, size_t vl) {
10312   return vsuxseg2ei8_v_u8mf2_m(mask, base, bindex, v0, v1, vl);
10313 }
10314 
10315 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u8mf2_m(
10316 // CHECK-RV64-NEXT:  entry:
10317 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10318 // CHECK-RV64-NEXT:    ret void
10319 //
test_vsuxseg3ei8_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint8mf2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,size_t vl)10320 void test_vsuxseg3ei8_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, size_t vl) {
10321   return vsuxseg3ei8_v_u8mf2_m(mask, base, bindex, v0, v1, v2, vl);
10322 }
10323 
10324 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u8mf2_m(
10325 // CHECK-RV64-NEXT:  entry:
10326 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10327 // CHECK-RV64-NEXT:    ret void
10328 //
test_vsuxseg4ei8_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint8mf2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,size_t vl)10329 void test_vsuxseg4ei8_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, size_t vl) {
10330   return vsuxseg4ei8_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
10331 }
10332 
10333 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u8mf2_m(
10334 // CHECK-RV64-NEXT:  entry:
10335 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10336 // CHECK-RV64-NEXT:    ret void
10337 //
test_vsuxseg5ei8_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint8mf2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,size_t vl)10338 void test_vsuxseg5ei8_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, size_t vl) {
10339   return vsuxseg5ei8_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
10340 }
10341 
10342 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u8mf2_m(
10343 // CHECK-RV64-NEXT:  entry:
10344 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10345 // CHECK-RV64-NEXT:    ret void
10346 //
test_vsuxseg6ei8_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint8mf2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,vuint8mf2_t v5,size_t vl)10347 void test_vsuxseg6ei8_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, size_t vl) {
10348   return vsuxseg6ei8_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
10349 }
10350 
10351 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u8mf2_m(
10352 // CHECK-RV64-NEXT:  entry:
10353 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10354 // CHECK-RV64-NEXT:    ret void
10355 //
test_vsuxseg7ei8_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint8mf2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,vuint8mf2_t v5,vuint8mf2_t v6,size_t vl)10356 void test_vsuxseg7ei8_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, vuint8mf2_t v6, size_t vl) {
10357   return vsuxseg7ei8_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
10358 }
10359 
10360 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u8mf2_m(
10361 // CHECK-RV64-NEXT:  entry:
10362 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], <vscale x 4 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10363 // CHECK-RV64-NEXT:    ret void
10364 //
test_vsuxseg8ei8_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint8mf2_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,vuint8mf2_t v5,vuint8mf2_t v6,vuint8mf2_t v7,size_t vl)10365 void test_vsuxseg8ei8_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, vuint8mf2_t v6, vuint8mf2_t v7, size_t vl) {
10366   return vsuxseg8ei8_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
10367 }
10368 
10369 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u8m1_m(
10370 // CHECK-RV64-NEXT:  entry:
10371 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10372 // CHECK-RV64-NEXT:    ret void
10373 //
test_vsuxseg2ei8_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint8m1_t bindex,vuint8m1_t v0,vuint8m1_t v1,size_t vl)10374 void test_vsuxseg2ei8_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint8m1_t bindex, vuint8m1_t v0, vuint8m1_t v1, size_t vl) {
10375   return vsuxseg2ei8_v_u8m1_m(mask, base, bindex, v0, v1, vl);
10376 }
10377 
10378 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u8m1_m(
10379 // CHECK-RV64-NEXT:  entry:
10380 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10381 // CHECK-RV64-NEXT:    ret void
10382 //
test_vsuxseg3ei8_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint8m1_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,size_t vl)10383 void test_vsuxseg3ei8_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint8m1_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, size_t vl) {
10384   return vsuxseg3ei8_v_u8m1_m(mask, base, bindex, v0, v1, v2, vl);
10385 }
10386 
10387 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u8m1_m(
10388 // CHECK-RV64-NEXT:  entry:
10389 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10390 // CHECK-RV64-NEXT:    ret void
10391 //
test_vsuxseg4ei8_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint8m1_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,size_t vl)10392 void test_vsuxseg4ei8_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint8m1_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, size_t vl) {
10393   return vsuxseg4ei8_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
10394 }
10395 
10396 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u8m1_m(
10397 // CHECK-RV64-NEXT:  entry:
10398 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10399 // CHECK-RV64-NEXT:    ret void
10400 //
test_vsuxseg5ei8_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint8m1_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,size_t vl)10401 void test_vsuxseg5ei8_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint8m1_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, size_t vl) {
10402   return vsuxseg5ei8_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
10403 }
10404 
10405 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u8m1_m(
10406 // CHECK-RV64-NEXT:  entry:
10407 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10408 // CHECK-RV64-NEXT:    ret void
10409 //
test_vsuxseg6ei8_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint8m1_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,vuint8m1_t v5,size_t vl)10410 void test_vsuxseg6ei8_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint8m1_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, size_t vl) {
10411   return vsuxseg6ei8_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
10412 }
10413 
10414 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u8m1_m(
10415 // CHECK-RV64-NEXT:  entry:
10416 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10417 // CHECK-RV64-NEXT:    ret void
10418 //
test_vsuxseg7ei8_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint8m1_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,vuint8m1_t v5,vuint8m1_t v6,size_t vl)10419 void test_vsuxseg7ei8_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint8m1_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, vuint8m1_t v6, size_t vl) {
10420   return vsuxseg7ei8_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
10421 }
10422 
10423 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u8m1_m(
10424 // CHECK-RV64-NEXT:  entry:
10425 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], <vscale x 8 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10426 // CHECK-RV64-NEXT:    ret void
10427 //
test_vsuxseg8ei8_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint8m1_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,vuint8m1_t v5,vuint8m1_t v6,vuint8m1_t v7,size_t vl)10428 void test_vsuxseg8ei8_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint8m1_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, vuint8m1_t v6, vuint8m1_t v7, size_t vl) {
10429   return vsuxseg8ei8_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
10430 }
10431 
10432 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u8m2_m(
10433 // CHECK-RV64-NEXT:  entry:
10434 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10435 // CHECK-RV64-NEXT:    ret void
10436 //
test_vsuxseg2ei8_v_u8m2_m(vbool4_t mask,uint8_t * base,vuint8m2_t bindex,vuint8m2_t v0,vuint8m2_t v1,size_t vl)10437 void test_vsuxseg2ei8_v_u8m2_m (vbool4_t mask, uint8_t *base, vuint8m2_t bindex, vuint8m2_t v0, vuint8m2_t v1, size_t vl) {
10438   return vsuxseg2ei8_v_u8m2_m(mask, base, bindex, v0, v1, vl);
10439 }
10440 
10441 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u8m2_m(
10442 // CHECK-RV64-NEXT:  entry:
10443 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10444 // CHECK-RV64-NEXT:    ret void
10445 //
test_vsuxseg3ei8_v_u8m2_m(vbool4_t mask,uint8_t * base,vuint8m2_t bindex,vuint8m2_t v0,vuint8m2_t v1,vuint8m2_t v2,size_t vl)10446 void test_vsuxseg3ei8_v_u8m2_m (vbool4_t mask, uint8_t *base, vuint8m2_t bindex, vuint8m2_t v0, vuint8m2_t v1, vuint8m2_t v2, size_t vl) {
10447   return vsuxseg3ei8_v_u8m2_m(mask, base, bindex, v0, v1, v2, vl);
10448 }
10449 
10450 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u8m2_m(
10451 // CHECK-RV64-NEXT:  entry:
10452 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], <vscale x 16 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10453 // CHECK-RV64-NEXT:    ret void
10454 //
test_vsuxseg4ei8_v_u8m2_m(vbool4_t mask,uint8_t * base,vuint8m2_t bindex,vuint8m2_t v0,vuint8m2_t v1,vuint8m2_t v2,vuint8m2_t v3,size_t vl)10455 void test_vsuxseg4ei8_v_u8m2_m (vbool4_t mask, uint8_t *base, vuint8m2_t bindex, vuint8m2_t v0, vuint8m2_t v1, vuint8m2_t v2, vuint8m2_t v3, size_t vl) {
10456   return vsuxseg4ei8_v_u8m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
10457 }
10458 
10459 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u8m4_m(
10460 // CHECK-RV64-NEXT:  entry:
10461 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[V0:%.*]], <vscale x 32 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10462 // CHECK-RV64-NEXT:    ret void
10463 //
test_vsuxseg2ei8_v_u8m4_m(vbool2_t mask,uint8_t * base,vuint8m4_t bindex,vuint8m4_t v0,vuint8m4_t v1,size_t vl)10464 void test_vsuxseg2ei8_v_u8m4_m (vbool2_t mask, uint8_t *base, vuint8m4_t bindex, vuint8m4_t v0, vuint8m4_t v1, size_t vl) {
10465   return vsuxseg2ei8_v_u8m4_m(mask, base, bindex, v0, v1, vl);
10466 }
10467 
10468 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u8mf8_m(
10469 // CHECK-RV64-NEXT:  entry:
10470 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10471 // CHECK-RV64-NEXT:    ret void
10472 //
test_vsuxseg2ei16_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint16mf4_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,size_t vl)10473 void test_vsuxseg2ei16_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint16mf4_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, size_t vl) {
10474   return vsuxseg2ei16_v_u8mf8_m(mask, base, bindex, v0, v1, vl);
10475 }
10476 
10477 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u8mf8_m(
10478 // CHECK-RV64-NEXT:  entry:
10479 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10480 // CHECK-RV64-NEXT:    ret void
10481 //
test_vsuxseg3ei16_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint16mf4_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,size_t vl)10482 void test_vsuxseg3ei16_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint16mf4_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, size_t vl) {
10483   return vsuxseg3ei16_v_u8mf8_m(mask, base, bindex, v0, v1, v2, vl);
10484 }
10485 
10486 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u8mf8_m(
10487 // CHECK-RV64-NEXT:  entry:
10488 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10489 // CHECK-RV64-NEXT:    ret void
10490 //
// Masked 4-field indexed segment store: u8mf8 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg4ei16_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint16mf4_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,size_t vl)10491 void test_vsuxseg4ei16_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint16mf4_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, size_t vl) {
10492   return vsuxseg4ei16_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, vl);
10493 }
10494 
10495 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u8mf8_m(
10496 // CHECK-RV64-NEXT:  entry:
10497 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10498 // CHECK-RV64-NEXT:    ret void
10499 //
// Masked 5-field indexed segment store: u8mf8 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg5ei16_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint16mf4_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,size_t vl)10500 void test_vsuxseg5ei16_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint16mf4_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, size_t vl) {
10501   return vsuxseg5ei16_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
10502 }
10503 
10504 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u8mf8_m(
10505 // CHECK-RV64-NEXT:  entry:
10506 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10507 // CHECK-RV64-NEXT:    ret void
10508 //
// Masked 6-field indexed segment store: u8mf8 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg6ei16_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint16mf4_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,vuint8mf8_t v5,size_t vl)10509 void test_vsuxseg6ei16_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint16mf4_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, size_t vl) {
10510   return vsuxseg6ei16_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
10511 }
10512 
10513 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u8mf8_m(
10514 // CHECK-RV64-NEXT:  entry:
10515 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10516 // CHECK-RV64-NEXT:    ret void
10517 //
// Masked 7-field indexed segment store: u8mf8 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg7ei16_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint16mf4_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,vuint8mf8_t v5,vuint8mf8_t v6,size_t vl)10518 void test_vsuxseg7ei16_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint16mf4_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, vuint8mf8_t v6, size_t vl) {
10519   return vsuxseg7ei16_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
10520 }
10521 
10522 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u8mf8_m(
10523 // CHECK-RV64-NEXT:  entry:
10524 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], <vscale x 1 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10525 // CHECK-RV64-NEXT:    ret void
10526 //
// Masked 8-field indexed segment store: u8mf8 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg8ei16_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint16mf4_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,vuint8mf8_t v5,vuint8mf8_t v6,vuint8mf8_t v7,size_t vl)10527 void test_vsuxseg8ei16_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint16mf4_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, vuint8mf8_t v6, vuint8mf8_t v7, size_t vl) {
10528   return vsuxseg8ei16_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
10529 }
10530 
10531 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u8mf4_m(
10532 // CHECK-RV64-NEXT:  entry:
10533 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10534 // CHECK-RV64-NEXT:    ret void
10535 //
// Masked 2-field indexed segment store: u8mf4 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg2ei16_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint16mf2_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,size_t vl)10536 void test_vsuxseg2ei16_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint16mf2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, size_t vl) {
10537   return vsuxseg2ei16_v_u8mf4_m(mask, base, bindex, v0, v1, vl);
10538 }
10539 
10540 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u8mf4_m(
10541 // CHECK-RV64-NEXT:  entry:
10542 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10543 // CHECK-RV64-NEXT:    ret void
10544 //
// Masked 3-field indexed segment store: u8mf4 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg3ei16_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint16mf2_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,size_t vl)10545 void test_vsuxseg3ei16_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint16mf2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, size_t vl) {
10546   return vsuxseg3ei16_v_u8mf4_m(mask, base, bindex, v0, v1, v2, vl);
10547 }
10548 
10549 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u8mf4_m(
10550 // CHECK-RV64-NEXT:  entry:
10551 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10552 // CHECK-RV64-NEXT:    ret void
10553 //
// Masked 4-field indexed segment store: u8mf4 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg4ei16_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint16mf2_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,size_t vl)10554 void test_vsuxseg4ei16_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint16mf2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, size_t vl) {
10555   return vsuxseg4ei16_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, vl);
10556 }
10557 
10558 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u8mf4_m(
10559 // CHECK-RV64-NEXT:  entry:
10560 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10561 // CHECK-RV64-NEXT:    ret void
10562 //
// Masked 5-field indexed segment store: u8mf4 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg5ei16_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint16mf2_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,size_t vl)10563 void test_vsuxseg5ei16_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint16mf2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, size_t vl) {
10564   return vsuxseg5ei16_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
10565 }
10566 
10567 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u8mf4_m(
10568 // CHECK-RV64-NEXT:  entry:
10569 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10570 // CHECK-RV64-NEXT:    ret void
10571 //
// Masked 6-field indexed segment store: u8mf4 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg6ei16_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint16mf2_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,vuint8mf4_t v5,size_t vl)10572 void test_vsuxseg6ei16_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint16mf2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, size_t vl) {
10573   return vsuxseg6ei16_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
10574 }
10575 
10576 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u8mf4_m(
10577 // CHECK-RV64-NEXT:  entry:
10578 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10579 // CHECK-RV64-NEXT:    ret void
10580 //
// Masked 7-field indexed segment store: u8mf4 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg7ei16_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint16mf2_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,vuint8mf4_t v5,vuint8mf4_t v6,size_t vl)10581 void test_vsuxseg7ei16_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint16mf2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, vuint8mf4_t v6, size_t vl) {
10582   return vsuxseg7ei16_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
10583 }
10584 
10585 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u8mf4_m(
10586 // CHECK-RV64-NEXT:  entry:
10587 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], <vscale x 2 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10588 // CHECK-RV64-NEXT:    ret void
10589 //
// Masked 8-field indexed segment store: u8mf4 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg8ei16_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint16mf2_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,vuint8mf4_t v5,vuint8mf4_t v6,vuint8mf4_t v7,size_t vl)10590 void test_vsuxseg8ei16_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint16mf2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, vuint8mf4_t v6, vuint8mf4_t v7, size_t vl) {
10591   return vsuxseg8ei16_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
10592 }
10593 
10594 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u8mf2_m(
10595 // CHECK-RV64-NEXT:  entry:
10596 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10597 // CHECK-RV64-NEXT:    ret void
10598 //
// Masked 2-field indexed segment store: u8mf2 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg2ei16_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint16m1_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,size_t vl)10599 void test_vsuxseg2ei16_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint16m1_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, size_t vl) {
10600   return vsuxseg2ei16_v_u8mf2_m(mask, base, bindex, v0, v1, vl);
10601 }
10602 
10603 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u8mf2_m(
10604 // CHECK-RV64-NEXT:  entry:
10605 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10606 // CHECK-RV64-NEXT:    ret void
10607 //
// Masked 3-field indexed segment store: u8mf2 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg3ei16_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint16m1_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,size_t vl)10608 void test_vsuxseg3ei16_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint16m1_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, size_t vl) {
10609   return vsuxseg3ei16_v_u8mf2_m(mask, base, bindex, v0, v1, v2, vl);
10610 }
10611 
10612 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u8mf2_m(
10613 // CHECK-RV64-NEXT:  entry:
10614 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10615 // CHECK-RV64-NEXT:    ret void
10616 //
// Masked 4-field indexed segment store: u8mf2 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg4ei16_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint16m1_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,size_t vl)10617 void test_vsuxseg4ei16_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint16m1_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, size_t vl) {
10618   return vsuxseg4ei16_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
10619 }
10620 
10621 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u8mf2_m(
10622 // CHECK-RV64-NEXT:  entry:
10623 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10624 // CHECK-RV64-NEXT:    ret void
10625 //
// Masked 5-field indexed segment store: u8mf2 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg5ei16_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint16m1_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,size_t vl)10626 void test_vsuxseg5ei16_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint16m1_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, size_t vl) {
10627   return vsuxseg5ei16_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
10628 }
10629 
10630 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u8mf2_m(
10631 // CHECK-RV64-NEXT:  entry:
10632 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10633 // CHECK-RV64-NEXT:    ret void
10634 //
// Masked 6-field indexed segment store: u8mf2 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg6ei16_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint16m1_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,vuint8mf2_t v5,size_t vl)10635 void test_vsuxseg6ei16_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint16m1_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, size_t vl) {
10636   return vsuxseg6ei16_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
10637 }
10638 
10639 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u8mf2_m(
10640 // CHECK-RV64-NEXT:  entry:
10641 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10642 // CHECK-RV64-NEXT:    ret void
10643 //
// Masked 7-field indexed segment store: u8mf2 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg7ei16_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint16m1_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,vuint8mf2_t v5,vuint8mf2_t v6,size_t vl)10644 void test_vsuxseg7ei16_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint16m1_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, vuint8mf2_t v6, size_t vl) {
10645   return vsuxseg7ei16_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
10646 }
10647 
10648 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u8mf2_m(
10649 // CHECK-RV64-NEXT:  entry:
10650 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], <vscale x 4 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10651 // CHECK-RV64-NEXT:    ret void
10652 //
// Masked 8-field indexed segment store: u8mf2 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg8ei16_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint16m1_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,vuint8mf2_t v5,vuint8mf2_t v6,vuint8mf2_t v7,size_t vl)10653 void test_vsuxseg8ei16_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint16m1_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, vuint8mf2_t v6, vuint8mf2_t v7, size_t vl) {
10654   return vsuxseg8ei16_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
10655 }
10656 
10657 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u8m1_m(
10658 // CHECK-RV64-NEXT:  entry:
10659 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10660 // CHECK-RV64-NEXT:    ret void
10661 //
// Masked 2-field indexed segment store: u8m1 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg2ei16_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint16m2_t bindex,vuint8m1_t v0,vuint8m1_t v1,size_t vl)10662 void test_vsuxseg2ei16_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint16m2_t bindex, vuint8m1_t v0, vuint8m1_t v1, size_t vl) {
10663   return vsuxseg2ei16_v_u8m1_m(mask, base, bindex, v0, v1, vl);
10664 }
10665 
10666 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u8m1_m(
10667 // CHECK-RV64-NEXT:  entry:
10668 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10669 // CHECK-RV64-NEXT:    ret void
10670 //
// Masked 3-field indexed segment store: u8m1 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg3ei16_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint16m2_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,size_t vl)10671 void test_vsuxseg3ei16_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint16m2_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, size_t vl) {
10672   return vsuxseg3ei16_v_u8m1_m(mask, base, bindex, v0, v1, v2, vl);
10673 }
10674 
10675 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u8m1_m(
10676 // CHECK-RV64-NEXT:  entry:
10677 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10678 // CHECK-RV64-NEXT:    ret void
10679 //
// Masked 4-field indexed segment store: u8m1 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg4ei16_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint16m2_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,size_t vl)10680 void test_vsuxseg4ei16_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint16m2_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, size_t vl) {
10681   return vsuxseg4ei16_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
10682 }
10683 
10684 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u8m1_m(
10685 // CHECK-RV64-NEXT:  entry:
10686 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10687 // CHECK-RV64-NEXT:    ret void
10688 //
// Masked 5-field indexed segment store: u8m1 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg5ei16_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint16m2_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,size_t vl)10689 void test_vsuxseg5ei16_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint16m2_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, size_t vl) {
10690   return vsuxseg5ei16_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
10691 }
10692 
10693 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u8m1_m(
10694 // CHECK-RV64-NEXT:  entry:
10695 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10696 // CHECK-RV64-NEXT:    ret void
10697 //
// Masked 6-field indexed segment store: u8m1 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg6ei16_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint16m2_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,vuint8m1_t v5,size_t vl)10698 void test_vsuxseg6ei16_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint16m2_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, size_t vl) {
10699   return vsuxseg6ei16_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
10700 }
10701 
10702 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u8m1_m(
10703 // CHECK-RV64-NEXT:  entry:
10704 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10705 // CHECK-RV64-NEXT:    ret void
10706 //
// Masked 7-field indexed segment store: u8m1 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg7ei16_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint16m2_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,vuint8m1_t v5,vuint8m1_t v6,size_t vl)10707 void test_vsuxseg7ei16_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint16m2_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, vuint8m1_t v6, size_t vl) {
10708   return vsuxseg7ei16_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
10709 }
10710 
10711 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u8m1_m(
10712 // CHECK-RV64-NEXT:  entry:
10713 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], <vscale x 8 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10714 // CHECK-RV64-NEXT:    ret void
10715 //
// Masked 8-field indexed segment store: u8m1 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg8ei16_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint16m2_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,vuint8m1_t v5,vuint8m1_t v6,vuint8m1_t v7,size_t vl)10716 void test_vsuxseg8ei16_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint16m2_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, vuint8m1_t v6, vuint8m1_t v7, size_t vl) {
10717   return vsuxseg8ei16_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
10718 }
10719 
10720 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u8m2_m(
10721 // CHECK-RV64-NEXT:  entry:
10722 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10723 // CHECK-RV64-NEXT:    ret void
10724 //
// Masked 2-field indexed segment store: u8m2 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg2ei16_v_u8m2_m(vbool4_t mask,uint8_t * base,vuint16m4_t bindex,vuint8m2_t v0,vuint8m2_t v1,size_t vl)10725 void test_vsuxseg2ei16_v_u8m2_m (vbool4_t mask, uint8_t *base, vuint16m4_t bindex, vuint8m2_t v0, vuint8m2_t v1, size_t vl) {
10726   return vsuxseg2ei16_v_u8m2_m(mask, base, bindex, v0, v1, vl);
10727 }
10728 
10729 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u8m2_m(
10730 // CHECK-RV64-NEXT:  entry:
10731 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10732 // CHECK-RV64-NEXT:    ret void
10733 //
// Masked 3-field indexed segment store: u8m2 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg3ei16_v_u8m2_m(vbool4_t mask,uint8_t * base,vuint16m4_t bindex,vuint8m2_t v0,vuint8m2_t v1,vuint8m2_t v2,size_t vl)10734 void test_vsuxseg3ei16_v_u8m2_m (vbool4_t mask, uint8_t *base, vuint16m4_t bindex, vuint8m2_t v0, vuint8m2_t v1, vuint8m2_t v2, size_t vl) {
10735   return vsuxseg3ei16_v_u8m2_m(mask, base, bindex, v0, v1, v2, vl);
10736 }
10737 
10738 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u8m2_m(
10739 // CHECK-RV64-NEXT:  entry:
10740 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], <vscale x 16 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10741 // CHECK-RV64-NEXT:    ret void
10742 //
// Masked 4-field indexed segment store: u8m2 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg4ei16_v_u8m2_m(vbool4_t mask,uint8_t * base,vuint16m4_t bindex,vuint8m2_t v0,vuint8m2_t v1,vuint8m2_t v2,vuint8m2_t v3,size_t vl)10743 void test_vsuxseg4ei16_v_u8m2_m (vbool4_t mask, uint8_t *base, vuint16m4_t bindex, vuint8m2_t v0, vuint8m2_t v1, vuint8m2_t v2, vuint8m2_t v3, size_t vl) {
10744   return vsuxseg4ei16_v_u8m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
10745 }
10746 
10747 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u8m4_m(
10748 // CHECK-RV64-NEXT:  entry:
10749 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[V0:%.*]], <vscale x 32 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10750 // CHECK-RV64-NEXT:    ret void
10751 //
// Masked 2-field indexed segment store: u8m4 data, 16-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg2ei16_v_u8m4_m(vbool2_t mask,uint8_t * base,vuint16m8_t bindex,vuint8m4_t v0,vuint8m4_t v1,size_t vl)10752 void test_vsuxseg2ei16_v_u8m4_m (vbool2_t mask, uint8_t *base, vuint16m8_t bindex, vuint8m4_t v0, vuint8m4_t v1, size_t vl) {
10753   return vsuxseg2ei16_v_u8m4_m(mask, base, bindex, v0, v1, vl);
10754 }
10755 
10756 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u8mf8_m(
10757 // CHECK-RV64-NEXT:  entry:
10758 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10759 // CHECK-RV64-NEXT:    ret void
10760 //
// Masked 2-field indexed segment store: u8mf8 data, 32-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg2ei32_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint32mf2_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,size_t vl)10761 void test_vsuxseg2ei32_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint32mf2_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, size_t vl) {
10762   return vsuxseg2ei32_v_u8mf8_m(mask, base, bindex, v0, v1, vl);
10763 }
10764 
10765 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u8mf8_m(
10766 // CHECK-RV64-NEXT:  entry:
10767 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10768 // CHECK-RV64-NEXT:    ret void
10769 //
// Masked 3-field indexed segment store: u8mf8 data, 32-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg3ei32_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint32mf2_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,size_t vl)10770 void test_vsuxseg3ei32_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint32mf2_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, size_t vl) {
10771   return vsuxseg3ei32_v_u8mf8_m(mask, base, bindex, v0, v1, v2, vl);
10772 }
10773 
10774 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u8mf8_m(
10775 // CHECK-RV64-NEXT:  entry:
10776 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10777 // CHECK-RV64-NEXT:    ret void
10778 //
// Masked 4-field indexed segment store: u8mf8 data, 32-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg4ei32_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint32mf2_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,size_t vl)10779 void test_vsuxseg4ei32_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint32mf2_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, size_t vl) {
10780   return vsuxseg4ei32_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, vl);
10781 }
10782 
10783 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u8mf8_m(
10784 // CHECK-RV64-NEXT:  entry:
10785 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10786 // CHECK-RV64-NEXT:    ret void
10787 //
// Masked 5-field indexed segment store: u8mf8 data, 32-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg5ei32_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint32mf2_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,size_t vl)10788 void test_vsuxseg5ei32_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint32mf2_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, size_t vl) {
10789   return vsuxseg5ei32_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
10790 }
10791 
10792 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u8mf8_m(
10793 // CHECK-RV64-NEXT:  entry:
10794 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10795 // CHECK-RV64-NEXT:    ret void
10796 //
// Masked 6-field indexed segment store: u8mf8 data, 32-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg6ei32_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint32mf2_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,vuint8mf8_t v5,size_t vl)10797 void test_vsuxseg6ei32_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint32mf2_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, size_t vl) {
10798   return vsuxseg6ei32_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
10799 }
10800 
10801 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u8mf8_m(
10802 // CHECK-RV64-NEXT:  entry:
10803 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10804 // CHECK-RV64-NEXT:    ret void
10805 //
// Masked 7-field indexed segment store: u8mf8 data, 32-bit indices; IR pinned by the CHECK lines above.
test_vsuxseg7ei32_v_u8mf8_m(vbool64_t mask,uint8_t * base,vuint32mf2_t bindex,vuint8mf8_t v0,vuint8mf8_t v1,vuint8mf8_t v2,vuint8mf8_t v3,vuint8mf8_t v4,vuint8mf8_t v5,vuint8mf8_t v6,size_t vl)10806 void test_vsuxseg7ei32_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint32mf2_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, vuint8mf8_t v6, size_t vl) {
10807   return vsuxseg7ei32_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
10808 }
10809 
10810 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u8mf8_m(
10811 // CHECK-RV64-NEXT:  entry:
10812 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], <vscale x 1 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10813 // CHECK-RV64-NEXT:    ret void
10814 //
// Masked 8-field unordered indexed segment store: u8mf8 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg8.mask.nxv1i8.nxv1i32.i64 per the CHECK lines).
void test_vsuxseg8ei32_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint32mf2_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, vuint8mf8_t v6, vuint8mf8_t v7, size_t vl) {
  return vsuxseg8ei32_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
10818 
10819 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u8mf4_m(
10820 // CHECK-RV64-NEXT:  entry:
10821 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10822 // CHECK-RV64-NEXT:    ret void
10823 //
// Masked 2-field unordered indexed segment store: u8mf4 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i32.i64 per the CHECK lines).
void test_vsuxseg2ei32_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint32m1_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, size_t vl) {
  return vsuxseg2ei32_v_u8mf4_m(mask, base, bindex, v0, v1, vl);
}
10827 
10828 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u8mf4_m(
10829 // CHECK-RV64-NEXT:  entry:
10830 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10831 // CHECK-RV64-NEXT:    ret void
10832 //
// Masked 3-field unordered indexed segment store: u8mf4 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i32.i64 per the CHECK lines).
void test_vsuxseg3ei32_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint32m1_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, size_t vl) {
  return vsuxseg3ei32_v_u8mf4_m(mask, base, bindex, v0, v1, v2, vl);
}
10836 
10837 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u8mf4_m(
10838 // CHECK-RV64-NEXT:  entry:
10839 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10840 // CHECK-RV64-NEXT:    ret void
10841 //
// Masked 4-field unordered indexed segment store: u8mf4 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i32.i64 per the CHECK lines).
void test_vsuxseg4ei32_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint32m1_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, size_t vl) {
  return vsuxseg4ei32_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
10845 
10846 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u8mf4_m(
10847 // CHECK-RV64-NEXT:  entry:
10848 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10849 // CHECK-RV64-NEXT:    ret void
10850 //
// Masked 5-field unordered indexed segment store: u8mf4 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i32.i64 per the CHECK lines).
void test_vsuxseg5ei32_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint32m1_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, size_t vl) {
  return vsuxseg5ei32_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
10854 
10855 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u8mf4_m(
10856 // CHECK-RV64-NEXT:  entry:
10857 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10858 // CHECK-RV64-NEXT:    ret void
10859 //
// Masked 6-field unordered indexed segment store: u8mf4 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i32.i64 per the CHECK lines).
void test_vsuxseg6ei32_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint32m1_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, size_t vl) {
  return vsuxseg6ei32_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
10863 
10864 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u8mf4_m(
10865 // CHECK-RV64-NEXT:  entry:
10866 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10867 // CHECK-RV64-NEXT:    ret void
10868 //
// Masked 7-field unordered indexed segment store: u8mf4 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg7.mask.nxv2i8.nxv2i32.i64 per the CHECK lines).
void test_vsuxseg7ei32_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint32m1_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, vuint8mf4_t v6, size_t vl) {
  return vsuxseg7ei32_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
10872 
10873 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u8mf4_m(
10874 // CHECK-RV64-NEXT:  entry:
10875 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], <vscale x 2 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10876 // CHECK-RV64-NEXT:    ret void
10877 //
// Masked 8-field unordered indexed segment store: u8mf4 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg8.mask.nxv2i8.nxv2i32.i64 per the CHECK lines).
void test_vsuxseg8ei32_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint32m1_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, vuint8mf4_t v6, vuint8mf4_t v7, size_t vl) {
  return vsuxseg8ei32_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
10881 
10882 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u8mf2_m(
10883 // CHECK-RV64-NEXT:  entry:
10884 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10885 // CHECK-RV64-NEXT:    ret void
10886 //
// Masked 2-field unordered indexed segment store: u8mf2 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i32.i64 per the CHECK lines).
void test_vsuxseg2ei32_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint32m2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, size_t vl) {
  return vsuxseg2ei32_v_u8mf2_m(mask, base, bindex, v0, v1, vl);
}
10890 
10891 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u8mf2_m(
10892 // CHECK-RV64-NEXT:  entry:
10893 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10894 // CHECK-RV64-NEXT:    ret void
10895 //
// Masked 3-field unordered indexed segment store: u8mf2 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i32.i64 per the CHECK lines).
void test_vsuxseg3ei32_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint32m2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, size_t vl) {
  return vsuxseg3ei32_v_u8mf2_m(mask, base, bindex, v0, v1, v2, vl);
}
10899 
10900 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u8mf2_m(
10901 // CHECK-RV64-NEXT:  entry:
10902 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10903 // CHECK-RV64-NEXT:    ret void
10904 //
// Masked 4-field unordered indexed segment store: u8mf2 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i32.i64 per the CHECK lines).
void test_vsuxseg4ei32_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint32m2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, size_t vl) {
  return vsuxseg4ei32_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
10908 
10909 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u8mf2_m(
10910 // CHECK-RV64-NEXT:  entry:
10911 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10912 // CHECK-RV64-NEXT:    ret void
10913 //
// Masked 5-field unordered indexed segment store: u8mf2 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i32.i64 per the CHECK lines).
void test_vsuxseg5ei32_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint32m2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, size_t vl) {
  return vsuxseg5ei32_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
10917 
10918 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u8mf2_m(
10919 // CHECK-RV64-NEXT:  entry:
10920 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10921 // CHECK-RV64-NEXT:    ret void
10922 //
// Masked 6-field unordered indexed segment store: u8mf2 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg6.mask.nxv4i8.nxv4i32.i64 per the CHECK lines).
void test_vsuxseg6ei32_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint32m2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, size_t vl) {
  return vsuxseg6ei32_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
10926 
10927 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u8mf2_m(
10928 // CHECK-RV64-NEXT:  entry:
10929 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10930 // CHECK-RV64-NEXT:    ret void
10931 //
// Masked 7-field unordered indexed segment store: u8mf2 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg7.mask.nxv4i8.nxv4i32.i64 per the CHECK lines).
void test_vsuxseg7ei32_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint32m2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, vuint8mf2_t v6, size_t vl) {
  return vsuxseg7ei32_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
10935 
10936 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u8mf2_m(
10937 // CHECK-RV64-NEXT:  entry:
10938 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], <vscale x 4 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10939 // CHECK-RV64-NEXT:    ret void
10940 //
// Masked 8-field unordered indexed segment store: u8mf2 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg8.mask.nxv4i8.nxv4i32.i64 per the CHECK lines).
void test_vsuxseg8ei32_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint32m2_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, vuint8mf2_t v6, vuint8mf2_t v7, size_t vl) {
  return vsuxseg8ei32_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
10944 
10945 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u8m1_m(
10946 // CHECK-RV64-NEXT:  entry:
10947 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10948 // CHECK-RV64-NEXT:    ret void
10949 //
// Masked 2-field unordered indexed segment store: u8m1 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i32.i64 per the CHECK lines).
void test_vsuxseg2ei32_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint32m4_t bindex, vuint8m1_t v0, vuint8m1_t v1, size_t vl) {
  return vsuxseg2ei32_v_u8m1_m(mask, base, bindex, v0, v1, vl);
}
10953 
10954 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u8m1_m(
10955 // CHECK-RV64-NEXT:  entry:
10956 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10957 // CHECK-RV64-NEXT:    ret void
10958 //
// Masked 3-field unordered indexed segment store: u8m1 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i32.i64 per the CHECK lines).
void test_vsuxseg3ei32_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint32m4_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, size_t vl) {
  return vsuxseg3ei32_v_u8m1_m(mask, base, bindex, v0, v1, v2, vl);
}
10962 
10963 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u8m1_m(
10964 // CHECK-RV64-NEXT:  entry:
10965 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10966 // CHECK-RV64-NEXT:    ret void
10967 //
// Masked 4-field unordered indexed segment store: u8m1 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i32.i64 per the CHECK lines).
void test_vsuxseg4ei32_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint32m4_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, size_t vl) {
  return vsuxseg4ei32_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
10971 
10972 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u8m1_m(
10973 // CHECK-RV64-NEXT:  entry:
10974 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10975 // CHECK-RV64-NEXT:    ret void
10976 //
// Masked 5-field unordered indexed segment store: u8m1 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i32.i64 per the CHECK lines).
void test_vsuxseg5ei32_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint32m4_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, size_t vl) {
  return vsuxseg5ei32_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
10980 
10981 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u8m1_m(
10982 // CHECK-RV64-NEXT:  entry:
10983 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10984 // CHECK-RV64-NEXT:    ret void
10985 //
// Masked 6-field unordered indexed segment store: u8m1 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i32.i64 per the CHECK lines).
void test_vsuxseg6ei32_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint32m4_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, size_t vl) {
  return vsuxseg6ei32_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
10989 
10990 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u8m1_m(
10991 // CHECK-RV64-NEXT:  entry:
10992 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
10993 // CHECK-RV64-NEXT:    ret void
10994 //
// Masked 7-field unordered indexed segment store: u8m1 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i32.i64 per the CHECK lines).
void test_vsuxseg7ei32_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint32m4_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, vuint8m1_t v6, size_t vl) {
  return vsuxseg7ei32_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
10998 
10999 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u8m1_m(
11000 // CHECK-RV64-NEXT:  entry:
11001 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], <vscale x 8 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11002 // CHECK-RV64-NEXT:    ret void
11003 //
// Masked 8-field unordered indexed segment store: u8m1 data, 32-bit indices
// (lowers to llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i32.i64 per the CHECK lines).
void test_vsuxseg8ei32_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint32m4_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, vuint8m1_t v6, vuint8m1_t v7, size_t vl) {
  return vsuxseg8ei32_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
11007 
11008 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u8m2_m(
11009 // CHECK-RV64-NEXT:  entry:
11010 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11011 // CHECK-RV64-NEXT:    ret void
11012 //
// Masked 2-field unordered indexed segment store: u8m2 data, 32-bit (m8) indices
// (lowers to llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i32.i64 per the CHECK lines).
void test_vsuxseg2ei32_v_u8m2_m (vbool4_t mask, uint8_t *base, vuint32m8_t bindex, vuint8m2_t v0, vuint8m2_t v1, size_t vl) {
  return vsuxseg2ei32_v_u8m2_m(mask, base, bindex, v0, v1, vl);
}
11016 
11017 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u8m2_m(
11018 // CHECK-RV64-NEXT:  entry:
11019 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11020 // CHECK-RV64-NEXT:    ret void
11021 //
// Masked 3-field unordered indexed segment store: u8m2 data, 32-bit (m8) indices
// (lowers to llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i32.i64 per the CHECK lines).
void test_vsuxseg3ei32_v_u8m2_m (vbool4_t mask, uint8_t *base, vuint32m8_t bindex, vuint8m2_t v0, vuint8m2_t v1, vuint8m2_t v2, size_t vl) {
  return vsuxseg3ei32_v_u8m2_m(mask, base, bindex, v0, v1, v2, vl);
}
11025 
11026 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u8m2_m(
11027 // CHECK-RV64-NEXT:  entry:
11028 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[V0:%.*]], <vscale x 16 x i8> [[V1:%.*]], <vscale x 16 x i8> [[V2:%.*]], <vscale x 16 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11029 // CHECK-RV64-NEXT:    ret void
11030 //
// Masked 4-field unordered indexed segment store: u8m2 data, 32-bit (m8) indices
// (lowers to llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i32.i64 per the CHECK lines).
// NF=4 is the largest segment count representable at this data/index LMUL.
void test_vsuxseg4ei32_v_u8m2_m (vbool4_t mask, uint8_t *base, vuint32m8_t bindex, vuint8m2_t v0, vuint8m2_t v1, vuint8m2_t v2, vuint8m2_t v3, size_t vl) {
  return vsuxseg4ei32_v_u8m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
11034 
11035 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u8mf8_m(
11036 // CHECK-RV64-NEXT:  entry:
11037 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11038 // CHECK-RV64-NEXT:    ret void
11039 //
// Masked 2-field unordered indexed segment store: u8mf8 data, 64-bit (ei64) indices
// (lowers to llvm.riscv.vsuxseg2.mask.nxv1i8.nxv1i64.i64 per the CHECK lines).
void test_vsuxseg2ei64_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint64m1_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, size_t vl) {
  return vsuxseg2ei64_v_u8mf8_m(mask, base, bindex, v0, v1, vl);
}
11043 
11044 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u8mf8_m(
11045 // CHECK-RV64-NEXT:  entry:
11046 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11047 // CHECK-RV64-NEXT:    ret void
11048 //
// Masked 3-field unordered indexed segment store: u8mf8 data, 64-bit indices
// (lowers to llvm.riscv.vsuxseg3.mask.nxv1i8.nxv1i64.i64 per the CHECK lines).
void test_vsuxseg3ei64_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint64m1_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, size_t vl) {
  return vsuxseg3ei64_v_u8mf8_m(mask, base, bindex, v0, v1, v2, vl);
}
11052 
11053 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u8mf8_m(
11054 // CHECK-RV64-NEXT:  entry:
11055 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11056 // CHECK-RV64-NEXT:    ret void
11057 //
// Masked 4-field unordered indexed segment store: u8mf8 data, 64-bit indices
// (lowers to llvm.riscv.vsuxseg4.mask.nxv1i8.nxv1i64.i64 per the CHECK lines).
void test_vsuxseg4ei64_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint64m1_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, size_t vl) {
  return vsuxseg4ei64_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
11061 
11062 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u8mf8_m(
11063 // CHECK-RV64-NEXT:  entry:
11064 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11065 // CHECK-RV64-NEXT:    ret void
11066 //
// Masked 5-field unordered indexed segment store: u8mf8 data, 64-bit indices
// (lowers to llvm.riscv.vsuxseg5.mask.nxv1i8.nxv1i64.i64 per the CHECK lines).
void test_vsuxseg5ei64_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint64m1_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, size_t vl) {
  return vsuxseg5ei64_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
11070 
11071 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u8mf8_m(
11072 // CHECK-RV64-NEXT:  entry:
11073 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11074 // CHECK-RV64-NEXT:    ret void
11075 //
// Masked 6-field unordered indexed segment store: u8mf8 data, 64-bit indices
// (lowers to llvm.riscv.vsuxseg6.mask.nxv1i8.nxv1i64.i64 per the CHECK lines).
void test_vsuxseg6ei64_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint64m1_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, size_t vl) {
  return vsuxseg6ei64_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
11079 
11080 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u8mf8_m(
11081 // CHECK-RV64-NEXT:  entry:
11082 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11083 // CHECK-RV64-NEXT:    ret void
11084 //
// Masked 7-field unordered indexed segment store: u8mf8 data, 64-bit indices
// (lowers to llvm.riscv.vsuxseg7.mask.nxv1i8.nxv1i64.i64 per the CHECK lines).
void test_vsuxseg7ei64_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint64m1_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, vuint8mf8_t v6, size_t vl) {
  return vsuxseg7ei64_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
11088 
11089 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u8mf8_m(
11090 // CHECK-RV64-NEXT:  entry:
11091 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[V0:%.*]], <vscale x 1 x i8> [[V1:%.*]], <vscale x 1 x i8> [[V2:%.*]], <vscale x 1 x i8> [[V3:%.*]], <vscale x 1 x i8> [[V4:%.*]], <vscale x 1 x i8> [[V5:%.*]], <vscale x 1 x i8> [[V6:%.*]], <vscale x 1 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11092 // CHECK-RV64-NEXT:    ret void
11093 //
// Masked 8-field unordered indexed segment store: u8mf8 data, 64-bit indices
// (lowers to llvm.riscv.vsuxseg8.mask.nxv1i8.nxv1i64.i64 per the CHECK lines).
void test_vsuxseg8ei64_v_u8mf8_m (vbool64_t mask, uint8_t *base, vuint64m1_t bindex, vuint8mf8_t v0, vuint8mf8_t v1, vuint8mf8_t v2, vuint8mf8_t v3, vuint8mf8_t v4, vuint8mf8_t v5, vuint8mf8_t v6, vuint8mf8_t v7, size_t vl) {
  return vsuxseg8ei64_v_u8mf8_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
11097 
11098 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u8mf4_m(
11099 // CHECK-RV64-NEXT:  entry:
11100 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11101 // CHECK-RV64-NEXT:    ret void
11102 //
// Masked 2-field unordered indexed segment store: u8mf4 data, 64-bit indices
// (lowers to llvm.riscv.vsuxseg2.mask.nxv2i8.nxv2i64.i64 per the CHECK lines).
void test_vsuxseg2ei64_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint64m2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, size_t vl) {
  return vsuxseg2ei64_v_u8mf4_m(mask, base, bindex, v0, v1, vl);
}
11106 
11107 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u8mf4_m(
11108 // CHECK-RV64-NEXT:  entry:
11109 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11110 // CHECK-RV64-NEXT:    ret void
11111 //
// Masked 3-field unordered indexed segment store: u8mf4 data, 64-bit indices
// (lowers to llvm.riscv.vsuxseg3.mask.nxv2i8.nxv2i64.i64 per the CHECK lines).
void test_vsuxseg3ei64_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint64m2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, size_t vl) {
  return vsuxseg3ei64_v_u8mf4_m(mask, base, bindex, v0, v1, v2, vl);
}
11115 
11116 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u8mf4_m(
11117 // CHECK-RV64-NEXT:  entry:
11118 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11119 // CHECK-RV64-NEXT:    ret void
11120 //
// Masked 4-field unordered indexed segment store: u8mf4 data, 64-bit indices
// (lowers to llvm.riscv.vsuxseg4.mask.nxv2i8.nxv2i64.i64 per the CHECK lines).
void test_vsuxseg4ei64_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint64m2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, size_t vl) {
  return vsuxseg4ei64_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
11124 
11125 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u8mf4_m(
11126 // CHECK-RV64-NEXT:  entry:
11127 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11128 // CHECK-RV64-NEXT:    ret void
11129 //
// Masked 5-field unordered indexed segment store: u8mf4 data, 64-bit indices
// (lowers to llvm.riscv.vsuxseg5.mask.nxv2i8.nxv2i64.i64 per the CHECK lines).
void test_vsuxseg5ei64_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint64m2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, size_t vl) {
  return vsuxseg5ei64_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
11133 
11134 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u8mf4_m(
11135 // CHECK-RV64-NEXT:  entry:
11136 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11137 // CHECK-RV64-NEXT:    ret void
11138 //
// Masked 6-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg6.mask.nxv2i8.nxv2i64.i64.
test_vsuxseg6ei64_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint64m2_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,vuint8mf4_t v5,size_t vl)11139 void test_vsuxseg6ei64_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint64m2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, size_t vl) {
11140   return vsuxseg6ei64_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
11141 }
11142 
11143 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u8mf4_m(
11144 // CHECK-RV64-NEXT:  entry:
11145 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11146 // CHECK-RV64-NEXT:    ret void
11147 //
// Masked 7-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg7.mask.nxv2i8.nxv2i64.i64.
test_vsuxseg7ei64_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint64m2_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,vuint8mf4_t v5,vuint8mf4_t v6,size_t vl)11148 void test_vsuxseg7ei64_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint64m2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, vuint8mf4_t v6, size_t vl) {
11149   return vsuxseg7ei64_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
11150 }
11151 
11152 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u8mf4_m(
11153 // CHECK-RV64-NEXT:  entry:
11154 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[V0:%.*]], <vscale x 2 x i8> [[V1:%.*]], <vscale x 2 x i8> [[V2:%.*]], <vscale x 2 x i8> [[V3:%.*]], <vscale x 2 x i8> [[V4:%.*]], <vscale x 2 x i8> [[V5:%.*]], <vscale x 2 x i8> [[V6:%.*]], <vscale x 2 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11155 // CHECK-RV64-NEXT:    ret void
11156 //
// Masked 8-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg8.mask.nxv2i8.nxv2i64.i64.
test_vsuxseg8ei64_v_u8mf4_m(vbool32_t mask,uint8_t * base,vuint64m2_t bindex,vuint8mf4_t v0,vuint8mf4_t v1,vuint8mf4_t v2,vuint8mf4_t v3,vuint8mf4_t v4,vuint8mf4_t v5,vuint8mf4_t v6,vuint8mf4_t v7,size_t vl)11157 void test_vsuxseg8ei64_v_u8mf4_m (vbool32_t mask, uint8_t *base, vuint64m2_t bindex, vuint8mf4_t v0, vuint8mf4_t v1, vuint8mf4_t v2, vuint8mf4_t v3, vuint8mf4_t v4, vuint8mf4_t v5, vuint8mf4_t v6, vuint8mf4_t v7, size_t vl) {
11158   return vsuxseg8ei64_v_u8mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
11159 }
11160 
11161 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u8mf2_m(
11162 // CHECK-RV64-NEXT:  entry:
11163 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11164 // CHECK-RV64-NEXT:    ret void
11165 //
// Masked 2-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg2.mask.nxv4i8.nxv4i64.i64.
test_vsuxseg2ei64_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint64m4_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,size_t vl)11166 void test_vsuxseg2ei64_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint64m4_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, size_t vl) {
11167   return vsuxseg2ei64_v_u8mf2_m(mask, base, bindex, v0, v1, vl);
11168 }
11169 
11170 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u8mf2_m(
11171 // CHECK-RV64-NEXT:  entry:
11172 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11173 // CHECK-RV64-NEXT:    ret void
11174 //
// Masked 3-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg3.mask.nxv4i8.nxv4i64.i64.
test_vsuxseg3ei64_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint64m4_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,size_t vl)11175 void test_vsuxseg3ei64_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint64m4_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, size_t vl) {
11176   return vsuxseg3ei64_v_u8mf2_m(mask, base, bindex, v0, v1, v2, vl);
11177 }
11178 
11179 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u8mf2_m(
11180 // CHECK-RV64-NEXT:  entry:
11181 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11182 // CHECK-RV64-NEXT:    ret void
11183 //
// Masked 4-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg4.mask.nxv4i8.nxv4i64.i64.
test_vsuxseg4ei64_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint64m4_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,size_t vl)11184 void test_vsuxseg4ei64_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint64m4_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, size_t vl) {
11185   return vsuxseg4ei64_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
11186 }
11187 
11188 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u8mf2_m(
11189 // CHECK-RV64-NEXT:  entry:
11190 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11191 // CHECK-RV64-NEXT:    ret void
11192 //
// Masked 5-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg5.mask.nxv4i8.nxv4i64.i64.
test_vsuxseg5ei64_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint64m4_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,size_t vl)11193 void test_vsuxseg5ei64_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint64m4_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, size_t vl) {
11194   return vsuxseg5ei64_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
11195 }
11196 
11197 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u8mf2_m(
11198 // CHECK-RV64-NEXT:  entry:
11199 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11200 // CHECK-RV64-NEXT:    ret void
11201 //
// Masked 6-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg6.mask.nxv4i8.nxv4i64.i64.
test_vsuxseg6ei64_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint64m4_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,vuint8mf2_t v5,size_t vl)11202 void test_vsuxseg6ei64_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint64m4_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, size_t vl) {
11203   return vsuxseg6ei64_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
11204 }
11205 
11206 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u8mf2_m(
11207 // CHECK-RV64-NEXT:  entry:
11208 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11209 // CHECK-RV64-NEXT:    ret void
11210 //
// Masked 7-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg7.mask.nxv4i8.nxv4i64.i64.
test_vsuxseg7ei64_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint64m4_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,vuint8mf2_t v5,vuint8mf2_t v6,size_t vl)11211 void test_vsuxseg7ei64_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint64m4_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, vuint8mf2_t v6, size_t vl) {
11212   return vsuxseg7ei64_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
11213 }
11214 
11215 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u8mf2_m(
11216 // CHECK-RV64-NEXT:  entry:
11217 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[V0:%.*]], <vscale x 4 x i8> [[V1:%.*]], <vscale x 4 x i8> [[V2:%.*]], <vscale x 4 x i8> [[V3:%.*]], <vscale x 4 x i8> [[V4:%.*]], <vscale x 4 x i8> [[V5:%.*]], <vscale x 4 x i8> [[V6:%.*]], <vscale x 4 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11218 // CHECK-RV64-NEXT:    ret void
11219 //
// Masked 8-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg8.mask.nxv4i8.nxv4i64.i64.
test_vsuxseg8ei64_v_u8mf2_m(vbool16_t mask,uint8_t * base,vuint64m4_t bindex,vuint8mf2_t v0,vuint8mf2_t v1,vuint8mf2_t v2,vuint8mf2_t v3,vuint8mf2_t v4,vuint8mf2_t v5,vuint8mf2_t v6,vuint8mf2_t v7,size_t vl)11220 void test_vsuxseg8ei64_v_u8mf2_m (vbool16_t mask, uint8_t *base, vuint64m4_t bindex, vuint8mf2_t v0, vuint8mf2_t v1, vuint8mf2_t v2, vuint8mf2_t v3, vuint8mf2_t v4, vuint8mf2_t v5, vuint8mf2_t v6, vuint8mf2_t v7, size_t vl) {
11221   return vsuxseg8ei64_v_u8mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
11222 }
11223 
11224 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u8m1_m(
11225 // CHECK-RV64-NEXT:  entry:
11226 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11227 // CHECK-RV64-NEXT:    ret void
11228 //
// Masked 2-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg2.mask.nxv8i8.nxv8i64.i64.
test_vsuxseg2ei64_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint64m8_t bindex,vuint8m1_t v0,vuint8m1_t v1,size_t vl)11229 void test_vsuxseg2ei64_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint64m8_t bindex, vuint8m1_t v0, vuint8m1_t v1, size_t vl) {
11230   return vsuxseg2ei64_v_u8m1_m(mask, base, bindex, v0, v1, vl);
11231 }
11232 
11233 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u8m1_m(
11234 // CHECK-RV64-NEXT:  entry:
11235 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11236 // CHECK-RV64-NEXT:    ret void
11237 //
// Masked 3-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg3.mask.nxv8i8.nxv8i64.i64.
test_vsuxseg3ei64_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint64m8_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,size_t vl)11238 void test_vsuxseg3ei64_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint64m8_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, size_t vl) {
11239   return vsuxseg3ei64_v_u8m1_m(mask, base, bindex, v0, v1, v2, vl);
11240 }
11241 
11242 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u8m1_m(
11243 // CHECK-RV64-NEXT:  entry:
11244 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11245 // CHECK-RV64-NEXT:    ret void
11246 //
// Masked 4-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg4.mask.nxv8i8.nxv8i64.i64.
test_vsuxseg4ei64_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint64m8_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,size_t vl)11247 void test_vsuxseg4ei64_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint64m8_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, size_t vl) {
11248   return vsuxseg4ei64_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
11249 }
11250 
11251 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u8m1_m(
11252 // CHECK-RV64-NEXT:  entry:
11253 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11254 // CHECK-RV64-NEXT:    ret void
11255 //
// Masked 5-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg5.mask.nxv8i8.nxv8i64.i64.
test_vsuxseg5ei64_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint64m8_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,size_t vl)11256 void test_vsuxseg5ei64_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint64m8_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, size_t vl) {
11257   return vsuxseg5ei64_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
11258 }
11259 
11260 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u8m1_m(
11261 // CHECK-RV64-NEXT:  entry:
11262 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11263 // CHECK-RV64-NEXT:    ret void
11264 //
// Masked 6-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg6.mask.nxv8i8.nxv8i64.i64.
test_vsuxseg6ei64_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint64m8_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,vuint8m1_t v5,size_t vl)11265 void test_vsuxseg6ei64_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint64m8_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, size_t vl) {
11266   return vsuxseg6ei64_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
11267 }
11268 
11269 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u8m1_m(
11270 // CHECK-RV64-NEXT:  entry:
11271 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11272 // CHECK-RV64-NEXT:    ret void
11273 //
// Masked 7-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg7.mask.nxv8i8.nxv8i64.i64.
test_vsuxseg7ei64_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint64m8_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,vuint8m1_t v5,vuint8m1_t v6,size_t vl)11274 void test_vsuxseg7ei64_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint64m8_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, vuint8m1_t v6, size_t vl) {
11275   return vsuxseg7ei64_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
11276 }
11277 
11278 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u8m1_m(
11279 // CHECK-RV64-NEXT:  entry:
11280 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[V0:%.*]], <vscale x 8 x i8> [[V1:%.*]], <vscale x 8 x i8> [[V2:%.*]], <vscale x 8 x i8> [[V3:%.*]], <vscale x 8 x i8> [[V4:%.*]], <vscale x 8 x i8> [[V5:%.*]], <vscale x 8 x i8> [[V6:%.*]], <vscale x 8 x i8> [[V7:%.*]], i8* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11281 // CHECK-RV64-NEXT:    ret void
11282 //
// Masked 8-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg8.mask.nxv8i8.nxv8i64.i64.
test_vsuxseg8ei64_v_u8m1_m(vbool8_t mask,uint8_t * base,vuint64m8_t bindex,vuint8m1_t v0,vuint8m1_t v1,vuint8m1_t v2,vuint8m1_t v3,vuint8m1_t v4,vuint8m1_t v5,vuint8m1_t v6,vuint8m1_t v7,size_t vl)11283 void test_vsuxseg8ei64_v_u8m1_m (vbool8_t mask, uint8_t *base, vuint64m8_t bindex, vuint8m1_t v0, vuint8m1_t v1, vuint8m1_t v2, vuint8m1_t v3, vuint8m1_t v4, vuint8m1_t v5, vuint8m1_t v6, vuint8m1_t v7, size_t vl) {
11284   return vsuxseg8ei64_v_u8m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
11285 }
11286 
11287 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u16mf4_m(
11288 // CHECK-RV64-NEXT:  entry:
11289 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11290 // CHECK-RV64-NEXT:    ret void
11291 //
// Masked 2-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i8.i64.
test_vsuxseg2ei8_v_u16mf4_m(vbool64_t mask,uint16_t * base,vuint8mf8_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,size_t vl)11292 void test_vsuxseg2ei8_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint8mf8_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, size_t vl) {
11293   return vsuxseg2ei8_v_u16mf4_m(mask, base, bindex, v0, v1, vl);
11294 }
11295 
11296 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u16mf4_m(
11297 // CHECK-RV64-NEXT:  entry:
11298 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11299 // CHECK-RV64-NEXT:    ret void
11300 //
// Masked 3-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg3.mask.nxv1i16.nxv1i8.i64.
test_vsuxseg3ei8_v_u16mf4_m(vbool64_t mask,uint16_t * base,vuint8mf8_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,size_t vl)11301 void test_vsuxseg3ei8_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint8mf8_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, size_t vl) {
11302   return vsuxseg3ei8_v_u16mf4_m(mask, base, bindex, v0, v1, v2, vl);
11303 }
11304 
11305 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u16mf4_m(
11306 // CHECK-RV64-NEXT:  entry:
11307 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11308 // CHECK-RV64-NEXT:    ret void
11309 //
// Masked 4-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg4.mask.nxv1i16.nxv1i8.i64.
test_vsuxseg4ei8_v_u16mf4_m(vbool64_t mask,uint16_t * base,vuint8mf8_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,size_t vl)11310 void test_vsuxseg4ei8_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint8mf8_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, size_t vl) {
11311   return vsuxseg4ei8_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, vl);
11312 }
11313 
11314 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u16mf4_m(
11315 // CHECK-RV64-NEXT:  entry:
11316 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11317 // CHECK-RV64-NEXT:    ret void
11318 //
// Masked 5-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg5.mask.nxv1i16.nxv1i8.i64.
test_vsuxseg5ei8_v_u16mf4_m(vbool64_t mask,uint16_t * base,vuint8mf8_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,vuint16mf4_t v4,size_t vl)11319 void test_vsuxseg5ei8_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint8mf8_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, size_t vl) {
11320   return vsuxseg5ei8_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
11321 }
11322 
11323 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u16mf4_m(
11324 // CHECK-RV64-NEXT:  entry:
11325 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11326 // CHECK-RV64-NEXT:    ret void
11327 //
// Masked 6-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg6.mask.nxv1i16.nxv1i8.i64.
test_vsuxseg6ei8_v_u16mf4_m(vbool64_t mask,uint16_t * base,vuint8mf8_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,vuint16mf4_t v4,vuint16mf4_t v5,size_t vl)11328 void test_vsuxseg6ei8_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint8mf8_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, size_t vl) {
11329   return vsuxseg6ei8_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
11330 }
11331 
11332 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u16mf4_m(
11333 // CHECK-RV64-NEXT:  entry:
11334 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11335 // CHECK-RV64-NEXT:    ret void
11336 //
// Masked 7-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg7.mask.nxv1i16.nxv1i8.i64.
test_vsuxseg7ei8_v_u16mf4_m(vbool64_t mask,uint16_t * base,vuint8mf8_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,vuint16mf4_t v4,vuint16mf4_t v5,vuint16mf4_t v6,size_t vl)11337 void test_vsuxseg7ei8_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint8mf8_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, vuint16mf4_t v6, size_t vl) {
11338   return vsuxseg7ei8_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
11339 }
11340 
11341 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u16mf4_m(
11342 // CHECK-RV64-NEXT:  entry:
11343 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], <vscale x 1 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11344 // CHECK-RV64-NEXT:    ret void
11345 //
// Masked 8-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg8.mask.nxv1i16.nxv1i8.i64.
test_vsuxseg8ei8_v_u16mf4_m(vbool64_t mask,uint16_t * base,vuint8mf8_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,vuint16mf4_t v4,vuint16mf4_t v5,vuint16mf4_t v6,vuint16mf4_t v7,size_t vl)11346 void test_vsuxseg8ei8_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint8mf8_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, vuint16mf4_t v6, vuint16mf4_t v7, size_t vl) {
11347   return vsuxseg8ei8_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
11348 }
11349 
11350 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u16mf2_m(
11351 // CHECK-RV64-NEXT:  entry:
11352 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11353 // CHECK-RV64-NEXT:    ret void
11354 //
// Masked 2-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i8.i64.
test_vsuxseg2ei8_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint8mf4_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,size_t vl)11355 void test_vsuxseg2ei8_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint8mf4_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, size_t vl) {
11356   return vsuxseg2ei8_v_u16mf2_m(mask, base, bindex, v0, v1, vl);
11357 }
11358 
11359 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u16mf2_m(
11360 // CHECK-RV64-NEXT:  entry:
11361 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11362 // CHECK-RV64-NEXT:    ret void
11363 //
// Masked 3-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i8.i64.
test_vsuxseg3ei8_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint8mf4_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,size_t vl)11364 void test_vsuxseg3ei8_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint8mf4_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, size_t vl) {
11365   return vsuxseg3ei8_v_u16mf2_m(mask, base, bindex, v0, v1, v2, vl);
11366 }
11367 
11368 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u16mf2_m(
11369 // CHECK-RV64-NEXT:  entry:
11370 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11371 // CHECK-RV64-NEXT:    ret void
11372 //
// Masked 4-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg4.mask.nxv2i16.nxv2i8.i64.
test_vsuxseg4ei8_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint8mf4_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,size_t vl)11373 void test_vsuxseg4ei8_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint8mf4_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, size_t vl) {
11374   return vsuxseg4ei8_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
11375 }
11376 
11377 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u16mf2_m(
11378 // CHECK-RV64-NEXT:  entry:
11379 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11380 // CHECK-RV64-NEXT:    ret void
11381 //
// Masked 5-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg5.mask.nxv2i16.nxv2i8.i64.
test_vsuxseg5ei8_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint8mf4_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,size_t vl)11382 void test_vsuxseg5ei8_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint8mf4_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, size_t vl) {
11383   return vsuxseg5ei8_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
11384 }
11385 
11386 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u16mf2_m(
11387 // CHECK-RV64-NEXT:  entry:
11388 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11389 // CHECK-RV64-NEXT:    ret void
11390 //
// Masked 6-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg6.mask.nxv2i16.nxv2i8.i64.
test_vsuxseg6ei8_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint8mf4_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,vuint16mf2_t v5,size_t vl)11391 void test_vsuxseg6ei8_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint8mf4_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, size_t vl) {
11392   return vsuxseg6ei8_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
11393 }
11394 
11395 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u16mf2_m(
11396 // CHECK-RV64-NEXT:  entry:
11397 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11398 // CHECK-RV64-NEXT:    ret void
11399 //
// Masked 7-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg7.mask.nxv2i16.nxv2i8.i64.
test_vsuxseg7ei8_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint8mf4_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,vuint16mf2_t v5,vuint16mf2_t v6,size_t vl)11400 void test_vsuxseg7ei8_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint8mf4_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, vuint16mf2_t v6, size_t vl) {
11401   return vsuxseg7ei8_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
11402 }
11403 
11404 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u16mf2_m(
11405 // CHECK-RV64-NEXT:  entry:
11406 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], <vscale x 2 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11407 // CHECK-RV64-NEXT:    ret void
11408 //
// Masked 8-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg8.mask.nxv2i16.nxv2i8.i64.
test_vsuxseg8ei8_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint8mf4_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,vuint16mf2_t v5,vuint16mf2_t v6,vuint16mf2_t v7,size_t vl)11409 void test_vsuxseg8ei8_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint8mf4_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, vuint16mf2_t v6, vuint16mf2_t v7, size_t vl) {
11410   return vsuxseg8ei8_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
11411 }
11412 
11413 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u16m1_m(
11414 // CHECK-RV64-NEXT:  entry:
11415 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11416 // CHECK-RV64-NEXT:    ret void
11417 //
// Masked 2-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i8.i64.
test_vsuxseg2ei8_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint8mf2_t bindex,vuint16m1_t v0,vuint16m1_t v1,size_t vl)11418 void test_vsuxseg2ei8_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint8mf2_t bindex, vuint16m1_t v0, vuint16m1_t v1, size_t vl) {
11419   return vsuxseg2ei8_v_u16m1_m(mask, base, bindex, v0, v1, vl);
11420 }
11421 
11422 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u16m1_m(
11423 // CHECK-RV64-NEXT:  entry:
11424 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11425 // CHECK-RV64-NEXT:    ret void
11426 //
// Masked 3-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i8.i64.
test_vsuxseg3ei8_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint8mf2_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,size_t vl)11427 void test_vsuxseg3ei8_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint8mf2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, size_t vl) {
11428   return vsuxseg3ei8_v_u16m1_m(mask, base, bindex, v0, v1, v2, vl);
11429 }
11430 
11431 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u16m1_m(
11432 // CHECK-RV64-NEXT:  entry:
11433 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11434 // CHECK-RV64-NEXT:    ret void
11435 //
// Masked 4-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg4.mask.nxv4i16.nxv4i8.i64.
test_vsuxseg4ei8_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint8mf2_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,size_t vl)11436 void test_vsuxseg4ei8_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint8mf2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, size_t vl) {
11437   return vsuxseg4ei8_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
11438 }
11439 
11440 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u16m1_m(
11441 // CHECK-RV64-NEXT:  entry:
11442 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11443 // CHECK-RV64-NEXT:    ret void
11444 //
// Masked 5-field indexed-unordered segment store; CHECK above pins lowering to @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i8.i64.
test_vsuxseg5ei8_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint8mf2_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,vuint16m1_t v4,size_t vl)11445 void test_vsuxseg5ei8_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint8mf2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, size_t vl) {
11446   return vsuxseg5ei8_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
11447 }
11448 
11449 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u16m1_m(
11450 // CHECK-RV64-NEXT:  entry:
11451 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11452 // CHECK-RV64-NEXT:    ret void
11453 //
// Masked 6-segment indexed store of u16m1 data with 8-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg6.mask.nxv4i16.nxv4i8.i64.
void test_vsuxseg6ei8_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint8mf2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, size_t vl) {
  return vsuxseg6ei8_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
11457 
11458 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u16m1_m(
11459 // CHECK-RV64-NEXT:  entry:
11460 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11461 // CHECK-RV64-NEXT:    ret void
11462 //
// Masked 7-segment indexed store of u16m1 data with 8-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg7.mask.nxv4i16.nxv4i8.i64.
void test_vsuxseg7ei8_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint8mf2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, vuint16m1_t v6, size_t vl) {
  return vsuxseg7ei8_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
11466 
11467 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u16m1_m(
11468 // CHECK-RV64-NEXT:  entry:
11469 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], <vscale x 4 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11470 // CHECK-RV64-NEXT:    ret void
11471 //
// Masked 8-segment indexed store of u16m1 data with 8-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg8.mask.nxv4i16.nxv4i8.i64.
void test_vsuxseg8ei8_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint8mf2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, vuint16m1_t v6, vuint16m1_t v7, size_t vl) {
  return vsuxseg8ei8_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
11475 
11476 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u16m2_m(
11477 // CHECK-RV64-NEXT:  entry:
11478 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11479 // CHECK-RV64-NEXT:    ret void
11480 //
// Masked 2-segment indexed store of u16m2 data with 8-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i8.i64.
void test_vsuxseg2ei8_v_u16m2_m (vbool8_t mask, uint16_t *base, vuint8m1_t bindex, vuint16m2_t v0, vuint16m2_t v1, size_t vl) {
  return vsuxseg2ei8_v_u16m2_m(mask, base, bindex, v0, v1, vl);
}
11484 
11485 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u16m2_m(
11486 // CHECK-RV64-NEXT:  entry:
11487 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11488 // CHECK-RV64-NEXT:    ret void
11489 //
// Masked 3-segment indexed store of u16m2 data with 8-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i8.i64.
void test_vsuxseg3ei8_v_u16m2_m (vbool8_t mask, uint16_t *base, vuint8m1_t bindex, vuint16m2_t v0, vuint16m2_t v1, vuint16m2_t v2, size_t vl) {
  return vsuxseg3ei8_v_u16m2_m(mask, base, bindex, v0, v1, v2, vl);
}
11493 
11494 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u16m2_m(
11495 // CHECK-RV64-NEXT:  entry:
11496 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], <vscale x 8 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11497 // CHECK-RV64-NEXT:    ret void
11498 //
// Masked 4-segment indexed store of u16m2 data with 8-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg4.mask.nxv8i16.nxv8i8.i64.
void test_vsuxseg4ei8_v_u16m2_m (vbool8_t mask, uint16_t *base, vuint8m1_t bindex, vuint16m2_t v0, vuint16m2_t v1, vuint16m2_t v2, vuint16m2_t v3, size_t vl) {
  return vsuxseg4ei8_v_u16m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
11502 
11503 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u16m4_m(
11504 // CHECK-RV64-NEXT:  entry:
11505 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i8.i64(<vscale x 16 x i16> [[V0:%.*]], <vscale x 16 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11506 // CHECK-RV64-NEXT:    ret void
11507 //
// Masked 2-segment indexed store of u16m4 data with 8-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i8.i64.
void test_vsuxseg2ei8_v_u16m4_m (vbool4_t mask, uint16_t *base, vuint8m2_t bindex, vuint16m4_t v0, vuint16m4_t v1, size_t vl) {
  return vsuxseg2ei8_v_u16m4_m(mask, base, bindex, v0, v1, vl);
}
11511 
11512 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u16mf4_m(
11513 // CHECK-RV64-NEXT:  entry:
11514 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11515 // CHECK-RV64-NEXT:    ret void
11516 //
// Masked 2-segment indexed store of u16mf4 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i16.i64.
void test_vsuxseg2ei16_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint16mf4_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, size_t vl) {
  return vsuxseg2ei16_v_u16mf4_m(mask, base, bindex, v0, v1, vl);
}
11520 
11521 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u16mf4_m(
11522 // CHECK-RV64-NEXT:  entry:
11523 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11524 // CHECK-RV64-NEXT:    ret void
11525 //
// Masked 3-segment indexed store of u16mf4 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg3.mask.nxv1i16.nxv1i16.i64.
void test_vsuxseg3ei16_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint16mf4_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, size_t vl) {
  return vsuxseg3ei16_v_u16mf4_m(mask, base, bindex, v0, v1, v2, vl);
}
11529 
11530 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u16mf4_m(
11531 // CHECK-RV64-NEXT:  entry:
11532 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11533 // CHECK-RV64-NEXT:    ret void
11534 //
// Masked 4-segment indexed store of u16mf4 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg4.mask.nxv1i16.nxv1i16.i64.
void test_vsuxseg4ei16_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint16mf4_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, size_t vl) {
  return vsuxseg4ei16_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
11538 
11539 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u16mf4_m(
11540 // CHECK-RV64-NEXT:  entry:
11541 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11542 // CHECK-RV64-NEXT:    ret void
11543 //
// Masked 5-segment indexed store of u16mf4 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg5.mask.nxv1i16.nxv1i16.i64.
void test_vsuxseg5ei16_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint16mf4_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, size_t vl) {
  return vsuxseg5ei16_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
11547 
11548 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u16mf4_m(
11549 // CHECK-RV64-NEXT:  entry:
11550 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11551 // CHECK-RV64-NEXT:    ret void
11552 //
// Masked 6-segment indexed store of u16mf4 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg6.mask.nxv1i16.nxv1i16.i64.
void test_vsuxseg6ei16_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint16mf4_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, size_t vl) {
  return vsuxseg6ei16_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
11556 
11557 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u16mf4_m(
11558 // CHECK-RV64-NEXT:  entry:
11559 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11560 // CHECK-RV64-NEXT:    ret void
11561 //
// Masked 7-segment indexed store of u16mf4 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg7.mask.nxv1i16.nxv1i16.i64.
void test_vsuxseg7ei16_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint16mf4_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, vuint16mf4_t v6, size_t vl) {
  return vsuxseg7ei16_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
11565 
11566 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u16mf4_m(
11567 // CHECK-RV64-NEXT:  entry:
11568 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], <vscale x 1 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11569 // CHECK-RV64-NEXT:    ret void
11570 //
// Masked 8-segment indexed store of u16mf4 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg8.mask.nxv1i16.nxv1i16.i64.
void test_vsuxseg8ei16_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint16mf4_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, vuint16mf4_t v6, vuint16mf4_t v7, size_t vl) {
  return vsuxseg8ei16_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
11574 
11575 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u16mf2_m(
11576 // CHECK-RV64-NEXT:  entry:
11577 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11578 // CHECK-RV64-NEXT:    ret void
11579 //
// Masked 2-segment indexed store of u16mf2 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i16.i64.
void test_vsuxseg2ei16_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint16mf2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, size_t vl) {
  return vsuxseg2ei16_v_u16mf2_m(mask, base, bindex, v0, v1, vl);
}
11583 
11584 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u16mf2_m(
11585 // CHECK-RV64-NEXT:  entry:
11586 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11587 // CHECK-RV64-NEXT:    ret void
11588 //
// Masked 3-segment indexed store of u16mf2 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i16.i64.
void test_vsuxseg3ei16_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint16mf2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, size_t vl) {
  return vsuxseg3ei16_v_u16mf2_m(mask, base, bindex, v0, v1, v2, vl);
}
11592 
11593 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u16mf2_m(
11594 // CHECK-RV64-NEXT:  entry:
11595 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11596 // CHECK-RV64-NEXT:    ret void
11597 //
// Masked 4-segment indexed store of u16mf2 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg4.mask.nxv2i16.nxv2i16.i64.
void test_vsuxseg4ei16_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint16mf2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, size_t vl) {
  return vsuxseg4ei16_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
11601 
11602 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u16mf2_m(
11603 // CHECK-RV64-NEXT:  entry:
11604 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11605 // CHECK-RV64-NEXT:    ret void
11606 //
// Masked 5-segment indexed store of u16mf2 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg5.mask.nxv2i16.nxv2i16.i64.
void test_vsuxseg5ei16_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint16mf2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, size_t vl) {
  return vsuxseg5ei16_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
11610 
11611 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u16mf2_m(
11612 // CHECK-RV64-NEXT:  entry:
11613 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11614 // CHECK-RV64-NEXT:    ret void
11615 //
// Masked 6-segment indexed store of u16mf2 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg6.mask.nxv2i16.nxv2i16.i64.
void test_vsuxseg6ei16_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint16mf2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, size_t vl) {
  return vsuxseg6ei16_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
11619 
11620 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u16mf2_m(
11621 // CHECK-RV64-NEXT:  entry:
11622 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11623 // CHECK-RV64-NEXT:    ret void
11624 //
// Masked 7-segment indexed store of u16mf2 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg7.mask.nxv2i16.nxv2i16.i64.
void test_vsuxseg7ei16_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint16mf2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, vuint16mf2_t v6, size_t vl) {
  return vsuxseg7ei16_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
11628 
11629 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u16mf2_m(
11630 // CHECK-RV64-NEXT:  entry:
11631 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], <vscale x 2 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11632 // CHECK-RV64-NEXT:    ret void
11633 //
// Masked 8-segment indexed store of u16mf2 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg8.mask.nxv2i16.nxv2i16.i64.
void test_vsuxseg8ei16_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint16mf2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, vuint16mf2_t v6, vuint16mf2_t v7, size_t vl) {
  return vsuxseg8ei16_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
11637 
11638 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u16m1_m(
11639 // CHECK-RV64-NEXT:  entry:
11640 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11641 // CHECK-RV64-NEXT:    ret void
11642 //
// Masked 2-segment indexed store of u16m1 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i16.i64.
void test_vsuxseg2ei16_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint16m1_t bindex, vuint16m1_t v0, vuint16m1_t v1, size_t vl) {
  return vsuxseg2ei16_v_u16m1_m(mask, base, bindex, v0, v1, vl);
}
11646 
11647 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u16m1_m(
11648 // CHECK-RV64-NEXT:  entry:
11649 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11650 // CHECK-RV64-NEXT:    ret void
11651 //
// Masked 3-segment indexed store of u16m1 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i16.i64.
void test_vsuxseg3ei16_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint16m1_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, size_t vl) {
  return vsuxseg3ei16_v_u16m1_m(mask, base, bindex, v0, v1, v2, vl);
}
11655 
11656 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u16m1_m(
11657 // CHECK-RV64-NEXT:  entry:
11658 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11659 // CHECK-RV64-NEXT:    ret void
11660 //
// Masked 4-segment indexed store of u16m1 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg4.mask.nxv4i16.nxv4i16.i64.
void test_vsuxseg4ei16_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint16m1_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, size_t vl) {
  return vsuxseg4ei16_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
11664 
11665 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u16m1_m(
11666 // CHECK-RV64-NEXT:  entry:
11667 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11668 // CHECK-RV64-NEXT:    ret void
11669 //
// Masked 5-segment indexed store of u16m1 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i16.i64.
void test_vsuxseg5ei16_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint16m1_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, size_t vl) {
  return vsuxseg5ei16_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
11673 
11674 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u16m1_m(
11675 // CHECK-RV64-NEXT:  entry:
11676 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11677 // CHECK-RV64-NEXT:    ret void
11678 //
// Masked 6-segment indexed store of u16m1 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg6.mask.nxv4i16.nxv4i16.i64.
void test_vsuxseg6ei16_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint16m1_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, size_t vl) {
  return vsuxseg6ei16_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}
11682 
11683 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u16m1_m(
11684 // CHECK-RV64-NEXT:  entry:
11685 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11686 // CHECK-RV64-NEXT:    ret void
11687 //
// Masked 7-segment indexed store of u16m1 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg7.mask.nxv4i16.nxv4i16.i64.
void test_vsuxseg7ei16_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint16m1_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, vuint16m1_t v6, size_t vl) {
  return vsuxseg7ei16_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}
11691 
11692 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u16m1_m(
11693 // CHECK-RV64-NEXT:  entry:
11694 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], <vscale x 4 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11695 // CHECK-RV64-NEXT:    ret void
11696 //
// Masked 8-segment indexed store of u16m1 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg8.mask.nxv4i16.nxv4i16.i64.
void test_vsuxseg8ei16_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint16m1_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, vuint16m1_t v6, vuint16m1_t v7, size_t vl) {
  return vsuxseg8ei16_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
11700 
11701 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u16m2_m(
11702 // CHECK-RV64-NEXT:  entry:
11703 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11704 // CHECK-RV64-NEXT:    ret void
11705 //
// Masked 2-segment indexed store of u16m2 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i16.i64.
void test_vsuxseg2ei16_v_u16m2_m (vbool8_t mask, uint16_t *base, vuint16m2_t bindex, vuint16m2_t v0, vuint16m2_t v1, size_t vl) {
  return vsuxseg2ei16_v_u16m2_m(mask, base, bindex, v0, v1, vl);
}
11709 
11710 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u16m2_m(
11711 // CHECK-RV64-NEXT:  entry:
11712 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11713 // CHECK-RV64-NEXT:    ret void
11714 //
// Masked 3-segment indexed store of u16m2 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i16.i64.
void test_vsuxseg3ei16_v_u16m2_m (vbool8_t mask, uint16_t *base, vuint16m2_t bindex, vuint16m2_t v0, vuint16m2_t v1, vuint16m2_t v2, size_t vl) {
  return vsuxseg3ei16_v_u16m2_m(mask, base, bindex, v0, v1, v2, vl);
}
11718 
11719 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u16m2_m(
11720 // CHECK-RV64-NEXT:  entry:
11721 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], <vscale x 8 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11722 // CHECK-RV64-NEXT:    ret void
11723 //
// Masked 4-segment indexed store of u16m2 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg4.mask.nxv8i16.nxv8i16.i64.
void test_vsuxseg4ei16_v_u16m2_m (vbool8_t mask, uint16_t *base, vuint16m2_t bindex, vuint16m2_t v0, vuint16m2_t v1, vuint16m2_t v2, vuint16m2_t v3, size_t vl) {
  return vsuxseg4ei16_v_u16m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
11727 
11728 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u16m4_m(
11729 // CHECK-RV64-NEXT:  entry:
11730 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[V0:%.*]], <vscale x 16 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11731 // CHECK-RV64-NEXT:    ret void
11732 //
// Masked 2-segment indexed store of u16m4 data with 16-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i16.i64.
void test_vsuxseg2ei16_v_u16m4_m (vbool4_t mask, uint16_t *base, vuint16m4_t bindex, vuint16m4_t v0, vuint16m4_t v1, size_t vl) {
  return vsuxseg2ei16_v_u16m4_m(mask, base, bindex, v0, v1, vl);
}
11736 
11737 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u16mf4_m(
11738 // CHECK-RV64-NEXT:  entry:
11739 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11740 // CHECK-RV64-NEXT:    ret void
11741 //
// Masked 2-segment indexed store of u16mf4 data with 32-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i32.i64.
void test_vsuxseg2ei32_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint32mf2_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, size_t vl) {
  return vsuxseg2ei32_v_u16mf4_m(mask, base, bindex, v0, v1, vl);
}
11745 
11746 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u16mf4_m(
11747 // CHECK-RV64-NEXT:  entry:
11748 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11749 // CHECK-RV64-NEXT:    ret void
11750 //
// Masked 3-segment indexed store of u16mf4 data with 32-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg3.mask.nxv1i16.nxv1i32.i64.
void test_vsuxseg3ei32_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint32mf2_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, size_t vl) {
  return vsuxseg3ei32_v_u16mf4_m(mask, base, bindex, v0, v1, v2, vl);
}
11754 
11755 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u16mf4_m(
11756 // CHECK-RV64-NEXT:  entry:
11757 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11758 // CHECK-RV64-NEXT:    ret void
11759 //
// Masked 4-segment indexed store of u16mf4 data with 32-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg4.mask.nxv1i16.nxv1i32.i64.
void test_vsuxseg4ei32_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint32mf2_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, size_t vl) {
  return vsuxseg4ei32_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, vl);
}
11763 
11764 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u16mf4_m(
11765 // CHECK-RV64-NEXT:  entry:
11766 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11767 // CHECK-RV64-NEXT:    ret void
11768 //
// Masked 5-segment indexed store of u16mf4 data with 32-bit indices; the
// autogenerated CHECKs pin the lowering to
// @llvm.riscv.vsuxseg5.mask.nxv1i16.nxv1i32.i64.
void test_vsuxseg5ei32_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint32mf2_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, size_t vl) {
  return vsuxseg5ei32_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}
11772 
11773 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u16mf4_m(
11774 // CHECK-RV64-NEXT:  entry:
11775 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11776 // CHECK-RV64-NEXT:    ret void
11777 //
test_vsuxseg6ei32_v_u16mf4_m(vbool64_t mask,uint16_t * base,vuint32mf2_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,vuint16mf4_t v4,vuint16mf4_t v5,size_t vl)11778 void test_vsuxseg6ei32_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint32mf2_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, size_t vl) {
11779   return vsuxseg6ei32_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
11780 }
11781 
11782 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u16mf4_m(
11783 // CHECK-RV64-NEXT:  entry:
11784 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11785 // CHECK-RV64-NEXT:    ret void
11786 //
test_vsuxseg7ei32_v_u16mf4_m(vbool64_t mask,uint16_t * base,vuint32mf2_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,vuint16mf4_t v4,vuint16mf4_t v5,vuint16mf4_t v6,size_t vl)11787 void test_vsuxseg7ei32_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint32mf2_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, vuint16mf4_t v6, size_t vl) {
11788   return vsuxseg7ei32_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
11789 }
11790 
11791 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u16mf4_m(
11792 // CHECK-RV64-NEXT:  entry:
11793 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], <vscale x 1 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11794 // CHECK-RV64-NEXT:    ret void
11795 //
test_vsuxseg8ei32_v_u16mf4_m(vbool64_t mask,uint16_t * base,vuint32mf2_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,vuint16mf4_t v4,vuint16mf4_t v5,vuint16mf4_t v6,vuint16mf4_t v7,size_t vl)11796 void test_vsuxseg8ei32_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint32mf2_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, vuint16mf4_t v6, vuint16mf4_t v7, size_t vl) {
11797   return vsuxseg8ei32_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
11798 }
11799 
11800 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u16mf2_m(
11801 // CHECK-RV64-NEXT:  entry:
11802 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11803 // CHECK-RV64-NEXT:    ret void
11804 //
test_vsuxseg2ei32_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint32m1_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,size_t vl)11805 void test_vsuxseg2ei32_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint32m1_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, size_t vl) {
11806   return vsuxseg2ei32_v_u16mf2_m(mask, base, bindex, v0, v1, vl);
11807 }
11808 
11809 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u16mf2_m(
11810 // CHECK-RV64-NEXT:  entry:
11811 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11812 // CHECK-RV64-NEXT:    ret void
11813 //
test_vsuxseg3ei32_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint32m1_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,size_t vl)11814 void test_vsuxseg3ei32_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint32m1_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, size_t vl) {
11815   return vsuxseg3ei32_v_u16mf2_m(mask, base, bindex, v0, v1, v2, vl);
11816 }
11817 
11818 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u16mf2_m(
11819 // CHECK-RV64-NEXT:  entry:
11820 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11821 // CHECK-RV64-NEXT:    ret void
11822 //
test_vsuxseg4ei32_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint32m1_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,size_t vl)11823 void test_vsuxseg4ei32_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint32m1_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, size_t vl) {
11824   return vsuxseg4ei32_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
11825 }
11826 
11827 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u16mf2_m(
11828 // CHECK-RV64-NEXT:  entry:
11829 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11830 // CHECK-RV64-NEXT:    ret void
11831 //
test_vsuxseg5ei32_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint32m1_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,size_t vl)11832 void test_vsuxseg5ei32_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint32m1_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, size_t vl) {
11833   return vsuxseg5ei32_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
11834 }
11835 
11836 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u16mf2_m(
11837 // CHECK-RV64-NEXT:  entry:
11838 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11839 // CHECK-RV64-NEXT:    ret void
11840 //
test_vsuxseg6ei32_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint32m1_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,vuint16mf2_t v5,size_t vl)11841 void test_vsuxseg6ei32_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint32m1_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, size_t vl) {
11842   return vsuxseg6ei32_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
11843 }
11844 
11845 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u16mf2_m(
11846 // CHECK-RV64-NEXT:  entry:
11847 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11848 // CHECK-RV64-NEXT:    ret void
11849 //
test_vsuxseg7ei32_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint32m1_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,vuint16mf2_t v5,vuint16mf2_t v6,size_t vl)11850 void test_vsuxseg7ei32_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint32m1_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, vuint16mf2_t v6, size_t vl) {
11851   return vsuxseg7ei32_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
11852 }
11853 
11854 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u16mf2_m(
11855 // CHECK-RV64-NEXT:  entry:
11856 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], <vscale x 2 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11857 // CHECK-RV64-NEXT:    ret void
11858 //
test_vsuxseg8ei32_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint32m1_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,vuint16mf2_t v5,vuint16mf2_t v6,vuint16mf2_t v7,size_t vl)11859 void test_vsuxseg8ei32_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint32m1_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, vuint16mf2_t v6, vuint16mf2_t v7, size_t vl) {
11860   return vsuxseg8ei32_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
11861 }
11862 
11863 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u16m1_m(
11864 // CHECK-RV64-NEXT:  entry:
11865 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11866 // CHECK-RV64-NEXT:    ret void
11867 //
test_vsuxseg2ei32_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint32m2_t bindex,vuint16m1_t v0,vuint16m1_t v1,size_t vl)11868 void test_vsuxseg2ei32_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint32m2_t bindex, vuint16m1_t v0, vuint16m1_t v1, size_t vl) {
11869   return vsuxseg2ei32_v_u16m1_m(mask, base, bindex, v0, v1, vl);
11870 }
11871 
11872 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u16m1_m(
11873 // CHECK-RV64-NEXT:  entry:
11874 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11875 // CHECK-RV64-NEXT:    ret void
11876 //
test_vsuxseg3ei32_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint32m2_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,size_t vl)11877 void test_vsuxseg3ei32_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint32m2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, size_t vl) {
11878   return vsuxseg3ei32_v_u16m1_m(mask, base, bindex, v0, v1, v2, vl);
11879 }
11880 
11881 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u16m1_m(
11882 // CHECK-RV64-NEXT:  entry:
11883 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11884 // CHECK-RV64-NEXT:    ret void
11885 //
test_vsuxseg4ei32_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint32m2_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,size_t vl)11886 void test_vsuxseg4ei32_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint32m2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, size_t vl) {
11887   return vsuxseg4ei32_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
11888 }
11889 
11890 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u16m1_m(
11891 // CHECK-RV64-NEXT:  entry:
11892 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11893 // CHECK-RV64-NEXT:    ret void
11894 //
test_vsuxseg5ei32_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint32m2_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,vuint16m1_t v4,size_t vl)11895 void test_vsuxseg5ei32_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint32m2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, size_t vl) {
11896   return vsuxseg5ei32_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
11897 }
11898 
11899 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u16m1_m(
11900 // CHECK-RV64-NEXT:  entry:
11901 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11902 // CHECK-RV64-NEXT:    ret void
11903 //
test_vsuxseg6ei32_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint32m2_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,vuint16m1_t v4,vuint16m1_t v5,size_t vl)11904 void test_vsuxseg6ei32_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint32m2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, size_t vl) {
11905   return vsuxseg6ei32_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
11906 }
11907 
11908 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u16m1_m(
11909 // CHECK-RV64-NEXT:  entry:
11910 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11911 // CHECK-RV64-NEXT:    ret void
11912 //
test_vsuxseg7ei32_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint32m2_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,vuint16m1_t v4,vuint16m1_t v5,vuint16m1_t v6,size_t vl)11913 void test_vsuxseg7ei32_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint32m2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, vuint16m1_t v6, size_t vl) {
11914   return vsuxseg7ei32_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
11915 }
11916 
11917 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u16m1_m(
11918 // CHECK-RV64-NEXT:  entry:
11919 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], <vscale x 4 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11920 // CHECK-RV64-NEXT:    ret void
11921 //
test_vsuxseg8ei32_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint32m2_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,vuint16m1_t v4,vuint16m1_t v5,vuint16m1_t v6,vuint16m1_t v7,size_t vl)11922 void test_vsuxseg8ei32_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint32m2_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, vuint16m1_t v6, vuint16m1_t v7, size_t vl) {
11923   return vsuxseg8ei32_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
11924 }
11925 
11926 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u16m2_m(
11927 // CHECK-RV64-NEXT:  entry:
11928 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11929 // CHECK-RV64-NEXT:    ret void
11930 //
test_vsuxseg2ei32_v_u16m2_m(vbool8_t mask,uint16_t * base,vuint32m4_t bindex,vuint16m2_t v0,vuint16m2_t v1,size_t vl)11931 void test_vsuxseg2ei32_v_u16m2_m (vbool8_t mask, uint16_t *base, vuint32m4_t bindex, vuint16m2_t v0, vuint16m2_t v1, size_t vl) {
11932   return vsuxseg2ei32_v_u16m2_m(mask, base, bindex, v0, v1, vl);
11933 }
11934 
11935 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u16m2_m(
11936 // CHECK-RV64-NEXT:  entry:
11937 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11938 // CHECK-RV64-NEXT:    ret void
11939 //
test_vsuxseg3ei32_v_u16m2_m(vbool8_t mask,uint16_t * base,vuint32m4_t bindex,vuint16m2_t v0,vuint16m2_t v1,vuint16m2_t v2,size_t vl)11940 void test_vsuxseg3ei32_v_u16m2_m (vbool8_t mask, uint16_t *base, vuint32m4_t bindex, vuint16m2_t v0, vuint16m2_t v1, vuint16m2_t v2, size_t vl) {
11941   return vsuxseg3ei32_v_u16m2_m(mask, base, bindex, v0, v1, v2, vl);
11942 }
11943 
11944 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u16m2_m(
11945 // CHECK-RV64-NEXT:  entry:
11946 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], <vscale x 8 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11947 // CHECK-RV64-NEXT:    ret void
11948 //
test_vsuxseg4ei32_v_u16m2_m(vbool8_t mask,uint16_t * base,vuint32m4_t bindex,vuint16m2_t v0,vuint16m2_t v1,vuint16m2_t v2,vuint16m2_t v3,size_t vl)11949 void test_vsuxseg4ei32_v_u16m2_m (vbool8_t mask, uint16_t *base, vuint32m4_t bindex, vuint16m2_t v0, vuint16m2_t v1, vuint16m2_t v2, vuint16m2_t v3, size_t vl) {
11950   return vsuxseg4ei32_v_u16m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
11951 }
11952 
11953 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u16m4_m(
11954 // CHECK-RV64-NEXT:  entry:
11955 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i32.i64(<vscale x 16 x i16> [[V0:%.*]], <vscale x 16 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11956 // CHECK-RV64-NEXT:    ret void
11957 //
test_vsuxseg2ei32_v_u16m4_m(vbool4_t mask,uint16_t * base,vuint32m8_t bindex,vuint16m4_t v0,vuint16m4_t v1,size_t vl)11958 void test_vsuxseg2ei32_v_u16m4_m (vbool4_t mask, uint16_t *base, vuint32m8_t bindex, vuint16m4_t v0, vuint16m4_t v1, size_t vl) {
11959   return vsuxseg2ei32_v_u16m4_m(mask, base, bindex, v0, v1, vl);
11960 }
11961 
11962 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u16mf4_m(
11963 // CHECK-RV64-NEXT:  entry:
11964 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11965 // CHECK-RV64-NEXT:    ret void
11966 //
test_vsuxseg2ei64_v_u16mf4_m(vbool64_t mask,uint16_t * base,vuint64m1_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,size_t vl)11967 void test_vsuxseg2ei64_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint64m1_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, size_t vl) {
11968   return vsuxseg2ei64_v_u16mf4_m(mask, base, bindex, v0, v1, vl);
11969 }
11970 
11971 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u16mf4_m(
11972 // CHECK-RV64-NEXT:  entry:
11973 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11974 // CHECK-RV64-NEXT:    ret void
11975 //
test_vsuxseg3ei64_v_u16mf4_m(vbool64_t mask,uint16_t * base,vuint64m1_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,size_t vl)11976 void test_vsuxseg3ei64_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint64m1_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, size_t vl) {
11977   return vsuxseg3ei64_v_u16mf4_m(mask, base, bindex, v0, v1, v2, vl);
11978 }
11979 
11980 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u16mf4_m(
11981 // CHECK-RV64-NEXT:  entry:
11982 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11983 // CHECK-RV64-NEXT:    ret void
11984 //
test_vsuxseg4ei64_v_u16mf4_m(vbool64_t mask,uint16_t * base,vuint64m1_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,size_t vl)11985 void test_vsuxseg4ei64_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint64m1_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, size_t vl) {
11986   return vsuxseg4ei64_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, vl);
11987 }
11988 
11989 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u16mf4_m(
11990 // CHECK-RV64-NEXT:  entry:
11991 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
11992 // CHECK-RV64-NEXT:    ret void
11993 //
test_vsuxseg5ei64_v_u16mf4_m(vbool64_t mask,uint16_t * base,vuint64m1_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,vuint16mf4_t v4,size_t vl)11994 void test_vsuxseg5ei64_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint64m1_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, size_t vl) {
11995   return vsuxseg5ei64_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
11996 }
11997 
11998 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u16mf4_m(
11999 // CHECK-RV64-NEXT:  entry:
12000 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12001 // CHECK-RV64-NEXT:    ret void
12002 //
test_vsuxseg6ei64_v_u16mf4_m(vbool64_t mask,uint16_t * base,vuint64m1_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,vuint16mf4_t v4,vuint16mf4_t v5,size_t vl)12003 void test_vsuxseg6ei64_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint64m1_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, size_t vl) {
12004   return vsuxseg6ei64_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
12005 }
12006 
12007 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u16mf4_m(
12008 // CHECK-RV64-NEXT:  entry:
12009 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12010 // CHECK-RV64-NEXT:    ret void
12011 //
test_vsuxseg7ei64_v_u16mf4_m(vbool64_t mask,uint16_t * base,vuint64m1_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,vuint16mf4_t v4,vuint16mf4_t v5,vuint16mf4_t v6,size_t vl)12012 void test_vsuxseg7ei64_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint64m1_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, vuint16mf4_t v6, size_t vl) {
12013   return vsuxseg7ei64_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
12014 }
12015 
12016 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u16mf4_m(
12017 // CHECK-RV64-NEXT:  entry:
12018 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[V0:%.*]], <vscale x 1 x i16> [[V1:%.*]], <vscale x 1 x i16> [[V2:%.*]], <vscale x 1 x i16> [[V3:%.*]], <vscale x 1 x i16> [[V4:%.*]], <vscale x 1 x i16> [[V5:%.*]], <vscale x 1 x i16> [[V6:%.*]], <vscale x 1 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12019 // CHECK-RV64-NEXT:    ret void
12020 //
test_vsuxseg8ei64_v_u16mf4_m(vbool64_t mask,uint16_t * base,vuint64m1_t bindex,vuint16mf4_t v0,vuint16mf4_t v1,vuint16mf4_t v2,vuint16mf4_t v3,vuint16mf4_t v4,vuint16mf4_t v5,vuint16mf4_t v6,vuint16mf4_t v7,size_t vl)12021 void test_vsuxseg8ei64_v_u16mf4_m (vbool64_t mask, uint16_t *base, vuint64m1_t bindex, vuint16mf4_t v0, vuint16mf4_t v1, vuint16mf4_t v2, vuint16mf4_t v3, vuint16mf4_t v4, vuint16mf4_t v5, vuint16mf4_t v6, vuint16mf4_t v7, size_t vl) {
12022   return vsuxseg8ei64_v_u16mf4_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
12023 }
12024 
12025 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u16mf2_m(
12026 // CHECK-RV64-NEXT:  entry:
12027 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12028 // CHECK-RV64-NEXT:    ret void
12029 //
test_vsuxseg2ei64_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint64m2_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,size_t vl)12030 void test_vsuxseg2ei64_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint64m2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, size_t vl) {
12031   return vsuxseg2ei64_v_u16mf2_m(mask, base, bindex, v0, v1, vl);
12032 }
12033 
12034 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u16mf2_m(
12035 // CHECK-RV64-NEXT:  entry:
12036 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12037 // CHECK-RV64-NEXT:    ret void
12038 //
test_vsuxseg3ei64_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint64m2_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,size_t vl)12039 void test_vsuxseg3ei64_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint64m2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, size_t vl) {
12040   return vsuxseg3ei64_v_u16mf2_m(mask, base, bindex, v0, v1, v2, vl);
12041 }
12042 
12043 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u16mf2_m(
12044 // CHECK-RV64-NEXT:  entry:
12045 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12046 // CHECK-RV64-NEXT:    ret void
12047 //
test_vsuxseg4ei64_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint64m2_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,size_t vl)12048 void test_vsuxseg4ei64_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint64m2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, size_t vl) {
12049   return vsuxseg4ei64_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
12050 }
12051 
12052 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u16mf2_m(
12053 // CHECK-RV64-NEXT:  entry:
12054 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12055 // CHECK-RV64-NEXT:    ret void
12056 //
test_vsuxseg5ei64_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint64m2_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,size_t vl)12057 void test_vsuxseg5ei64_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint64m2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, size_t vl) {
12058   return vsuxseg5ei64_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
12059 }
12060 
12061 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u16mf2_m(
12062 // CHECK-RV64-NEXT:  entry:
12063 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12064 // CHECK-RV64-NEXT:    ret void
12065 //
test_vsuxseg6ei64_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint64m2_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,vuint16mf2_t v5,size_t vl)12066 void test_vsuxseg6ei64_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint64m2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, size_t vl) {
12067   return vsuxseg6ei64_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
12068 }
12069 
12070 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u16mf2_m(
12071 // CHECK-RV64-NEXT:  entry:
12072 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12073 // CHECK-RV64-NEXT:    ret void
12074 //
test_vsuxseg7ei64_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint64m2_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,vuint16mf2_t v5,vuint16mf2_t v6,size_t vl)12075 void test_vsuxseg7ei64_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint64m2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, vuint16mf2_t v6, size_t vl) {
12076   return vsuxseg7ei64_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
12077 }
12078 
12079 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u16mf2_m(
12080 // CHECK-RV64-NEXT:  entry:
12081 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[V0:%.*]], <vscale x 2 x i16> [[V1:%.*]], <vscale x 2 x i16> [[V2:%.*]], <vscale x 2 x i16> [[V3:%.*]], <vscale x 2 x i16> [[V4:%.*]], <vscale x 2 x i16> [[V5:%.*]], <vscale x 2 x i16> [[V6:%.*]], <vscale x 2 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12082 // CHECK-RV64-NEXT:    ret void
12083 //
test_vsuxseg8ei64_v_u16mf2_m(vbool32_t mask,uint16_t * base,vuint64m2_t bindex,vuint16mf2_t v0,vuint16mf2_t v1,vuint16mf2_t v2,vuint16mf2_t v3,vuint16mf2_t v4,vuint16mf2_t v5,vuint16mf2_t v6,vuint16mf2_t v7,size_t vl)12084 void test_vsuxseg8ei64_v_u16mf2_m (vbool32_t mask, uint16_t *base, vuint64m2_t bindex, vuint16mf2_t v0, vuint16mf2_t v1, vuint16mf2_t v2, vuint16mf2_t v3, vuint16mf2_t v4, vuint16mf2_t v5, vuint16mf2_t v6, vuint16mf2_t v7, size_t vl) {
12085   return vsuxseg8ei64_v_u16mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
12086 }
12087 
12088 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u16m1_m(
12089 // CHECK-RV64-NEXT:  entry:
12090 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12091 // CHECK-RV64-NEXT:    ret void
12092 //
test_vsuxseg2ei64_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint64m4_t bindex,vuint16m1_t v0,vuint16m1_t v1,size_t vl)12093 void test_vsuxseg2ei64_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint64m4_t bindex, vuint16m1_t v0, vuint16m1_t v1, size_t vl) {
12094   return vsuxseg2ei64_v_u16m1_m(mask, base, bindex, v0, v1, vl);
12095 }
12096 
12097 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u16m1_m(
12098 // CHECK-RV64-NEXT:  entry:
12099 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12100 // CHECK-RV64-NEXT:    ret void
12101 //
test_vsuxseg3ei64_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint64m4_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,size_t vl)12102 void test_vsuxseg3ei64_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint64m4_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, size_t vl) {
12103   return vsuxseg3ei64_v_u16m1_m(mask, base, bindex, v0, v1, v2, vl);
12104 }
12105 
12106 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u16m1_m(
12107 // CHECK-RV64-NEXT:  entry:
12108 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12109 // CHECK-RV64-NEXT:    ret void
12110 //
test_vsuxseg4ei64_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint64m4_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,size_t vl)12111 void test_vsuxseg4ei64_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint64m4_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, size_t vl) {
12112   return vsuxseg4ei64_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
12113 }
12114 
12115 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u16m1_m(
12116 // CHECK-RV64-NEXT:  entry:
12117 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12118 // CHECK-RV64-NEXT:    ret void
12119 //
test_vsuxseg5ei64_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint64m4_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,vuint16m1_t v4,size_t vl)12120 void test_vsuxseg5ei64_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint64m4_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, size_t vl) {
12121   return vsuxseg5ei64_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
12122 }
12123 
12124 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u16m1_m(
12125 // CHECK-RV64-NEXT:  entry:
12126 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12127 // CHECK-RV64-NEXT:    ret void
12128 //
test_vsuxseg6ei64_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint64m4_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,vuint16m1_t v4,vuint16m1_t v5,size_t vl)12129 void test_vsuxseg6ei64_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint64m4_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, size_t vl) {
12130   return vsuxseg6ei64_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
12131 }
12132 
12133 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u16m1_m(
12134 // CHECK-RV64-NEXT:  entry:
12135 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12136 // CHECK-RV64-NEXT:    ret void
12137 //
test_vsuxseg7ei64_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint64m4_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,vuint16m1_t v4,vuint16m1_t v5,vuint16m1_t v6,size_t vl)12138 void test_vsuxseg7ei64_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint64m4_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, vuint16m1_t v6, size_t vl) {
12139   return vsuxseg7ei64_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
12140 }
12141 
12142 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u16m1_m(
12143 // CHECK-RV64-NEXT:  entry:
12144 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[V0:%.*]], <vscale x 4 x i16> [[V1:%.*]], <vscale x 4 x i16> [[V2:%.*]], <vscale x 4 x i16> [[V3:%.*]], <vscale x 4 x i16> [[V4:%.*]], <vscale x 4 x i16> [[V5:%.*]], <vscale x 4 x i16> [[V6:%.*]], <vscale x 4 x i16> [[V7:%.*]], i16* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12145 // CHECK-RV64-NEXT:    ret void
12146 //
test_vsuxseg8ei64_v_u16m1_m(vbool16_t mask,uint16_t * base,vuint64m4_t bindex,vuint16m1_t v0,vuint16m1_t v1,vuint16m1_t v2,vuint16m1_t v3,vuint16m1_t v4,vuint16m1_t v5,vuint16m1_t v6,vuint16m1_t v7,size_t vl)12147 void test_vsuxseg8ei64_v_u16m1_m (vbool16_t mask, uint16_t *base, vuint64m4_t bindex, vuint16m1_t v0, vuint16m1_t v1, vuint16m1_t v2, vuint16m1_t v3, vuint16m1_t v4, vuint16m1_t v5, vuint16m1_t v6, vuint16m1_t v7, size_t vl) {
12148   return vsuxseg8ei64_v_u16m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
12149 }
12150 
12151 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u16m2_m(
12152 // CHECK-RV64-NEXT:  entry:
12153 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12154 // CHECK-RV64-NEXT:    ret void
12155 //
test_vsuxseg2ei64_v_u16m2_m(vbool8_t mask,uint16_t * base,vuint64m8_t bindex,vuint16m2_t v0,vuint16m2_t v1,size_t vl)12156 void test_vsuxseg2ei64_v_u16m2_m (vbool8_t mask, uint16_t *base, vuint64m8_t bindex, vuint16m2_t v0, vuint16m2_t v1, size_t vl) {
12157   return vsuxseg2ei64_v_u16m2_m(mask, base, bindex, v0, v1, vl);
12158 }
12159 
12160 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u16m2_m(
12161 // CHECK-RV64-NEXT:  entry:
12162 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12163 // CHECK-RV64-NEXT:    ret void
12164 //
test_vsuxseg3ei64_v_u16m2_m(vbool8_t mask,uint16_t * base,vuint64m8_t bindex,vuint16m2_t v0,vuint16m2_t v1,vuint16m2_t v2,size_t vl)12165 void test_vsuxseg3ei64_v_u16m2_m (vbool8_t mask, uint16_t *base, vuint64m8_t bindex, vuint16m2_t v0, vuint16m2_t v1, vuint16m2_t v2, size_t vl) {
12166   return vsuxseg3ei64_v_u16m2_m(mask, base, bindex, v0, v1, v2, vl);
12167 }
12168 
12169 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u16m2_m(
12170 // CHECK-RV64-NEXT:  entry:
12171 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[V0:%.*]], <vscale x 8 x i16> [[V1:%.*]], <vscale x 8 x i16> [[V2:%.*]], <vscale x 8 x i16> [[V3:%.*]], i16* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12172 // CHECK-RV64-NEXT:    ret void
12173 //
test_vsuxseg4ei64_v_u16m2_m(vbool8_t mask,uint16_t * base,vuint64m8_t bindex,vuint16m2_t v0,vuint16m2_t v1,vuint16m2_t v2,vuint16m2_t v3,size_t vl)12174 void test_vsuxseg4ei64_v_u16m2_m (vbool8_t mask, uint16_t *base, vuint64m8_t bindex, vuint16m2_t v0, vuint16m2_t v1, vuint16m2_t v2, vuint16m2_t v3, size_t vl) {
12175   return vsuxseg4ei64_v_u16m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
12176 }
12177 
12178 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u32mf2_m(
12179 // CHECK-RV64-NEXT:  entry:
12180 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12181 // CHECK-RV64-NEXT:    ret void
12182 //
test_vsuxseg2ei8_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint8mf8_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,size_t vl)12183 void test_vsuxseg2ei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, size_t vl) {
12184   return vsuxseg2ei8_v_u32mf2_m(mask, base, bindex, v0, v1, vl);
12185 }
12186 
12187 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u32mf2_m(
12188 // CHECK-RV64-NEXT:  entry:
12189 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12190 // CHECK-RV64-NEXT:    ret void
12191 //
test_vsuxseg3ei8_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint8mf8_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,size_t vl)12192 void test_vsuxseg3ei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, size_t vl) {
12193   return vsuxseg3ei8_v_u32mf2_m(mask, base, bindex, v0, v1, v2, vl);
12194 }
12195 
12196 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u32mf2_m(
12197 // CHECK-RV64-NEXT:  entry:
12198 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12199 // CHECK-RV64-NEXT:    ret void
12200 //
test_vsuxseg4ei8_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint8mf8_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,size_t vl)12201 void test_vsuxseg4ei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, size_t vl) {
12202   return vsuxseg4ei8_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
12203 }
12204 
12205 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u32mf2_m(
12206 // CHECK-RV64-NEXT:  entry:
12207 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12208 // CHECK-RV64-NEXT:    ret void
12209 //
test_vsuxseg5ei8_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint8mf8_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,size_t vl)12210 void test_vsuxseg5ei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, size_t vl) {
12211   return vsuxseg5ei8_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
12212 }
12213 
12214 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u32mf2_m(
12215 // CHECK-RV64-NEXT:  entry:
12216 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12217 // CHECK-RV64-NEXT:    ret void
12218 //
test_vsuxseg6ei8_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint8mf8_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,size_t vl)12219 void test_vsuxseg6ei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, size_t vl) {
12220   return vsuxseg6ei8_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
12221 }
12222 
12223 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u32mf2_m(
12224 // CHECK-RV64-NEXT:  entry:
12225 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12226 // CHECK-RV64-NEXT:    ret void
12227 //
test_vsuxseg7ei8_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint8mf8_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,vuint32mf2_t v6,size_t vl)12228 void test_vsuxseg7ei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, vuint32mf2_t v6, size_t vl) {
12229   return vsuxseg7ei8_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
12230 }
12231 
12232 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u32mf2_m(
12233 // CHECK-RV64-NEXT:  entry:
12234 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], <vscale x 1 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12235 // CHECK-RV64-NEXT:    ret void
12236 //
test_vsuxseg8ei8_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint8mf8_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,vuint32mf2_t v6,vuint32mf2_t v7,size_t vl)12237 void test_vsuxseg8ei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, vuint32mf2_t v6, vuint32mf2_t v7, size_t vl) {
12238   return vsuxseg8ei8_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
12239 }
12240 
12241 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u32m1_m(
12242 // CHECK-RV64-NEXT:  entry:
12243 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12244 // CHECK-RV64-NEXT:    ret void
12245 //
test_vsuxseg2ei8_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint8mf4_t bindex,vuint32m1_t v0,vuint32m1_t v1,size_t vl)12246 void test_vsuxseg2ei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t v0, vuint32m1_t v1, size_t vl) {
12247   return vsuxseg2ei8_v_u32m1_m(mask, base, bindex, v0, v1, vl);
12248 }
12249 
12250 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u32m1_m(
12251 // CHECK-RV64-NEXT:  entry:
12252 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12253 // CHECK-RV64-NEXT:    ret void
12254 //
test_vsuxseg3ei8_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint8mf4_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,size_t vl)12255 void test_vsuxseg3ei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, size_t vl) {
12256   return vsuxseg3ei8_v_u32m1_m(mask, base, bindex, v0, v1, v2, vl);
12257 }
12258 
12259 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u32m1_m(
12260 // CHECK-RV64-NEXT:  entry:
12261 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12262 // CHECK-RV64-NEXT:    ret void
12263 //
test_vsuxseg4ei8_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint8mf4_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,size_t vl)12264 void test_vsuxseg4ei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, size_t vl) {
12265   return vsuxseg4ei8_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
12266 }
12267 
12268 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u32m1_m(
12269 // CHECK-RV64-NEXT:  entry:
12270 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12271 // CHECK-RV64-NEXT:    ret void
12272 //
test_vsuxseg5ei8_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint8mf4_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,size_t vl)12273 void test_vsuxseg5ei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, size_t vl) {
12274   return vsuxseg5ei8_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
12275 }
12276 
12277 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u32m1_m(
12278 // CHECK-RV64-NEXT:  entry:
12279 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12280 // CHECK-RV64-NEXT:    ret void
12281 //
test_vsuxseg6ei8_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint8mf4_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,size_t vl)12282 void test_vsuxseg6ei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, size_t vl) {
12283   return vsuxseg6ei8_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
12284 }
12285 
12286 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u32m1_m(
12287 // CHECK-RV64-NEXT:  entry:
12288 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12289 // CHECK-RV64-NEXT:    ret void
12290 //
test_vsuxseg7ei8_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint8mf4_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,vuint32m1_t v6,size_t vl)12291 void test_vsuxseg7ei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, vuint32m1_t v6, size_t vl) {
12292   return vsuxseg7ei8_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
12293 }
12294 
12295 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u32m1_m(
12296 // CHECK-RV64-NEXT:  entry:
12297 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], <vscale x 2 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12298 // CHECK-RV64-NEXT:    ret void
12299 //
test_vsuxseg8ei8_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint8mf4_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,vuint32m1_t v6,vuint32m1_t v7,size_t vl)12300 void test_vsuxseg8ei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, vuint32m1_t v6, vuint32m1_t v7, size_t vl) {
12301   return vsuxseg8ei8_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
12302 }
12303 
12304 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u32m2_m(
12305 // CHECK-RV64-NEXT:  entry:
12306 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12307 // CHECK-RV64-NEXT:    ret void
12308 //
test_vsuxseg2ei8_v_u32m2_m(vbool16_t mask,uint32_t * base,vuint8mf2_t bindex,vuint32m2_t v0,vuint32m2_t v1,size_t vl)12309 void test_vsuxseg2ei8_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, vuint32m2_t v0, vuint32m2_t v1, size_t vl) {
12310   return vsuxseg2ei8_v_u32m2_m(mask, base, bindex, v0, v1, vl);
12311 }
12312 
12313 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u32m2_m(
12314 // CHECK-RV64-NEXT:  entry:
12315 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12316 // CHECK-RV64-NEXT:    ret void
12317 //
test_vsuxseg3ei8_v_u32m2_m(vbool16_t mask,uint32_t * base,vuint8mf2_t bindex,vuint32m2_t v0,vuint32m2_t v1,vuint32m2_t v2,size_t vl)12318 void test_vsuxseg3ei8_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, vuint32m2_t v0, vuint32m2_t v1, vuint32m2_t v2, size_t vl) {
12319   return vsuxseg3ei8_v_u32m2_m(mask, base, bindex, v0, v1, v2, vl);
12320 }
12321 
12322 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u32m2_m(
12323 // CHECK-RV64-NEXT:  entry:
12324 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], <vscale x 4 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12325 // CHECK-RV64-NEXT:    ret void
12326 //
test_vsuxseg4ei8_v_u32m2_m(vbool16_t mask,uint32_t * base,vuint8mf2_t bindex,vuint32m2_t v0,vuint32m2_t v1,vuint32m2_t v2,vuint32m2_t v3,size_t vl)12327 void test_vsuxseg4ei8_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, vuint32m2_t v0, vuint32m2_t v1, vuint32m2_t v2, vuint32m2_t v3, size_t vl) {
12328   return vsuxseg4ei8_v_u32m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
12329 }
12330 
12331 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u32m4_m(
12332 // CHECK-RV64-NEXT:  entry:
12333 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i8.i64(<vscale x 8 x i32> [[V0:%.*]], <vscale x 8 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12334 // CHECK-RV64-NEXT:    ret void
12335 //
test_vsuxseg2ei8_v_u32m4_m(vbool8_t mask,uint32_t * base,vuint8m1_t bindex,vuint32m4_t v0,vuint32m4_t v1,size_t vl)12336 void test_vsuxseg2ei8_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint8m1_t bindex, vuint32m4_t v0, vuint32m4_t v1, size_t vl) {
12337   return vsuxseg2ei8_v_u32m4_m(mask, base, bindex, v0, v1, vl);
12338 }
12339 
12340 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u32mf2_m(
12341 // CHECK-RV64-NEXT:  entry:
12342 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12343 // CHECK-RV64-NEXT:    ret void
12344 //
test_vsuxseg2ei16_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint16mf4_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,size_t vl)12345 void test_vsuxseg2ei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, size_t vl) {
12346   return vsuxseg2ei16_v_u32mf2_m(mask, base, bindex, v0, v1, vl);
12347 }
12348 
12349 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u32mf2_m(
12350 // CHECK-RV64-NEXT:  entry:
12351 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12352 // CHECK-RV64-NEXT:    ret void
12353 //
test_vsuxseg3ei16_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint16mf4_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,size_t vl)12354 void test_vsuxseg3ei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, size_t vl) {
12355   return vsuxseg3ei16_v_u32mf2_m(mask, base, bindex, v0, v1, v2, vl);
12356 }
12357 
12358 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u32mf2_m(
12359 // CHECK-RV64-NEXT:  entry:
12360 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12361 // CHECK-RV64-NEXT:    ret void
12362 //
test_vsuxseg4ei16_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint16mf4_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,size_t vl)12363 void test_vsuxseg4ei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, size_t vl) {
12364   return vsuxseg4ei16_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
12365 }
12366 
12367 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u32mf2_m(
12368 // CHECK-RV64-NEXT:  entry:
12369 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12370 // CHECK-RV64-NEXT:    ret void
12371 //
test_vsuxseg5ei16_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint16mf4_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,size_t vl)12372 void test_vsuxseg5ei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, size_t vl) {
12373   return vsuxseg5ei16_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
12374 }
12375 
12376 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u32mf2_m(
12377 // CHECK-RV64-NEXT:  entry:
12378 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12379 // CHECK-RV64-NEXT:    ret void
12380 //
test_vsuxseg6ei16_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint16mf4_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,size_t vl)12381 void test_vsuxseg6ei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, size_t vl) {
12382   return vsuxseg6ei16_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
12383 }
12384 
12385 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u32mf2_m(
12386 // CHECK-RV64-NEXT:  entry:
12387 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12388 // CHECK-RV64-NEXT:    ret void
12389 //
test_vsuxseg7ei16_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint16mf4_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,vuint32mf2_t v6,size_t vl)12390 void test_vsuxseg7ei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, vuint32mf2_t v6, size_t vl) {
12391   return vsuxseg7ei16_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
12392 }
12393 
12394 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u32mf2_m(
12395 // CHECK-RV64-NEXT:  entry:
12396 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], <vscale x 1 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12397 // CHECK-RV64-NEXT:    ret void
12398 //
test_vsuxseg8ei16_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint16mf4_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,vuint32mf2_t v6,vuint32mf2_t v7,size_t vl)12399 void test_vsuxseg8ei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, vuint32mf2_t v6, vuint32mf2_t v7, size_t vl) {
12400   return vsuxseg8ei16_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
12401 }
12402 
12403 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u32m1_m(
12404 // CHECK-RV64-NEXT:  entry:
12405 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12406 // CHECK-RV64-NEXT:    ret void
12407 //
test_vsuxseg2ei16_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint16mf2_t bindex,vuint32m1_t v0,vuint32m1_t v1,size_t vl)12408 void test_vsuxseg2ei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t v0, vuint32m1_t v1, size_t vl) {
12409   return vsuxseg2ei16_v_u32m1_m(mask, base, bindex, v0, v1, vl);
12410 }
12411 
12412 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u32m1_m(
12413 // CHECK-RV64-NEXT:  entry:
12414 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12415 // CHECK-RV64-NEXT:    ret void
12416 //
test_vsuxseg3ei16_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint16mf2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,size_t vl)12417 void test_vsuxseg3ei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, size_t vl) {
12418   return vsuxseg3ei16_v_u32m1_m(mask, base, bindex, v0, v1, v2, vl);
12419 }
12420 
12421 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u32m1_m(
12422 // CHECK-RV64-NEXT:  entry:
12423 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12424 // CHECK-RV64-NEXT:    ret void
12425 //
test_vsuxseg4ei16_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint16mf2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,size_t vl)12426 void test_vsuxseg4ei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, size_t vl) {
12427   return vsuxseg4ei16_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
12428 }
12429 
12430 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u32m1_m(
12431 // CHECK-RV64-NEXT:  entry:
12432 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12433 // CHECK-RV64-NEXT:    ret void
12434 //
test_vsuxseg5ei16_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint16mf2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,size_t vl)12435 void test_vsuxseg5ei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, size_t vl) {
12436   return vsuxseg5ei16_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
12437 }
12438 
12439 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u32m1_m(
12440 // CHECK-RV64-NEXT:  entry:
12441 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12442 // CHECK-RV64-NEXT:    ret void
12443 //
test_vsuxseg6ei16_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint16mf2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,size_t vl)12444 void test_vsuxseg6ei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, size_t vl) {
12445   return vsuxseg6ei16_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
12446 }
12447 
12448 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u32m1_m(
12449 // CHECK-RV64-NEXT:  entry:
12450 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12451 // CHECK-RV64-NEXT:    ret void
12452 //
test_vsuxseg7ei16_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint16mf2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,vuint32m1_t v6,size_t vl)12453 void test_vsuxseg7ei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, vuint32m1_t v6, size_t vl) {
12454   return vsuxseg7ei16_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
12455 }
12456 
12457 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u32m1_m(
12458 // CHECK-RV64-NEXT:  entry:
12459 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], <vscale x 2 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12460 // CHECK-RV64-NEXT:    ret void
12461 //
test_vsuxseg8ei16_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint16mf2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,vuint32m1_t v6,vuint32m1_t v7,size_t vl)12462 void test_vsuxseg8ei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, vuint32m1_t v6, vuint32m1_t v7, size_t vl) {
12463   return vsuxseg8ei16_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
12464 }
12465 
12466 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u32m2_m(
12467 // CHECK-RV64-NEXT:  entry:
12468 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12469 // CHECK-RV64-NEXT:    ret void
12470 //
test_vsuxseg2ei16_v_u32m2_m(vbool16_t mask,uint32_t * base,vuint16m1_t bindex,vuint32m2_t v0,vuint32m2_t v1,size_t vl)12471 void test_vsuxseg2ei16_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint16m1_t bindex, vuint32m2_t v0, vuint32m2_t v1, size_t vl) {
12472   return vsuxseg2ei16_v_u32m2_m(mask, base, bindex, v0, v1, vl);
12473 }
12474 
12475 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u32m2_m(
12476 // CHECK-RV64-NEXT:  entry:
12477 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12478 // CHECK-RV64-NEXT:    ret void
12479 //
test_vsuxseg3ei16_v_u32m2_m(vbool16_t mask,uint32_t * base,vuint16m1_t bindex,vuint32m2_t v0,vuint32m2_t v1,vuint32m2_t v2,size_t vl)12480 void test_vsuxseg3ei16_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint16m1_t bindex, vuint32m2_t v0, vuint32m2_t v1, vuint32m2_t v2, size_t vl) {
12481   return vsuxseg3ei16_v_u32m2_m(mask, base, bindex, v0, v1, v2, vl);
12482 }
12483 
12484 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u32m2_m(
12485 // CHECK-RV64-NEXT:  entry:
12486 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], <vscale x 4 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12487 // CHECK-RV64-NEXT:    ret void
12488 //
test_vsuxseg4ei16_v_u32m2_m(vbool16_t mask,uint32_t * base,vuint16m1_t bindex,vuint32m2_t v0,vuint32m2_t v1,vuint32m2_t v2,vuint32m2_t v3,size_t vl)12489 void test_vsuxseg4ei16_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint16m1_t bindex, vuint32m2_t v0, vuint32m2_t v1, vuint32m2_t v2, vuint32m2_t v3, size_t vl) {
12490   return vsuxseg4ei16_v_u32m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
12491 }
12492 
12493 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u32m4_m(
12494 // CHECK-RV64-NEXT:  entry:
12495 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[V0:%.*]], <vscale x 8 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12496 // CHECK-RV64-NEXT:    ret void
12497 //
test_vsuxseg2ei16_v_u32m4_m(vbool8_t mask,uint32_t * base,vuint16m2_t bindex,vuint32m4_t v0,vuint32m4_t v1,size_t vl)12498 void test_vsuxseg2ei16_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint16m2_t bindex, vuint32m4_t v0, vuint32m4_t v1, size_t vl) {
12499   return vsuxseg2ei16_v_u32m4_m(mask, base, bindex, v0, v1, vl);
12500 }
12501 
12502 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u32mf2_m(
12503 // CHECK-RV64-NEXT:  entry:
12504 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12505 // CHECK-RV64-NEXT:    ret void
12506 //
test_vsuxseg2ei32_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint32mf2_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,size_t vl)12507 void test_vsuxseg2ei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, size_t vl) {
12508   return vsuxseg2ei32_v_u32mf2_m(mask, base, bindex, v0, v1, vl);
12509 }
12510 
12511 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u32mf2_m(
12512 // CHECK-RV64-NEXT:  entry:
12513 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12514 // CHECK-RV64-NEXT:    ret void
12515 //
test_vsuxseg3ei32_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint32mf2_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,size_t vl)12516 void test_vsuxseg3ei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, size_t vl) {
12517   return vsuxseg3ei32_v_u32mf2_m(mask, base, bindex, v0, v1, v2, vl);
12518 }
12519 
12520 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u32mf2_m(
12521 // CHECK-RV64-NEXT:  entry:
12522 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12523 // CHECK-RV64-NEXT:    ret void
12524 //
test_vsuxseg4ei32_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint32mf2_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,size_t vl)12525 void test_vsuxseg4ei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, size_t vl) {
12526   return vsuxseg4ei32_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
12527 }
12528 
12529 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u32mf2_m(
12530 // CHECK-RV64-NEXT:  entry:
12531 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12532 // CHECK-RV64-NEXT:    ret void
12533 //
test_vsuxseg5ei32_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint32mf2_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,size_t vl)12534 void test_vsuxseg5ei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, size_t vl) {
12535   return vsuxseg5ei32_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
12536 }
12537 
12538 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u32mf2_m(
12539 // CHECK-RV64-NEXT:  entry:
12540 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12541 // CHECK-RV64-NEXT:    ret void
12542 //
test_vsuxseg6ei32_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint32mf2_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,size_t vl)12543 void test_vsuxseg6ei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, size_t vl) {
12544   return vsuxseg6ei32_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
12545 }
12546 
12547 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u32mf2_m(
12548 // CHECK-RV64-NEXT:  entry:
12549 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12550 // CHECK-RV64-NEXT:    ret void
12551 //
test_vsuxseg7ei32_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint32mf2_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,vuint32mf2_t v6,size_t vl)12552 void test_vsuxseg7ei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, vuint32mf2_t v6, size_t vl) {
12553   return vsuxseg7ei32_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
12554 }
12555 
12556 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u32mf2_m(
12557 // CHECK-RV64-NEXT:  entry:
12558 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], <vscale x 1 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12559 // CHECK-RV64-NEXT:    ret void
12560 //
test_vsuxseg8ei32_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint32mf2_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,vuint32mf2_t v6,vuint32mf2_t v7,size_t vl)12561 void test_vsuxseg8ei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, vuint32mf2_t v6, vuint32mf2_t v7, size_t vl) {
12562   return vsuxseg8ei32_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
12563 }
12564 
12565 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u32m1_m(
12566 // CHECK-RV64-NEXT:  entry:
12567 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12568 // CHECK-RV64-NEXT:    ret void
12569 //
test_vsuxseg2ei32_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint32m1_t bindex,vuint32m1_t v0,vuint32m1_t v1,size_t vl)12570 void test_vsuxseg2ei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t v0, vuint32m1_t v1, size_t vl) {
12571   return vsuxseg2ei32_v_u32m1_m(mask, base, bindex, v0, v1, vl);
12572 }
12573 
12574 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u32m1_m(
12575 // CHECK-RV64-NEXT:  entry:
12576 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12577 // CHECK-RV64-NEXT:    ret void
12578 //
test_vsuxseg3ei32_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint32m1_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,size_t vl)12579 void test_vsuxseg3ei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, size_t vl) {
12580   return vsuxseg3ei32_v_u32m1_m(mask, base, bindex, v0, v1, v2, vl);
12581 }
12582 
12583 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u32m1_m(
12584 // CHECK-RV64-NEXT:  entry:
12585 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12586 // CHECK-RV64-NEXT:    ret void
12587 //
test_vsuxseg4ei32_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint32m1_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,size_t vl)12588 void test_vsuxseg4ei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, size_t vl) {
12589   return vsuxseg4ei32_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
12590 }
12591 
12592 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u32m1_m(
12593 // CHECK-RV64-NEXT:  entry:
12594 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12595 // CHECK-RV64-NEXT:    ret void
12596 //
test_vsuxseg5ei32_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint32m1_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,size_t vl)12597 void test_vsuxseg5ei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, size_t vl) {
12598   return vsuxseg5ei32_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
12599 }
12600 
12601 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u32m1_m(
12602 // CHECK-RV64-NEXT:  entry:
12603 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12604 // CHECK-RV64-NEXT:    ret void
12605 //
test_vsuxseg6ei32_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint32m1_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,size_t vl)12606 void test_vsuxseg6ei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, size_t vl) {
12607   return vsuxseg6ei32_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
12608 }
12609 
12610 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u32m1_m(
12611 // CHECK-RV64-NEXT:  entry:
12612 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12613 // CHECK-RV64-NEXT:    ret void
12614 //
test_vsuxseg7ei32_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint32m1_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,vuint32m1_t v6,size_t vl)12615 void test_vsuxseg7ei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, vuint32m1_t v6, size_t vl) {
12616   return vsuxseg7ei32_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
12617 }
12618 
12619 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u32m1_m(
12620 // CHECK-RV64-NEXT:  entry:
12621 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], <vscale x 2 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12622 // CHECK-RV64-NEXT:    ret void
12623 //
test_vsuxseg8ei32_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint32m1_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,vuint32m1_t v6,vuint32m1_t v7,size_t vl)12624 void test_vsuxseg8ei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, vuint32m1_t v6, vuint32m1_t v7, size_t vl) {
12625   return vsuxseg8ei32_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
12626 }
12627 
12628 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u32m2_m(
12629 // CHECK-RV64-NEXT:  entry:
12630 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12631 // CHECK-RV64-NEXT:    ret void
12632 //
test_vsuxseg2ei32_v_u32m2_m(vbool16_t mask,uint32_t * base,vuint32m2_t bindex,vuint32m2_t v0,vuint32m2_t v1,size_t vl)12633 void test_vsuxseg2ei32_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint32m2_t bindex, vuint32m2_t v0, vuint32m2_t v1, size_t vl) {
12634   return vsuxseg2ei32_v_u32m2_m(mask, base, bindex, v0, v1, vl);
12635 }
12636 
12637 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u32m2_m(
12638 // CHECK-RV64-NEXT:  entry:
12639 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12640 // CHECK-RV64-NEXT:    ret void
12641 //
test_vsuxseg3ei32_v_u32m2_m(vbool16_t mask,uint32_t * base,vuint32m2_t bindex,vuint32m2_t v0,vuint32m2_t v1,vuint32m2_t v2,size_t vl)12642 void test_vsuxseg3ei32_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint32m2_t bindex, vuint32m2_t v0, vuint32m2_t v1, vuint32m2_t v2, size_t vl) {
12643   return vsuxseg3ei32_v_u32m2_m(mask, base, bindex, v0, v1, v2, vl);
12644 }
12645 
12646 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u32m2_m(
12647 // CHECK-RV64-NEXT:  entry:
12648 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], <vscale x 4 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12649 // CHECK-RV64-NEXT:    ret void
12650 //
test_vsuxseg4ei32_v_u32m2_m(vbool16_t mask,uint32_t * base,vuint32m2_t bindex,vuint32m2_t v0,vuint32m2_t v1,vuint32m2_t v2,vuint32m2_t v3,size_t vl)12651 void test_vsuxseg4ei32_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint32m2_t bindex, vuint32m2_t v0, vuint32m2_t v1, vuint32m2_t v2, vuint32m2_t v3, size_t vl) {
12652   return vsuxseg4ei32_v_u32m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
12653 }
12654 
12655 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u32m4_m(
12656 // CHECK-RV64-NEXT:  entry:
12657 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[V0:%.*]], <vscale x 8 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12658 // CHECK-RV64-NEXT:    ret void
12659 //
test_vsuxseg2ei32_v_u32m4_m(vbool8_t mask,uint32_t * base,vuint32m4_t bindex,vuint32m4_t v0,vuint32m4_t v1,size_t vl)12660 void test_vsuxseg2ei32_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint32m4_t bindex, vuint32m4_t v0, vuint32m4_t v1, size_t vl) {
12661   return vsuxseg2ei32_v_u32m4_m(mask, base, bindex, v0, v1, vl);
12662 }
12663 
12664 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u32mf2_m(
12665 // CHECK-RV64-NEXT:  entry:
12666 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12667 // CHECK-RV64-NEXT:    ret void
12668 //
test_vsuxseg2ei64_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint64m1_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,size_t vl)12669 void test_vsuxseg2ei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, size_t vl) {
12670   return vsuxseg2ei64_v_u32mf2_m(mask, base, bindex, v0, v1, vl);
12671 }
12672 
12673 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u32mf2_m(
12674 // CHECK-RV64-NEXT:  entry:
12675 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12676 // CHECK-RV64-NEXT:    ret void
12677 //
test_vsuxseg3ei64_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint64m1_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,size_t vl)12678 void test_vsuxseg3ei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, size_t vl) {
12679   return vsuxseg3ei64_v_u32mf2_m(mask, base, bindex, v0, v1, v2, vl);
12680 }
12681 
12682 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u32mf2_m(
12683 // CHECK-RV64-NEXT:  entry:
12684 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12685 // CHECK-RV64-NEXT:    ret void
12686 //
test_vsuxseg4ei64_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint64m1_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,size_t vl)12687 void test_vsuxseg4ei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, size_t vl) {
12688   return vsuxseg4ei64_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
12689 }
12690 
12691 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u32mf2_m(
12692 // CHECK-RV64-NEXT:  entry:
12693 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12694 // CHECK-RV64-NEXT:    ret void
12695 //
test_vsuxseg5ei64_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint64m1_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,size_t vl)12696 void test_vsuxseg5ei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, size_t vl) {
12697   return vsuxseg5ei64_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
12698 }
12699 
12700 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u32mf2_m(
12701 // CHECK-RV64-NEXT:  entry:
12702 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12703 // CHECK-RV64-NEXT:    ret void
12704 //
test_vsuxseg6ei64_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint64m1_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,size_t vl)12705 void test_vsuxseg6ei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, size_t vl) {
12706   return vsuxseg6ei64_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
12707 }
12708 
12709 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u32mf2_m(
12710 // CHECK-RV64-NEXT:  entry:
12711 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12712 // CHECK-RV64-NEXT:    ret void
12713 //
test_vsuxseg7ei64_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint64m1_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,vuint32mf2_t v6,size_t vl)12714 void test_vsuxseg7ei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, vuint32mf2_t v6, size_t vl) {
12715   return vsuxseg7ei64_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
12716 }
12717 
12718 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u32mf2_m(
12719 // CHECK-RV64-NEXT:  entry:
12720 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[V0:%.*]], <vscale x 1 x i32> [[V1:%.*]], <vscale x 1 x i32> [[V2:%.*]], <vscale x 1 x i32> [[V3:%.*]], <vscale x 1 x i32> [[V4:%.*]], <vscale x 1 x i32> [[V5:%.*]], <vscale x 1 x i32> [[V6:%.*]], <vscale x 1 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12721 // CHECK-RV64-NEXT:    ret void
12722 //
test_vsuxseg8ei64_v_u32mf2_m(vbool64_t mask,uint32_t * base,vuint64m1_t bindex,vuint32mf2_t v0,vuint32mf2_t v1,vuint32mf2_t v2,vuint32mf2_t v3,vuint32mf2_t v4,vuint32mf2_t v5,vuint32mf2_t v6,vuint32mf2_t v7,size_t vl)12723 void test_vsuxseg8ei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t v0, vuint32mf2_t v1, vuint32mf2_t v2, vuint32mf2_t v3, vuint32mf2_t v4, vuint32mf2_t v5, vuint32mf2_t v6, vuint32mf2_t v7, size_t vl) {
12724   return vsuxseg8ei64_v_u32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
12725 }
12726 
12727 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u32m1_m(
12728 // CHECK-RV64-NEXT:  entry:
12729 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12730 // CHECK-RV64-NEXT:    ret void
12731 //
// Codegen test: expects a single @llvm.riscv.vsuxseg2.mask call (see CHECK lines above).
test_vsuxseg2ei64_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint64m2_t bindex,vuint32m1_t v0,vuint32m1_t v1,size_t vl)12732 void test_vsuxseg2ei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t v0, vuint32m1_t v1, size_t vl) {
12733   return vsuxseg2ei64_v_u32m1_m(mask, base, bindex, v0, v1, vl);
12734 }
12735 
12736 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u32m1_m(
12737 // CHECK-RV64-NEXT:  entry:
12738 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12739 // CHECK-RV64-NEXT:    ret void
12740 //
// Codegen test: expects a single @llvm.riscv.vsuxseg3.mask call (see CHECK lines above).
test_vsuxseg3ei64_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint64m2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,size_t vl)12741 void test_vsuxseg3ei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, size_t vl) {
12742   return vsuxseg3ei64_v_u32m1_m(mask, base, bindex, v0, v1, v2, vl);
12743 }
12744 
12745 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u32m1_m(
12746 // CHECK-RV64-NEXT:  entry:
12747 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12748 // CHECK-RV64-NEXT:    ret void
12749 //
// Codegen test: expects a single @llvm.riscv.vsuxseg4.mask call (see CHECK lines above).
test_vsuxseg4ei64_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint64m2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,size_t vl)12750 void test_vsuxseg4ei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, size_t vl) {
12751   return vsuxseg4ei64_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
12752 }
12753 
12754 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u32m1_m(
12755 // CHECK-RV64-NEXT:  entry:
12756 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12757 // CHECK-RV64-NEXT:    ret void
12758 //
// Codegen test: expects a single @llvm.riscv.vsuxseg5.mask call (see CHECK lines above).
test_vsuxseg5ei64_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint64m2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,size_t vl)12759 void test_vsuxseg5ei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, size_t vl) {
12760   return vsuxseg5ei64_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
12761 }
12762 
12763 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u32m1_m(
12764 // CHECK-RV64-NEXT:  entry:
12765 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12766 // CHECK-RV64-NEXT:    ret void
12767 //
// Codegen test: expects a single @llvm.riscv.vsuxseg6.mask call (see CHECK lines above).
test_vsuxseg6ei64_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint64m2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,size_t vl)12768 void test_vsuxseg6ei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, size_t vl) {
12769   return vsuxseg6ei64_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
12770 }
12771 
12772 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u32m1_m(
12773 // CHECK-RV64-NEXT:  entry:
12774 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12775 // CHECK-RV64-NEXT:    ret void
12776 //
// Codegen test: expects a single @llvm.riscv.vsuxseg7.mask call (see CHECK lines above).
test_vsuxseg7ei64_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint64m2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,vuint32m1_t v6,size_t vl)12777 void test_vsuxseg7ei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, vuint32m1_t v6, size_t vl) {
12778   return vsuxseg7ei64_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
12779 }
12780 
12781 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u32m1_m(
12782 // CHECK-RV64-NEXT:  entry:
12783 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[V0:%.*]], <vscale x 2 x i32> [[V1:%.*]], <vscale x 2 x i32> [[V2:%.*]], <vscale x 2 x i32> [[V3:%.*]], <vscale x 2 x i32> [[V4:%.*]], <vscale x 2 x i32> [[V5:%.*]], <vscale x 2 x i32> [[V6:%.*]], <vscale x 2 x i32> [[V7:%.*]], i32* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12784 // CHECK-RV64-NEXT:    ret void
12785 //
// Codegen test: expects a single @llvm.riscv.vsuxseg8.mask call (see CHECK lines above).
test_vsuxseg8ei64_v_u32m1_m(vbool32_t mask,uint32_t * base,vuint64m2_t bindex,vuint32m1_t v0,vuint32m1_t v1,vuint32m1_t v2,vuint32m1_t v3,vuint32m1_t v4,vuint32m1_t v5,vuint32m1_t v6,vuint32m1_t v7,size_t vl)12786 void test_vsuxseg8ei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t v0, vuint32m1_t v1, vuint32m1_t v2, vuint32m1_t v3, vuint32m1_t v4, vuint32m1_t v5, vuint32m1_t v6, vuint32m1_t v7, size_t vl) {
12787   return vsuxseg8ei64_v_u32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
12788 }
12789 
12790 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u32m2_m(
12791 // CHECK-RV64-NEXT:  entry:
12792 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12793 // CHECK-RV64-NEXT:    ret void
12794 //
// Codegen test: expects a single @llvm.riscv.vsuxseg2.mask call (see CHECK lines above).
test_vsuxseg2ei64_v_u32m2_m(vbool16_t mask,uint32_t * base,vuint64m4_t bindex,vuint32m2_t v0,vuint32m2_t v1,size_t vl)12795 void test_vsuxseg2ei64_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint64m4_t bindex, vuint32m2_t v0, vuint32m2_t v1, size_t vl) {
12796   return vsuxseg2ei64_v_u32m2_m(mask, base, bindex, v0, v1, vl);
12797 }
12798 
12799 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u32m2_m(
12800 // CHECK-RV64-NEXT:  entry:
12801 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12802 // CHECK-RV64-NEXT:    ret void
12803 //
// Codegen test: expects a single @llvm.riscv.vsuxseg3.mask call (see CHECK lines above).
test_vsuxseg3ei64_v_u32m2_m(vbool16_t mask,uint32_t * base,vuint64m4_t bindex,vuint32m2_t v0,vuint32m2_t v1,vuint32m2_t v2,size_t vl)12804 void test_vsuxseg3ei64_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint64m4_t bindex, vuint32m2_t v0, vuint32m2_t v1, vuint32m2_t v2, size_t vl) {
12805   return vsuxseg3ei64_v_u32m2_m(mask, base, bindex, v0, v1, v2, vl);
12806 }
12807 
12808 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u32m2_m(
12809 // CHECK-RV64-NEXT:  entry:
12810 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[V0:%.*]], <vscale x 4 x i32> [[V1:%.*]], <vscale x 4 x i32> [[V2:%.*]], <vscale x 4 x i32> [[V3:%.*]], i32* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12811 // CHECK-RV64-NEXT:    ret void
12812 //
// Codegen test: expects a single @llvm.riscv.vsuxseg4.mask call (see CHECK lines above).
test_vsuxseg4ei64_v_u32m2_m(vbool16_t mask,uint32_t * base,vuint64m4_t bindex,vuint32m2_t v0,vuint32m2_t v1,vuint32m2_t v2,vuint32m2_t v3,size_t vl)12813 void test_vsuxseg4ei64_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint64m4_t bindex, vuint32m2_t v0, vuint32m2_t v1, vuint32m2_t v2, vuint32m2_t v3, size_t vl) {
12814   return vsuxseg4ei64_v_u32m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
12815 }
12816 
12817 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u32m4_m(
12818 // CHECK-RV64-NEXT:  entry:
12819 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[V0:%.*]], <vscale x 8 x i32> [[V1:%.*]], i32* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12820 // CHECK-RV64-NEXT:    ret void
12821 //
// Codegen test: expects a single @llvm.riscv.vsuxseg2.mask call (see CHECK lines above).
test_vsuxseg2ei64_v_u32m4_m(vbool8_t mask,uint32_t * base,vuint64m8_t bindex,vuint32m4_t v0,vuint32m4_t v1,size_t vl)12822 void test_vsuxseg2ei64_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint64m8_t bindex, vuint32m4_t v0, vuint32m4_t v1, size_t vl) {
12823   return vsuxseg2ei64_v_u32m4_m(mask, base, bindex, v0, v1, vl);
12824 }
12825 
12826 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u64m1_m(
12827 // CHECK-RV64-NEXT:  entry:
12828 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12829 // CHECK-RV64-NEXT:    ret void
12830 //
// Codegen test: expects a single @llvm.riscv.vsuxseg2.mask call (see CHECK lines above).
test_vsuxseg2ei8_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint8mf8_t bindex,vuint64m1_t v0,vuint64m1_t v1,size_t vl)12831 void test_vsuxseg2ei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t v0, vuint64m1_t v1, size_t vl) {
12832   return vsuxseg2ei8_v_u64m1_m(mask, base, bindex, v0, v1, vl);
12833 }
12834 
12835 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u64m1_m(
12836 // CHECK-RV64-NEXT:  entry:
12837 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12838 // CHECK-RV64-NEXT:    ret void
12839 //
// Codegen test: expects a single @llvm.riscv.vsuxseg3.mask call (see CHECK lines above).
test_vsuxseg3ei8_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint8mf8_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,size_t vl)12840 void test_vsuxseg3ei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, size_t vl) {
12841   return vsuxseg3ei8_v_u64m1_m(mask, base, bindex, v0, v1, v2, vl);
12842 }
12843 
12844 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u64m1_m(
12845 // CHECK-RV64-NEXT:  entry:
12846 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12847 // CHECK-RV64-NEXT:    ret void
12848 //
// Codegen test: expects a single @llvm.riscv.vsuxseg4.mask call (see CHECK lines above).
test_vsuxseg4ei8_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint8mf8_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,size_t vl)12849 void test_vsuxseg4ei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, size_t vl) {
12850   return vsuxseg4ei8_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
12851 }
12852 
12853 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_u64m1_m(
12854 // CHECK-RV64-NEXT:  entry:
12855 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12856 // CHECK-RV64-NEXT:    ret void
12857 //
// Codegen test: expects a single @llvm.riscv.vsuxseg5.mask call (see CHECK lines above).
test_vsuxseg5ei8_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint8mf8_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,size_t vl)12858 void test_vsuxseg5ei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, size_t vl) {
12859   return vsuxseg5ei8_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
12860 }
12861 
12862 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_u64m1_m(
12863 // CHECK-RV64-NEXT:  entry:
12864 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12865 // CHECK-RV64-NEXT:    ret void
12866 //
// Codegen test: expects a single @llvm.riscv.vsuxseg6.mask call (see CHECK lines above).
test_vsuxseg6ei8_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint8mf8_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,vuint64m1_t v5,size_t vl)12867 void test_vsuxseg6ei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, size_t vl) {
12868   return vsuxseg6ei8_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
12869 }
12870 
12871 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_u64m1_m(
12872 // CHECK-RV64-NEXT:  entry:
12873 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12874 // CHECK-RV64-NEXT:    ret void
12875 //
// Codegen test: expects a single @llvm.riscv.vsuxseg7.mask call (see CHECK lines above).
test_vsuxseg7ei8_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint8mf8_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,vuint64m1_t v5,vuint64m1_t v6,size_t vl)12876 void test_vsuxseg7ei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, vuint64m1_t v6, size_t vl) {
12877   return vsuxseg7ei8_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
12878 }
12879 
12880 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_u64m1_m(
12881 // CHECK-RV64-NEXT:  entry:
12882 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], <vscale x 1 x i64> [[V7:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12883 // CHECK-RV64-NEXT:    ret void
12884 //
// Codegen test: expects a single @llvm.riscv.vsuxseg8.mask call (see CHECK lines above).
test_vsuxseg8ei8_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint8mf8_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,vuint64m1_t v5,vuint64m1_t v6,vuint64m1_t v7,size_t vl)12885 void test_vsuxseg8ei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, vuint64m1_t v6, vuint64m1_t v7, size_t vl) {
12886   return vsuxseg8ei8_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
12887 }
12888 
12889 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u64m2_m(
12890 // CHECK-RV64-NEXT:  entry:
12891 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12892 // CHECK-RV64-NEXT:    ret void
12893 //
// Codegen test: expects a single @llvm.riscv.vsuxseg2.mask call (see CHECK lines above).
test_vsuxseg2ei8_v_u64m2_m(vbool32_t mask,uint64_t * base,vuint8mf4_t bindex,vuint64m2_t v0,vuint64m2_t v1,size_t vl)12894 void test_vsuxseg2ei8_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, vuint64m2_t v0, vuint64m2_t v1, size_t vl) {
12895   return vsuxseg2ei8_v_u64m2_m(mask, base, bindex, v0, v1, vl);
12896 }
12897 
12898 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_u64m2_m(
12899 // CHECK-RV64-NEXT:  entry:
12900 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12901 // CHECK-RV64-NEXT:    ret void
12902 //
// Codegen test: expects a single @llvm.riscv.vsuxseg3.mask call (see CHECK lines above).
test_vsuxseg3ei8_v_u64m2_m(vbool32_t mask,uint64_t * base,vuint8mf4_t bindex,vuint64m2_t v0,vuint64m2_t v1,vuint64m2_t v2,size_t vl)12903 void test_vsuxseg3ei8_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, vuint64m2_t v0, vuint64m2_t v1, vuint64m2_t v2, size_t vl) {
12904   return vsuxseg3ei8_v_u64m2_m(mask, base, bindex, v0, v1, v2, vl);
12905 }
12906 
12907 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_u64m2_m(
12908 // CHECK-RV64-NEXT:  entry:
12909 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], <vscale x 2 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12910 // CHECK-RV64-NEXT:    ret void
12911 //
// Codegen test: expects a single @llvm.riscv.vsuxseg4.mask call (see CHECK lines above).
test_vsuxseg4ei8_v_u64m2_m(vbool32_t mask,uint64_t * base,vuint8mf4_t bindex,vuint64m2_t v0,vuint64m2_t v1,vuint64m2_t v2,vuint64m2_t v3,size_t vl)12912 void test_vsuxseg4ei8_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, vuint64m2_t v0, vuint64m2_t v1, vuint64m2_t v2, vuint64m2_t v3, size_t vl) {
12913   return vsuxseg4ei8_v_u64m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
12914 }
12915 
12916 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_u64m4_m(
12917 // CHECK-RV64-NEXT:  entry:
12918 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[V0:%.*]], <vscale x 4 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12919 // CHECK-RV64-NEXT:    ret void
12920 //
// Codegen test: expects a single @llvm.riscv.vsuxseg2.mask call (see CHECK lines above).
test_vsuxseg2ei8_v_u64m4_m(vbool16_t mask,uint64_t * base,vuint8mf2_t bindex,vuint64m4_t v0,vuint64m4_t v1,size_t vl)12921 void test_vsuxseg2ei8_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint8mf2_t bindex, vuint64m4_t v0, vuint64m4_t v1, size_t vl) {
12922   return vsuxseg2ei8_v_u64m4_m(mask, base, bindex, v0, v1, vl);
12923 }
12924 
12925 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u64m1_m(
12926 // CHECK-RV64-NEXT:  entry:
12927 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12928 // CHECK-RV64-NEXT:    ret void
12929 //
// Codegen test: expects a single @llvm.riscv.vsuxseg2.mask call (see CHECK lines above).
test_vsuxseg2ei16_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint16mf4_t bindex,vuint64m1_t v0,vuint64m1_t v1,size_t vl)12930 void test_vsuxseg2ei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t v0, vuint64m1_t v1, size_t vl) {
12931   return vsuxseg2ei16_v_u64m1_m(mask, base, bindex, v0, v1, vl);
12932 }
12933 
12934 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u64m1_m(
12935 // CHECK-RV64-NEXT:  entry:
12936 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12937 // CHECK-RV64-NEXT:    ret void
12938 //
// Codegen test: expects a single @llvm.riscv.vsuxseg3.mask call (see CHECK lines above).
test_vsuxseg3ei16_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint16mf4_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,size_t vl)12939 void test_vsuxseg3ei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, size_t vl) {
12940   return vsuxseg3ei16_v_u64m1_m(mask, base, bindex, v0, v1, v2, vl);
12941 }
12942 
12943 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u64m1_m(
12944 // CHECK-RV64-NEXT:  entry:
12945 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12946 // CHECK-RV64-NEXT:    ret void
12947 //
// Codegen test: expects a single @llvm.riscv.vsuxseg4.mask call (see CHECK lines above).
test_vsuxseg4ei16_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint16mf4_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,size_t vl)12948 void test_vsuxseg4ei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, size_t vl) {
12949   return vsuxseg4ei16_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
12950 }
12951 
12952 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_u64m1_m(
12953 // CHECK-RV64-NEXT:  entry:
12954 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12955 // CHECK-RV64-NEXT:    ret void
12956 //
// Codegen test: expects a single @llvm.riscv.vsuxseg5.mask call (see CHECK lines above).
test_vsuxseg5ei16_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint16mf4_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,size_t vl)12957 void test_vsuxseg5ei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, size_t vl) {
12958   return vsuxseg5ei16_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
12959 }
12960 
12961 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_u64m1_m(
12962 // CHECK-RV64-NEXT:  entry:
12963 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12964 // CHECK-RV64-NEXT:    ret void
12965 //
// Codegen test: expects a single @llvm.riscv.vsuxseg6.mask call (see CHECK lines above).
test_vsuxseg6ei16_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint16mf4_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,vuint64m1_t v5,size_t vl)12966 void test_vsuxseg6ei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, size_t vl) {
12967   return vsuxseg6ei16_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
12968 }
12969 
12970 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_u64m1_m(
12971 // CHECK-RV64-NEXT:  entry:
12972 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12973 // CHECK-RV64-NEXT:    ret void
12974 //
// Codegen test: expects a single @llvm.riscv.vsuxseg7.mask call (see CHECK lines above).
test_vsuxseg7ei16_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint16mf4_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,vuint64m1_t v5,vuint64m1_t v6,size_t vl)12975 void test_vsuxseg7ei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, vuint64m1_t v6, size_t vl) {
12976   return vsuxseg7ei16_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
12977 }
12978 
12979 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_u64m1_m(
12980 // CHECK-RV64-NEXT:  entry:
12981 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], <vscale x 1 x i64> [[V7:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12982 // CHECK-RV64-NEXT:    ret void
12983 //
// Codegen test: expects a single @llvm.riscv.vsuxseg8.mask call (see CHECK lines above).
test_vsuxseg8ei16_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint16mf4_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,vuint64m1_t v5,vuint64m1_t v6,vuint64m1_t v7,size_t vl)12984 void test_vsuxseg8ei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, vuint64m1_t v6, vuint64m1_t v7, size_t vl) {
12985   return vsuxseg8ei16_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
12986 }
12987 
12988 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u64m2_m(
12989 // CHECK-RV64-NEXT:  entry:
12990 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
12991 // CHECK-RV64-NEXT:    ret void
12992 //
// Codegen test: expects a single @llvm.riscv.vsuxseg2.mask call (see CHECK lines above).
test_vsuxseg2ei16_v_u64m2_m(vbool32_t mask,uint64_t * base,vuint16mf2_t bindex,vuint64m2_t v0,vuint64m2_t v1,size_t vl)12993 void test_vsuxseg2ei16_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint16mf2_t bindex, vuint64m2_t v0, vuint64m2_t v1, size_t vl) {
12994   return vsuxseg2ei16_v_u64m2_m(mask, base, bindex, v0, v1, vl);
12995 }
12996 
12997 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_u64m2_m(
12998 // CHECK-RV64-NEXT:  entry:
12999 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13000 // CHECK-RV64-NEXT:    ret void
13001 //
// Codegen test: expects a single @llvm.riscv.vsuxseg3.mask call (see CHECK lines above).
test_vsuxseg3ei16_v_u64m2_m(vbool32_t mask,uint64_t * base,vuint16mf2_t bindex,vuint64m2_t v0,vuint64m2_t v1,vuint64m2_t v2,size_t vl)13002 void test_vsuxseg3ei16_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint16mf2_t bindex, vuint64m2_t v0, vuint64m2_t v1, vuint64m2_t v2, size_t vl) {
13003   return vsuxseg3ei16_v_u64m2_m(mask, base, bindex, v0, v1, v2, vl);
13004 }
13005 
13006 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_u64m2_m(
13007 // CHECK-RV64-NEXT:  entry:
13008 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], <vscale x 2 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13009 // CHECK-RV64-NEXT:    ret void
13010 //
// Codegen test: expects a single @llvm.riscv.vsuxseg4.mask call (see CHECK lines above).
test_vsuxseg4ei16_v_u64m2_m(vbool32_t mask,uint64_t * base,vuint16mf2_t bindex,vuint64m2_t v0,vuint64m2_t v1,vuint64m2_t v2,vuint64m2_t v3,size_t vl)13011 void test_vsuxseg4ei16_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint16mf2_t bindex, vuint64m2_t v0, vuint64m2_t v1, vuint64m2_t v2, vuint64m2_t v3, size_t vl) {
13012   return vsuxseg4ei16_v_u64m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
13013 }
13014 
13015 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_u64m4_m(
13016 // CHECK-RV64-NEXT:  entry:
13017 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[V0:%.*]], <vscale x 4 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13018 // CHECK-RV64-NEXT:    ret void
13019 //
// Codegen test: expects a single @llvm.riscv.vsuxseg2.mask call (see CHECK lines above).
test_vsuxseg2ei16_v_u64m4_m(vbool16_t mask,uint64_t * base,vuint16m1_t bindex,vuint64m4_t v0,vuint64m4_t v1,size_t vl)13020 void test_vsuxseg2ei16_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint16m1_t bindex, vuint64m4_t v0, vuint64m4_t v1, size_t vl) {
13021   return vsuxseg2ei16_v_u64m4_m(mask, base, bindex, v0, v1, vl);
13022 }
13023 
13024 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u64m1_m(
13025 // CHECK-RV64-NEXT:  entry:
13026 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13027 // CHECK-RV64-NEXT:    ret void
13028 //
// Codegen test: expects a single @llvm.riscv.vsuxseg2.mask call (see CHECK lines above).
test_vsuxseg2ei32_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint32mf2_t bindex,vuint64m1_t v0,vuint64m1_t v1,size_t vl)13029 void test_vsuxseg2ei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t v0, vuint64m1_t v1, size_t vl) {
13030   return vsuxseg2ei32_v_u64m1_m(mask, base, bindex, v0, v1, vl);
13031 }
13032 
13033 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u64m1_m(
13034 // CHECK-RV64-NEXT:  entry:
13035 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13036 // CHECK-RV64-NEXT:    ret void
13037 //
// Codegen test: expects a single @llvm.riscv.vsuxseg3.mask call (see CHECK lines above).
test_vsuxseg3ei32_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint32mf2_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,size_t vl)13038 void test_vsuxseg3ei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, size_t vl) {
13039   return vsuxseg3ei32_v_u64m1_m(mask, base, bindex, v0, v1, v2, vl);
13040 }
13041 
13042 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u64m1_m(
13043 // CHECK-RV64-NEXT:  entry:
13044 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13045 // CHECK-RV64-NEXT:    ret void
13046 //
// Codegen test: expects a single @llvm.riscv.vsuxseg4.mask call (see CHECK lines above).
test_vsuxseg4ei32_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint32mf2_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,size_t vl)13047 void test_vsuxseg4ei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, size_t vl) {
13048   return vsuxseg4ei32_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
13049 }
13050 
13051 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_u64m1_m(
13052 // CHECK-RV64-NEXT:  entry:
13053 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13054 // CHECK-RV64-NEXT:    ret void
13055 //
// Codegen test: expects a single @llvm.riscv.vsuxseg5.mask call (see CHECK lines above).
test_vsuxseg5ei32_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint32mf2_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,size_t vl)13056 void test_vsuxseg5ei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, size_t vl) {
13057   return vsuxseg5ei32_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
13058 }
13059 
13060 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_u64m1_m(
13061 // CHECK-RV64-NEXT:  entry:
13062 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13063 // CHECK-RV64-NEXT:    ret void
13064 //
// Masked 6-field indexed-unordered segment store: u64m1 data, 32-bit indices.
test_vsuxseg6ei32_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint32mf2_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,vuint64m1_t v5,size_t vl)13065 void test_vsuxseg6ei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, size_t vl) {
13066   return vsuxseg6ei32_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
13067 }
13068 
13069 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_u64m1_m(
13070 // CHECK-RV64-NEXT:  entry:
13071 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13072 // CHECK-RV64-NEXT:    ret void
13073 //
// Masked 7-field indexed-unordered segment store: u64m1 data, 32-bit indices.
test_vsuxseg7ei32_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint32mf2_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,vuint64m1_t v5,vuint64m1_t v6,size_t vl)13074 void test_vsuxseg7ei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, vuint64m1_t v6, size_t vl) {
13075   return vsuxseg7ei32_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
13076 }
13077 
13078 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_u64m1_m(
13079 // CHECK-RV64-NEXT:  entry:
13080 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], <vscale x 1 x i64> [[V7:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13081 // CHECK-RV64-NEXT:    ret void
13082 //
// Masked 8-field indexed-unordered segment store: u64m1 data, 32-bit indices.
test_vsuxseg8ei32_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint32mf2_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,vuint64m1_t v5,vuint64m1_t v6,vuint64m1_t v7,size_t vl)13083 void test_vsuxseg8ei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, vuint64m1_t v6, vuint64m1_t v7, size_t vl) {
13084   return vsuxseg8ei32_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
13085 }
13086 
13087 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u64m2_m(
13088 // CHECK-RV64-NEXT:  entry:
13089 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13090 // CHECK-RV64-NEXT:    ret void
13091 //
// Masked 2-field indexed-unordered segment store: u64m2 data, 32-bit indices.
test_vsuxseg2ei32_v_u64m2_m(vbool32_t mask,uint64_t * base,vuint32m1_t bindex,vuint64m2_t v0,vuint64m2_t v1,size_t vl)13092 void test_vsuxseg2ei32_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint32m1_t bindex, vuint64m2_t v0, vuint64m2_t v1, size_t vl) {
13093   return vsuxseg2ei32_v_u64m2_m(mask, base, bindex, v0, v1, vl);
13094 }
13095 
13096 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_u64m2_m(
13097 // CHECK-RV64-NEXT:  entry:
13098 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13099 // CHECK-RV64-NEXT:    ret void
13100 //
// Masked 3-field indexed-unordered segment store: u64m2 data, 32-bit indices.
test_vsuxseg3ei32_v_u64m2_m(vbool32_t mask,uint64_t * base,vuint32m1_t bindex,vuint64m2_t v0,vuint64m2_t v1,vuint64m2_t v2,size_t vl)13101 void test_vsuxseg3ei32_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint32m1_t bindex, vuint64m2_t v0, vuint64m2_t v1, vuint64m2_t v2, size_t vl) {
13102   return vsuxseg3ei32_v_u64m2_m(mask, base, bindex, v0, v1, v2, vl);
13103 }
13104 
13105 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_u64m2_m(
13106 // CHECK-RV64-NEXT:  entry:
13107 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], <vscale x 2 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13108 // CHECK-RV64-NEXT:    ret void
13109 //
// Masked 4-field indexed-unordered segment store: u64m2 data, 32-bit indices.
test_vsuxseg4ei32_v_u64m2_m(vbool32_t mask,uint64_t * base,vuint32m1_t bindex,vuint64m2_t v0,vuint64m2_t v1,vuint64m2_t v2,vuint64m2_t v3,size_t vl)13110 void test_vsuxseg4ei32_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint32m1_t bindex, vuint64m2_t v0, vuint64m2_t v1, vuint64m2_t v2, vuint64m2_t v3, size_t vl) {
13111   return vsuxseg4ei32_v_u64m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
13112 }
13113 
13114 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_u64m4_m(
13115 // CHECK-RV64-NEXT:  entry:
13116 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[V0:%.*]], <vscale x 4 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13117 // CHECK-RV64-NEXT:    ret void
13118 //
// Masked 2-field indexed-unordered segment store: u64m4 data, 32-bit indices.
test_vsuxseg2ei32_v_u64m4_m(vbool16_t mask,uint64_t * base,vuint32m2_t bindex,vuint64m4_t v0,vuint64m4_t v1,size_t vl)13119 void test_vsuxseg2ei32_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint32m2_t bindex, vuint64m4_t v0, vuint64m4_t v1, size_t vl) {
13120   return vsuxseg2ei32_v_u64m4_m(mask, base, bindex, v0, v1, vl);
13121 }
13122 
13123 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u64m1_m(
13124 // CHECK-RV64-NEXT:  entry:
13125 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13126 // CHECK-RV64-NEXT:    ret void
13127 //
// Masked 2-field indexed-unordered segment store: u64m1 data, 64-bit indices.
test_vsuxseg2ei64_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint64m1_t bindex,vuint64m1_t v0,vuint64m1_t v1,size_t vl)13128 void test_vsuxseg2ei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t v0, vuint64m1_t v1, size_t vl) {
13129   return vsuxseg2ei64_v_u64m1_m(mask, base, bindex, v0, v1, vl);
13130 }
13131 
13132 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u64m1_m(
13133 // CHECK-RV64-NEXT:  entry:
13134 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13135 // CHECK-RV64-NEXT:    ret void
13136 //
// Masked 3-field indexed-unordered segment store: u64m1 data, 64-bit indices.
test_vsuxseg3ei64_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint64m1_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,size_t vl)13137 void test_vsuxseg3ei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, size_t vl) {
13138   return vsuxseg3ei64_v_u64m1_m(mask, base, bindex, v0, v1, v2, vl);
13139 }
13140 
13141 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u64m1_m(
13142 // CHECK-RV64-NEXT:  entry:
13143 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13144 // CHECK-RV64-NEXT:    ret void
13145 //
// Masked 4-field indexed-unordered segment store: u64m1 data, 64-bit indices.
test_vsuxseg4ei64_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint64m1_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,size_t vl)13146 void test_vsuxseg4ei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, size_t vl) {
13147   return vsuxseg4ei64_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
13148 }
13149 
13150 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_u64m1_m(
13151 // CHECK-RV64-NEXT:  entry:
13152 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13153 // CHECK-RV64-NEXT:    ret void
13154 //
// Masked 5-field indexed-unordered segment store: u64m1 data, 64-bit indices.
test_vsuxseg5ei64_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint64m1_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,size_t vl)13155 void test_vsuxseg5ei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, size_t vl) {
13156   return vsuxseg5ei64_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
13157 }
13158 
13159 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_u64m1_m(
13160 // CHECK-RV64-NEXT:  entry:
13161 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13162 // CHECK-RV64-NEXT:    ret void
13163 //
// Masked 6-field indexed-unordered segment store: u64m1 data, 64-bit indices.
test_vsuxseg6ei64_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint64m1_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,vuint64m1_t v5,size_t vl)13164 void test_vsuxseg6ei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, size_t vl) {
13165   return vsuxseg6ei64_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
13166 }
13167 
13168 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_u64m1_m(
13169 // CHECK-RV64-NEXT:  entry:
13170 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13171 // CHECK-RV64-NEXT:    ret void
13172 //
// Masked 7-field indexed-unordered segment store: u64m1 data, 64-bit indices.
test_vsuxseg7ei64_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint64m1_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,vuint64m1_t v5,vuint64m1_t v6,size_t vl)13173 void test_vsuxseg7ei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, vuint64m1_t v6, size_t vl) {
13174   return vsuxseg7ei64_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
13175 }
13176 
13177 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_u64m1_m(
13178 // CHECK-RV64-NEXT:  entry:
13179 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[V0:%.*]], <vscale x 1 x i64> [[V1:%.*]], <vscale x 1 x i64> [[V2:%.*]], <vscale x 1 x i64> [[V3:%.*]], <vscale x 1 x i64> [[V4:%.*]], <vscale x 1 x i64> [[V5:%.*]], <vscale x 1 x i64> [[V6:%.*]], <vscale x 1 x i64> [[V7:%.*]], i64* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13180 // CHECK-RV64-NEXT:    ret void
13181 //
// Masked 8-field indexed-unordered segment store: u64m1 data, 64-bit indices.
test_vsuxseg8ei64_v_u64m1_m(vbool64_t mask,uint64_t * base,vuint64m1_t bindex,vuint64m1_t v0,vuint64m1_t v1,vuint64m1_t v2,vuint64m1_t v3,vuint64m1_t v4,vuint64m1_t v5,vuint64m1_t v6,vuint64m1_t v7,size_t vl)13182 void test_vsuxseg8ei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t v0, vuint64m1_t v1, vuint64m1_t v2, vuint64m1_t v3, vuint64m1_t v4, vuint64m1_t v5, vuint64m1_t v6, vuint64m1_t v7, size_t vl) {
13183   return vsuxseg8ei64_v_u64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
13184 }
13185 
13186 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u64m2_m(
13187 // CHECK-RV64-NEXT:  entry:
13188 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13189 // CHECK-RV64-NEXT:    ret void
13190 //
// Masked 2-field indexed-unordered segment store: u64m2 data, 64-bit indices.
test_vsuxseg2ei64_v_u64m2_m(vbool32_t mask,uint64_t * base,vuint64m2_t bindex,vuint64m2_t v0,vuint64m2_t v1,size_t vl)13191 void test_vsuxseg2ei64_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint64m2_t bindex, vuint64m2_t v0, vuint64m2_t v1, size_t vl) {
13192   return vsuxseg2ei64_v_u64m2_m(mask, base, bindex, v0, v1, vl);
13193 }
13194 
13195 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_u64m2_m(
13196 // CHECK-RV64-NEXT:  entry:
13197 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13198 // CHECK-RV64-NEXT:    ret void
13199 //
// Masked 3-field indexed-unordered segment store: u64m2 data, 64-bit indices.
test_vsuxseg3ei64_v_u64m2_m(vbool32_t mask,uint64_t * base,vuint64m2_t bindex,vuint64m2_t v0,vuint64m2_t v1,vuint64m2_t v2,size_t vl)13200 void test_vsuxseg3ei64_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint64m2_t bindex, vuint64m2_t v0, vuint64m2_t v1, vuint64m2_t v2, size_t vl) {
13201   return vsuxseg3ei64_v_u64m2_m(mask, base, bindex, v0, v1, v2, vl);
13202 }
13203 
13204 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_u64m2_m(
13205 // CHECK-RV64-NEXT:  entry:
13206 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[V0:%.*]], <vscale x 2 x i64> [[V1:%.*]], <vscale x 2 x i64> [[V2:%.*]], <vscale x 2 x i64> [[V3:%.*]], i64* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13207 // CHECK-RV64-NEXT:    ret void
13208 //
// Masked 4-field indexed-unordered segment store: u64m2 data, 64-bit indices.
test_vsuxseg4ei64_v_u64m2_m(vbool32_t mask,uint64_t * base,vuint64m2_t bindex,vuint64m2_t v0,vuint64m2_t v1,vuint64m2_t v2,vuint64m2_t v3,size_t vl)13209 void test_vsuxseg4ei64_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint64m2_t bindex, vuint64m2_t v0, vuint64m2_t v1, vuint64m2_t v2, vuint64m2_t v3, size_t vl) {
13210   return vsuxseg4ei64_v_u64m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
13211 }
13212 
13213 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_u64m4_m(
13214 // CHECK-RV64-NEXT:  entry:
13215 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[V0:%.*]], <vscale x 4 x i64> [[V1:%.*]], i64* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13216 // CHECK-RV64-NEXT:    ret void
13217 //
// Masked 2-field indexed-unordered segment store: u64m4 data, 64-bit indices.
test_vsuxseg2ei64_v_u64m4_m(vbool16_t mask,uint64_t * base,vuint64m4_t bindex,vuint64m4_t v0,vuint64m4_t v1,size_t vl)13218 void test_vsuxseg2ei64_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint64m4_t bindex, vuint64m4_t v0, vuint64m4_t v1, size_t vl) {
13219   return vsuxseg2ei64_v_u64m4_m(mask, base, bindex, v0, v1, vl);
13220 }
13221 
13222 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_f32mf2_m(
13223 // CHECK-RV64-NEXT:  entry:
13224 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13225 // CHECK-RV64-NEXT:    ret void
13226 //
// Masked 2-field indexed-unordered segment store: f32mf2 data, 8-bit indices.
test_vsuxseg2ei8_v_f32mf2_m(vbool64_t mask,float * base,vuint8mf8_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,size_t vl)13227 void test_vsuxseg2ei8_v_f32mf2_m (vbool64_t mask, float *base, vuint8mf8_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, size_t vl) {
13228   return vsuxseg2ei8_v_f32mf2_m(mask, base, bindex, v0, v1, vl);
13229 }
13230 
13231 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_f32mf2_m(
13232 // CHECK-RV64-NEXT:  entry:
13233 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13234 // CHECK-RV64-NEXT:    ret void
13235 //
// Masked 3-field indexed-unordered segment store: f32mf2 data, 8-bit indices.
test_vsuxseg3ei8_v_f32mf2_m(vbool64_t mask,float * base,vuint8mf8_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,size_t vl)13236 void test_vsuxseg3ei8_v_f32mf2_m (vbool64_t mask, float *base, vuint8mf8_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, size_t vl) {
13237   return vsuxseg3ei8_v_f32mf2_m(mask, base, bindex, v0, v1, v2, vl);
13238 }
13239 
13240 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_f32mf2_m(
13241 // CHECK-RV64-NEXT:  entry:
13242 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13243 // CHECK-RV64-NEXT:    ret void
13244 //
// Masked 4-field indexed-unordered segment store: f32mf2 data, 8-bit indices.
test_vsuxseg4ei8_v_f32mf2_m(vbool64_t mask,float * base,vuint8mf8_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,size_t vl)13245 void test_vsuxseg4ei8_v_f32mf2_m (vbool64_t mask, float *base, vuint8mf8_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, size_t vl) {
13246   return vsuxseg4ei8_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
13247 }
13248 
13249 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_f32mf2_m(
13250 // CHECK-RV64-NEXT:  entry:
13251 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], float* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13252 // CHECK-RV64-NEXT:    ret void
13253 //
// Masked 5-field indexed-unordered segment store: f32mf2 data, 8-bit indices.
test_vsuxseg5ei8_v_f32mf2_m(vbool64_t mask,float * base,vuint8mf8_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,vfloat32mf2_t v4,size_t vl)13254 void test_vsuxseg5ei8_v_f32mf2_m (vbool64_t mask, float *base, vuint8mf8_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, size_t vl) {
13255   return vsuxseg5ei8_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
13256 }
13257 
13258 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_f32mf2_m(
13259 // CHECK-RV64-NEXT:  entry:
13260 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], float* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13261 // CHECK-RV64-NEXT:    ret void
13262 //
// Masked 6-field indexed-unordered segment store: f32mf2 data, 8-bit indices.
test_vsuxseg6ei8_v_f32mf2_m(vbool64_t mask,float * base,vuint8mf8_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,vfloat32mf2_t v4,vfloat32mf2_t v5,size_t vl)13263 void test_vsuxseg6ei8_v_f32mf2_m (vbool64_t mask, float *base, vuint8mf8_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, size_t vl) {
13264   return vsuxseg6ei8_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
13265 }
13266 
13267 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_f32mf2_m(
13268 // CHECK-RV64-NEXT:  entry:
13269 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], <vscale x 1 x float> [[V6:%.*]], float* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13270 // CHECK-RV64-NEXT:    ret void
13271 //
// Masked 7-field indexed-unordered segment store: f32mf2 data, 8-bit indices.
test_vsuxseg7ei8_v_f32mf2_m(vbool64_t mask,float * base,vuint8mf8_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,vfloat32mf2_t v4,vfloat32mf2_t v5,vfloat32mf2_t v6,size_t vl)13272 void test_vsuxseg7ei8_v_f32mf2_m (vbool64_t mask, float *base, vuint8mf8_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, vfloat32mf2_t v6, size_t vl) {
13273   return vsuxseg7ei8_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
13274 }
13275 
13276 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_f32mf2_m(
13277 // CHECK-RV64-NEXT:  entry:
13278 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], <vscale x 1 x float> [[V6:%.*]], <vscale x 1 x float> [[V7:%.*]], float* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13279 // CHECK-RV64-NEXT:    ret void
13280 //
// Masked 8-field indexed-unordered segment store: f32mf2 data, 8-bit indices.
test_vsuxseg8ei8_v_f32mf2_m(vbool64_t mask,float * base,vuint8mf8_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,vfloat32mf2_t v4,vfloat32mf2_t v5,vfloat32mf2_t v6,vfloat32mf2_t v7,size_t vl)13281 void test_vsuxseg8ei8_v_f32mf2_m (vbool64_t mask, float *base, vuint8mf8_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, vfloat32mf2_t v6, vfloat32mf2_t v7, size_t vl) {
13282   return vsuxseg8ei8_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
13283 }
13284 
13285 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_f32m1_m(
13286 // CHECK-RV64-NEXT:  entry:
13287 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13288 // CHECK-RV64-NEXT:    ret void
13289 //
// Masked 2-field indexed-unordered segment store: f32m1 data, 8-bit indices.
test_vsuxseg2ei8_v_f32m1_m(vbool32_t mask,float * base,vuint8mf4_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,size_t vl)13290 void test_vsuxseg2ei8_v_f32m1_m (vbool32_t mask, float *base, vuint8mf4_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, size_t vl) {
13291   return vsuxseg2ei8_v_f32m1_m(mask, base, bindex, v0, v1, vl);
13292 }
13293 
13294 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_f32m1_m(
13295 // CHECK-RV64-NEXT:  entry:
13296 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13297 // CHECK-RV64-NEXT:    ret void
13298 //
// Masked 3-field indexed-unordered segment store: f32m1 data, 8-bit indices.
test_vsuxseg3ei8_v_f32m1_m(vbool32_t mask,float * base,vuint8mf4_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,size_t vl)13299 void test_vsuxseg3ei8_v_f32m1_m (vbool32_t mask, float *base, vuint8mf4_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, size_t vl) {
13300   return vsuxseg3ei8_v_f32m1_m(mask, base, bindex, v0, v1, v2, vl);
13301 }
13302 
13303 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_f32m1_m(
13304 // CHECK-RV64-NEXT:  entry:
13305 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13306 // CHECK-RV64-NEXT:    ret void
13307 //
// Masked 4-field indexed-unordered segment store: f32m1 data, 8-bit indices.
test_vsuxseg4ei8_v_f32m1_m(vbool32_t mask,float * base,vuint8mf4_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,size_t vl)13308 void test_vsuxseg4ei8_v_f32m1_m (vbool32_t mask, float *base, vuint8mf4_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, size_t vl) {
13309   return vsuxseg4ei8_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
13310 }
13311 
13312 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_f32m1_m(
13313 // CHECK-RV64-NEXT:  entry:
13314 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], float* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13315 // CHECK-RV64-NEXT:    ret void
13316 //
// Masked 5-field indexed-unordered segment store: f32m1 data, 8-bit indices.
test_vsuxseg5ei8_v_f32m1_m(vbool32_t mask,float * base,vuint8mf4_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,vfloat32m1_t v4,size_t vl)13317 void test_vsuxseg5ei8_v_f32m1_m (vbool32_t mask, float *base, vuint8mf4_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, size_t vl) {
13318   return vsuxseg5ei8_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
13319 }
13320 
13321 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_f32m1_m(
13322 // CHECK-RV64-NEXT:  entry:
13323 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], float* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13324 // CHECK-RV64-NEXT:    ret void
13325 //
// Masked 6-field indexed-unordered segment store: f32m1 data, 8-bit indices.
test_vsuxseg6ei8_v_f32m1_m(vbool32_t mask,float * base,vuint8mf4_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,vfloat32m1_t v4,vfloat32m1_t v5,size_t vl)13326 void test_vsuxseg6ei8_v_f32m1_m (vbool32_t mask, float *base, vuint8mf4_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, size_t vl) {
13327   return vsuxseg6ei8_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
13328 }
13329 
13330 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_f32m1_m(
13331 // CHECK-RV64-NEXT:  entry:
13332 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], <vscale x 2 x float> [[V6:%.*]], float* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13333 // CHECK-RV64-NEXT:    ret void
13334 //
// Masked 7-field indexed-unordered segment store: f32m1 data, 8-bit indices.
test_vsuxseg7ei8_v_f32m1_m(vbool32_t mask,float * base,vuint8mf4_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,vfloat32m1_t v4,vfloat32m1_t v5,vfloat32m1_t v6,size_t vl)13335 void test_vsuxseg7ei8_v_f32m1_m (vbool32_t mask, float *base, vuint8mf4_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, vfloat32m1_t v6, size_t vl) {
13336   return vsuxseg7ei8_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
13337 }
13338 
13339 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_f32m1_m(
13340 // CHECK-RV64-NEXT:  entry:
13341 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], <vscale x 2 x float> [[V6:%.*]], <vscale x 2 x float> [[V7:%.*]], float* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13342 // CHECK-RV64-NEXT:    ret void
13343 //
// Masked 8-field indexed-unordered segment store: f32m1 data, 8-bit indices.
test_vsuxseg8ei8_v_f32m1_m(vbool32_t mask,float * base,vuint8mf4_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,vfloat32m1_t v4,vfloat32m1_t v5,vfloat32m1_t v6,vfloat32m1_t v7,size_t vl)13344 void test_vsuxseg8ei8_v_f32m1_m (vbool32_t mask, float *base, vuint8mf4_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, vfloat32m1_t v6, vfloat32m1_t v7, size_t vl) {
13345   return vsuxseg8ei8_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
13346 }
13347 
13348 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_f32m2_m(
13349 // CHECK-RV64-NEXT:  entry:
13350 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4f32.nxv4i8.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13351 // CHECK-RV64-NEXT:    ret void
13352 //
// Masked 2-field indexed-unordered segment store: f32m2 data, 8-bit indices.
test_vsuxseg2ei8_v_f32m2_m(vbool16_t mask,float * base,vuint8mf2_t bindex,vfloat32m2_t v0,vfloat32m2_t v1,size_t vl)13353 void test_vsuxseg2ei8_v_f32m2_m (vbool16_t mask, float *base, vuint8mf2_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, size_t vl) {
13354   return vsuxseg2ei8_v_f32m2_m(mask, base, bindex, v0, v1, vl);
13355 }
13356 
13357 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_f32m2_m(
13358 // CHECK-RV64-NEXT:  entry:
13359 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4f32.nxv4i8.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], <vscale x 4 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13360 // CHECK-RV64-NEXT:    ret void
13361 //
// Masked 3-field indexed-unordered segment store: f32m2 data, 8-bit indices.
test_vsuxseg3ei8_v_f32m2_m(vbool16_t mask,float * base,vuint8mf2_t bindex,vfloat32m2_t v0,vfloat32m2_t v1,vfloat32m2_t v2,size_t vl)13362 void test_vsuxseg3ei8_v_f32m2_m (vbool16_t mask, float *base, vuint8mf2_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, vfloat32m2_t v2, size_t vl) {
13363   return vsuxseg3ei8_v_f32m2_m(mask, base, bindex, v0, v1, v2, vl);
13364 }
13365 
13366 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_f32m2_m(
13367 // CHECK-RV64-NEXT:  entry:
13368 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4f32.nxv4i8.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], <vscale x 4 x float> [[V2:%.*]], <vscale x 4 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13369 // CHECK-RV64-NEXT:    ret void
13370 //
// Masked 4-field indexed-unordered segment store: f32m2 data, 8-bit indices.
test_vsuxseg4ei8_v_f32m2_m(vbool16_t mask,float * base,vuint8mf2_t bindex,vfloat32m2_t v0,vfloat32m2_t v1,vfloat32m2_t v2,vfloat32m2_t v3,size_t vl)13371 void test_vsuxseg4ei8_v_f32m2_m (vbool16_t mask, float *base, vuint8mf2_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, vfloat32m2_t v2, vfloat32m2_t v3, size_t vl) {
13372   return vsuxseg4ei8_v_f32m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
13373 }
13374 
13375 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_f32m4_m(
13376 // CHECK-RV64-NEXT:  entry:
13377 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8f32.nxv8i8.i64(<vscale x 8 x float> [[V0:%.*]], <vscale x 8 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13378 // CHECK-RV64-NEXT:    ret void
13379 //
// Masked 2-field indexed-unordered segment store: f32m4 data, 8-bit indices.
test_vsuxseg2ei8_v_f32m4_m(vbool8_t mask,float * base,vuint8m1_t bindex,vfloat32m4_t v0,vfloat32m4_t v1,size_t vl)13380 void test_vsuxseg2ei8_v_f32m4_m (vbool8_t mask, float *base, vuint8m1_t bindex, vfloat32m4_t v0, vfloat32m4_t v1, size_t vl) {
13381   return vsuxseg2ei8_v_f32m4_m(mask, base, bindex, v0, v1, vl);
13382 }
13383 
// Masked segment stores of f32mf2 data with 16-bit indices (vsuxsegNei16,
// N = 2..8): f32mf2 maps to <vscale x 1 x float> and the vuint16mf4_t index
// to <vscale x 1 x i16>, so each call must lower to
// @llvm.riscv.vsuxsegN.mask.nxv1f32.nxv1i16.i64.
// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_f32mf2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei16_v_f32mf2_m (vbool64_t mask, float *base, vuint16mf4_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, size_t vl) {
  return vsuxseg2ei16_v_f32mf2_m(mask, base, bindex, v0, v1, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_f32mf2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei16_v_f32mf2_m (vbool64_t mask, float *base, vuint16mf4_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, size_t vl) {
  return vsuxseg3ei16_v_f32mf2_m(mask, base, bindex, v0, v1, v2, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_f32mf2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei16_v_f32mf2_m (vbool64_t mask, float *base, vuint16mf4_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, size_t vl) {
  return vsuxseg4ei16_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_f32mf2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], float* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei16_v_f32mf2_m (vbool64_t mask, float *base, vuint16mf4_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, size_t vl) {
  return vsuxseg5ei16_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_f32mf2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], float* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg6ei16_v_f32mf2_m (vbool64_t mask, float *base, vuint16mf4_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, size_t vl) {
  return vsuxseg6ei16_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_f32mf2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], <vscale x 1 x float> [[V6:%.*]], float* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg7ei16_v_f32mf2_m (vbool64_t mask, float *base, vuint16mf4_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, vfloat32mf2_t v6, size_t vl) {
  return vsuxseg7ei16_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_f32mf2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], <vscale x 1 x float> [[V6:%.*]], <vscale x 1 x float> [[V7:%.*]], float* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg8ei16_v_f32mf2_m (vbool64_t mask, float *base, vuint16mf4_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, vfloat32mf2_t v6, vfloat32mf2_t v7, size_t vl) {
  return vsuxseg8ei16_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
13446 
// Masked segment stores of f32m1 data with 16-bit indices (vsuxsegNei16,
// N = 2..8): f32m1 maps to <vscale x 2 x float> and the vuint16mf2_t index
// to <vscale x 2 x i16>, so each call must lower to
// @llvm.riscv.vsuxsegN.mask.nxv2f32.nxv2i16.i64.
// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_f32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei16_v_f32m1_m (vbool32_t mask, float *base, vuint16mf2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, size_t vl) {
  return vsuxseg2ei16_v_f32m1_m(mask, base, bindex, v0, v1, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_f32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei16_v_f32m1_m (vbool32_t mask, float *base, vuint16mf2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, size_t vl) {
  return vsuxseg3ei16_v_f32m1_m(mask, base, bindex, v0, v1, v2, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_f32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei16_v_f32m1_m (vbool32_t mask, float *base, vuint16mf2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, size_t vl) {
  return vsuxseg4ei16_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_f32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], float* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei16_v_f32m1_m (vbool32_t mask, float *base, vuint16mf2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, size_t vl) {
  return vsuxseg5ei16_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_f32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], float* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg6ei16_v_f32m1_m (vbool32_t mask, float *base, vuint16mf2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, size_t vl) {
  return vsuxseg6ei16_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_f32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], <vscale x 2 x float> [[V6:%.*]], float* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg7ei16_v_f32m1_m (vbool32_t mask, float *base, vuint16mf2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, vfloat32m1_t v6, size_t vl) {
  return vsuxseg7ei16_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_f32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], <vscale x 2 x float> [[V6:%.*]], <vscale x 2 x float> [[V7:%.*]], float* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg8ei16_v_f32m1_m (vbool32_t mask, float *base, vuint16mf2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, vfloat32m1_t v6, vfloat32m1_t v7, size_t vl) {
  return vsuxseg8ei16_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
13509 
// Masked segment stores with 16-bit indices at larger LMUL: f32m2 (seg2-seg4,
// <vscale x 4 x float> data / <vscale x 4 x i16> index) and f32m4 (seg2 only,
// <vscale x 8 x float> / <vscale x 8 x i16>); higher segment counts are not
// available at these register-group sizes.
// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_f32m2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4f32.nxv4i16.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei16_v_f32m2_m (vbool16_t mask, float *base, vuint16m1_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, size_t vl) {
  return vsuxseg2ei16_v_f32m2_m(mask, base, bindex, v0, v1, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_f32m2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4f32.nxv4i16.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], <vscale x 4 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei16_v_f32m2_m (vbool16_t mask, float *base, vuint16m1_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, vfloat32m2_t v2, size_t vl) {
  return vsuxseg3ei16_v_f32m2_m(mask, base, bindex, v0, v1, v2, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_f32m2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4f32.nxv4i16.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], <vscale x 4 x float> [[V2:%.*]], <vscale x 4 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei16_v_f32m2_m (vbool16_t mask, float *base, vuint16m1_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, vfloat32m2_t v2, vfloat32m2_t v3, size_t vl) {
  return vsuxseg4ei16_v_f32m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_f32m4_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8f32.nxv8i16.i64(<vscale x 8 x float> [[V0:%.*]], <vscale x 8 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei16_v_f32m4_m (vbool8_t mask, float *base, vuint16m2_t bindex, vfloat32m4_t v0, vfloat32m4_t v1, size_t vl) {
  return vsuxseg2ei16_v_f32m4_m(mask, base, bindex, v0, v1, vl);
}
13545 
// Masked segment stores of f32mf2 data with 32-bit indices (vsuxsegNei32,
// N = 2..8): data is <vscale x 1 x float>, index vuint32mf2_t is
// <vscale x 1 x i32>, so each call must lower to
// @llvm.riscv.vsuxsegN.mask.nxv1f32.nxv1i32.i64.
// CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_f32mf2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei32_v_f32mf2_m (vbool64_t mask, float *base, vuint32mf2_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, size_t vl) {
  return vsuxseg2ei32_v_f32mf2_m(mask, base, bindex, v0, v1, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_f32mf2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei32_v_f32mf2_m (vbool64_t mask, float *base, vuint32mf2_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, size_t vl) {
  return vsuxseg3ei32_v_f32mf2_m(mask, base, bindex, v0, v1, v2, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_f32mf2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei32_v_f32mf2_m (vbool64_t mask, float *base, vuint32mf2_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, size_t vl) {
  return vsuxseg4ei32_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_f32mf2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], float* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei32_v_f32mf2_m (vbool64_t mask, float *base, vuint32mf2_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, size_t vl) {
  return vsuxseg5ei32_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_f32mf2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], float* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg6ei32_v_f32mf2_m (vbool64_t mask, float *base, vuint32mf2_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, size_t vl) {
  return vsuxseg6ei32_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_f32mf2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], <vscale x 1 x float> [[V6:%.*]], float* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg7ei32_v_f32mf2_m (vbool64_t mask, float *base, vuint32mf2_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, vfloat32mf2_t v6, size_t vl) {
  return vsuxseg7ei32_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_f32mf2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], <vscale x 1 x float> [[V6:%.*]], <vscale x 1 x float> [[V7:%.*]], float* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg8ei32_v_f32mf2_m (vbool64_t mask, float *base, vuint32mf2_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, vfloat32mf2_t v6, vfloat32mf2_t v7, size_t vl) {
  return vsuxseg8ei32_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
13608 
// Masked segment stores of f32m1 data with 32-bit indices (vsuxsegNei32,
// N = 2..8): data is <vscale x 2 x float>, index vuint32m1_t is
// <vscale x 2 x i32>, so each call must lower to
// @llvm.riscv.vsuxsegN.mask.nxv2f32.nxv2i32.i64.
// CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_f32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei32_v_f32m1_m (vbool32_t mask, float *base, vuint32m1_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, size_t vl) {
  return vsuxseg2ei32_v_f32m1_m(mask, base, bindex, v0, v1, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_f32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei32_v_f32m1_m (vbool32_t mask, float *base, vuint32m1_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, size_t vl) {
  return vsuxseg3ei32_v_f32m1_m(mask, base, bindex, v0, v1, v2, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_f32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei32_v_f32m1_m (vbool32_t mask, float *base, vuint32m1_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, size_t vl) {
  return vsuxseg4ei32_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_f32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], float* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg5ei32_v_f32m1_m (vbool32_t mask, float *base, vuint32m1_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, size_t vl) {
  return vsuxseg5ei32_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_f32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], float* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg6ei32_v_f32m1_m (vbool32_t mask, float *base, vuint32m1_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, size_t vl) {
  return vsuxseg6ei32_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_f32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], <vscale x 2 x float> [[V6:%.*]], float* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg7ei32_v_f32m1_m (vbool32_t mask, float *base, vuint32m1_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, vfloat32m1_t v6, size_t vl) {
  return vsuxseg7ei32_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_f32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], <vscale x 2 x float> [[V6:%.*]], <vscale x 2 x float> [[V7:%.*]], float* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg8ei32_v_f32m1_m (vbool32_t mask, float *base, vuint32m1_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, vfloat32m1_t v6, vfloat32m1_t v7, size_t vl) {
  return vsuxseg8ei32_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
}
13671 
// Masked segment stores with 32-bit indices at larger LMUL: f32m2 (seg2-seg4,
// <vscale x 4 x float> data / <vscale x 4 x i32> index) and f32m4 (seg2 only,
// <vscale x 8 x float> / <vscale x 8 x i32>).
// CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_f32m2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4f32.nxv4i32.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei32_v_f32m2_m (vbool16_t mask, float *base, vuint32m2_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, size_t vl) {
  return vsuxseg2ei32_v_f32m2_m(mask, base, bindex, v0, v1, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_f32m2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4f32.nxv4i32.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], <vscale x 4 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg3ei32_v_f32m2_m (vbool16_t mask, float *base, vuint32m2_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, vfloat32m2_t v2, size_t vl) {
  return vsuxseg3ei32_v_f32m2_m(mask, base, bindex, v0, v1, v2, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_f32m2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4f32.nxv4i32.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], <vscale x 4 x float> [[V2:%.*]], <vscale x 4 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg4ei32_v_f32m2_m (vbool16_t mask, float *base, vuint32m2_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, vfloat32m2_t v2, vfloat32m2_t v3, size_t vl) {
  return vsuxseg4ei32_v_f32m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
}

// CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_f32m4_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8f32.nxv8i32.i64(<vscale x 8 x float> [[V0:%.*]], <vscale x 8 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret void
//
void test_vsuxseg2ei32_v_f32m4_m (vbool8_t mask, float *base, vuint32m4_t bindex, vfloat32m4_t v0, vfloat32m4_t v1, size_t vl) {
  return vsuxseg2ei32_v_f32m4_m(mask, base, bindex, v0, v1, vl);
}
13707 
13708 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_f32mf2_m(
13709 // CHECK-RV64-NEXT:  entry:
13710 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13711 // CHECK-RV64-NEXT:    ret void
13712 //
test_vsuxseg2ei64_v_f32mf2_m(vbool64_t mask,float * base,vuint64m1_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,size_t vl)13713 void test_vsuxseg2ei64_v_f32mf2_m (vbool64_t mask, float *base, vuint64m1_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, size_t vl) {
13714   return vsuxseg2ei64_v_f32mf2_m(mask, base, bindex, v0, v1, vl);
13715 }
13716 
13717 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_f32mf2_m(
13718 // CHECK-RV64-NEXT:  entry:
13719 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13720 // CHECK-RV64-NEXT:    ret void
13721 //
test_vsuxseg3ei64_v_f32mf2_m(vbool64_t mask,float * base,vuint64m1_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,size_t vl)13722 void test_vsuxseg3ei64_v_f32mf2_m (vbool64_t mask, float *base, vuint64m1_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, size_t vl) {
13723   return vsuxseg3ei64_v_f32mf2_m(mask, base, bindex, v0, v1, v2, vl);
13724 }
13725 
13726 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_f32mf2_m(
13727 // CHECK-RV64-NEXT:  entry:
13728 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13729 // CHECK-RV64-NEXT:    ret void
13730 //
test_vsuxseg4ei64_v_f32mf2_m(vbool64_t mask,float * base,vuint64m1_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,size_t vl)13731 void test_vsuxseg4ei64_v_f32mf2_m (vbool64_t mask, float *base, vuint64m1_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, size_t vl) {
13732   return vsuxseg4ei64_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, vl);
13733 }
13734 
13735 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_f32mf2_m(
13736 // CHECK-RV64-NEXT:  entry:
13737 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], float* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13738 // CHECK-RV64-NEXT:    ret void
13739 //
test_vsuxseg5ei64_v_f32mf2_m(vbool64_t mask,float * base,vuint64m1_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,vfloat32mf2_t v4,size_t vl)13740 void test_vsuxseg5ei64_v_f32mf2_m (vbool64_t mask, float *base, vuint64m1_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, size_t vl) {
13741   return vsuxseg5ei64_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
13742 }
13743 
13744 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_f32mf2_m(
13745 // CHECK-RV64-NEXT:  entry:
13746 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], float* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13747 // CHECK-RV64-NEXT:    ret void
13748 //
test_vsuxseg6ei64_v_f32mf2_m(vbool64_t mask,float * base,vuint64m1_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,vfloat32mf2_t v4,vfloat32mf2_t v5,size_t vl)13749 void test_vsuxseg6ei64_v_f32mf2_m (vbool64_t mask, float *base, vuint64m1_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, size_t vl) {
13750   return vsuxseg6ei64_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
13751 }
13752 
13753 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_f32mf2_m(
13754 // CHECK-RV64-NEXT:  entry:
13755 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], <vscale x 1 x float> [[V6:%.*]], float* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13756 // CHECK-RV64-NEXT:    ret void
13757 //
test_vsuxseg7ei64_v_f32mf2_m(vbool64_t mask,float * base,vuint64m1_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,vfloat32mf2_t v4,vfloat32mf2_t v5,vfloat32mf2_t v6,size_t vl)13758 void test_vsuxseg7ei64_v_f32mf2_m (vbool64_t mask, float *base, vuint64m1_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, vfloat32mf2_t v6, size_t vl) {
13759   return vsuxseg7ei64_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
13760 }
13761 
13762 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_f32mf2_m(
13763 // CHECK-RV64-NEXT:  entry:
13764 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[V0:%.*]], <vscale x 1 x float> [[V1:%.*]], <vscale x 1 x float> [[V2:%.*]], <vscale x 1 x float> [[V3:%.*]], <vscale x 1 x float> [[V4:%.*]], <vscale x 1 x float> [[V5:%.*]], <vscale x 1 x float> [[V6:%.*]], <vscale x 1 x float> [[V7:%.*]], float* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13765 // CHECK-RV64-NEXT:    ret void
13766 //
test_vsuxseg8ei64_v_f32mf2_m(vbool64_t mask,float * base,vuint64m1_t bindex,vfloat32mf2_t v0,vfloat32mf2_t v1,vfloat32mf2_t v2,vfloat32mf2_t v3,vfloat32mf2_t v4,vfloat32mf2_t v5,vfloat32mf2_t v6,vfloat32mf2_t v7,size_t vl)13767 void test_vsuxseg8ei64_v_f32mf2_m (vbool64_t mask, float *base, vuint64m1_t bindex, vfloat32mf2_t v0, vfloat32mf2_t v1, vfloat32mf2_t v2, vfloat32mf2_t v3, vfloat32mf2_t v4, vfloat32mf2_t v5, vfloat32mf2_t v6, vfloat32mf2_t v7, size_t vl) {
13768   return vsuxseg8ei64_v_f32mf2_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
13769 }
13770 
13771 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_f32m1_m(
13772 // CHECK-RV64-NEXT:  entry:
13773 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13774 // CHECK-RV64-NEXT:    ret void
13775 //
test_vsuxseg2ei64_v_f32m1_m(vbool32_t mask,float * base,vuint64m2_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,size_t vl)13776 void test_vsuxseg2ei64_v_f32m1_m (vbool32_t mask, float *base, vuint64m2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, size_t vl) {
13777   return vsuxseg2ei64_v_f32m1_m(mask, base, bindex, v0, v1, vl);
13778 }
13779 
13780 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_f32m1_m(
13781 // CHECK-RV64-NEXT:  entry:
13782 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13783 // CHECK-RV64-NEXT:    ret void
13784 //
test_vsuxseg3ei64_v_f32m1_m(vbool32_t mask,float * base,vuint64m2_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,size_t vl)13785 void test_vsuxseg3ei64_v_f32m1_m (vbool32_t mask, float *base, vuint64m2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, size_t vl) {
13786   return vsuxseg3ei64_v_f32m1_m(mask, base, bindex, v0, v1, v2, vl);
13787 }
13788 
13789 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_f32m1_m(
13790 // CHECK-RV64-NEXT:  entry:
13791 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13792 // CHECK-RV64-NEXT:    ret void
13793 //
test_vsuxseg4ei64_v_f32m1_m(vbool32_t mask,float * base,vuint64m2_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,size_t vl)13794 void test_vsuxseg4ei64_v_f32m1_m (vbool32_t mask, float *base, vuint64m2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, size_t vl) {
13795   return vsuxseg4ei64_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
13796 }
13797 
13798 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_f32m1_m(
13799 // CHECK-RV64-NEXT:  entry:
13800 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], float* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13801 // CHECK-RV64-NEXT:    ret void
13802 //
test_vsuxseg5ei64_v_f32m1_m(vbool32_t mask,float * base,vuint64m2_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,vfloat32m1_t v4,size_t vl)13803 void test_vsuxseg5ei64_v_f32m1_m (vbool32_t mask, float *base, vuint64m2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, size_t vl) {
13804   return vsuxseg5ei64_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
13805 }
13806 
13807 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_f32m1_m(
13808 // CHECK-RV64-NEXT:  entry:
13809 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], float* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13810 // CHECK-RV64-NEXT:    ret void
13811 //
test_vsuxseg6ei64_v_f32m1_m(vbool32_t mask,float * base,vuint64m2_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,vfloat32m1_t v4,vfloat32m1_t v5,size_t vl)13812 void test_vsuxseg6ei64_v_f32m1_m (vbool32_t mask, float *base, vuint64m2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, size_t vl) {
13813   return vsuxseg6ei64_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
13814 }
13815 
13816 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_f32m1_m(
13817 // CHECK-RV64-NEXT:  entry:
13818 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], <vscale x 2 x float> [[V6:%.*]], float* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13819 // CHECK-RV64-NEXT:    ret void
13820 //
test_vsuxseg7ei64_v_f32m1_m(vbool32_t mask,float * base,vuint64m2_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,vfloat32m1_t v4,vfloat32m1_t v5,vfloat32m1_t v6,size_t vl)13821 void test_vsuxseg7ei64_v_f32m1_m (vbool32_t mask, float *base, vuint64m2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, vfloat32m1_t v6, size_t vl) {
13822   return vsuxseg7ei64_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
13823 }
13824 
13825 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_f32m1_m(
13826 // CHECK-RV64-NEXT:  entry:
13827 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[V0:%.*]], <vscale x 2 x float> [[V1:%.*]], <vscale x 2 x float> [[V2:%.*]], <vscale x 2 x float> [[V3:%.*]], <vscale x 2 x float> [[V4:%.*]], <vscale x 2 x float> [[V5:%.*]], <vscale x 2 x float> [[V6:%.*]], <vscale x 2 x float> [[V7:%.*]], float* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13828 // CHECK-RV64-NEXT:    ret void
13829 //
test_vsuxseg8ei64_v_f32m1_m(vbool32_t mask,float * base,vuint64m2_t bindex,vfloat32m1_t v0,vfloat32m1_t v1,vfloat32m1_t v2,vfloat32m1_t v3,vfloat32m1_t v4,vfloat32m1_t v5,vfloat32m1_t v6,vfloat32m1_t v7,size_t vl)13830 void test_vsuxseg8ei64_v_f32m1_m (vbool32_t mask, float *base, vuint64m2_t bindex, vfloat32m1_t v0, vfloat32m1_t v1, vfloat32m1_t v2, vfloat32m1_t v3, vfloat32m1_t v4, vfloat32m1_t v5, vfloat32m1_t v6, vfloat32m1_t v7, size_t vl) {
13831   return vsuxseg8ei64_v_f32m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
13832 }
13833 
13834 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_f32m2_m(
13835 // CHECK-RV64-NEXT:  entry:
13836 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4f32.nxv4i64.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13837 // CHECK-RV64-NEXT:    ret void
13838 //
test_vsuxseg2ei64_v_f32m2_m(vbool16_t mask,float * base,vuint64m4_t bindex,vfloat32m2_t v0,vfloat32m2_t v1,size_t vl)13839 void test_vsuxseg2ei64_v_f32m2_m (vbool16_t mask, float *base, vuint64m4_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, size_t vl) {
13840   return vsuxseg2ei64_v_f32m2_m(mask, base, bindex, v0, v1, vl);
13841 }
13842 
13843 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_f32m2_m(
13844 // CHECK-RV64-NEXT:  entry:
13845 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv4f32.nxv4i64.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], <vscale x 4 x float> [[V2:%.*]], float* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13846 // CHECK-RV64-NEXT:    ret void
13847 //
test_vsuxseg3ei64_v_f32m2_m(vbool16_t mask,float * base,vuint64m4_t bindex,vfloat32m2_t v0,vfloat32m2_t v1,vfloat32m2_t v2,size_t vl)13848 void test_vsuxseg3ei64_v_f32m2_m (vbool16_t mask, float *base, vuint64m4_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, vfloat32m2_t v2, size_t vl) {
13849   return vsuxseg3ei64_v_f32m2_m(mask, base, bindex, v0, v1, v2, vl);
13850 }
13851 
13852 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_f32m2_m(
13853 // CHECK-RV64-NEXT:  entry:
13854 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv4f32.nxv4i64.i64(<vscale x 4 x float> [[V0:%.*]], <vscale x 4 x float> [[V1:%.*]], <vscale x 4 x float> [[V2:%.*]], <vscale x 4 x float> [[V3:%.*]], float* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13855 // CHECK-RV64-NEXT:    ret void
13856 //
test_vsuxseg4ei64_v_f32m2_m(vbool16_t mask,float * base,vuint64m4_t bindex,vfloat32m2_t v0,vfloat32m2_t v1,vfloat32m2_t v2,vfloat32m2_t v3,size_t vl)13857 void test_vsuxseg4ei64_v_f32m2_m (vbool16_t mask, float *base, vuint64m4_t bindex, vfloat32m2_t v0, vfloat32m2_t v1, vfloat32m2_t v2, vfloat32m2_t v3, size_t vl) {
13858   return vsuxseg4ei64_v_f32m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
13859 }
13860 
13861 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_f32m4_m(
13862 // CHECK-RV64-NEXT:  entry:
13863 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv8f32.nxv8i64.i64(<vscale x 8 x float> [[V0:%.*]], <vscale x 8 x float> [[V1:%.*]], float* [[BASE:%.*]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13864 // CHECK-RV64-NEXT:    ret void
13865 //
test_vsuxseg2ei64_v_f32m4_m(vbool8_t mask,float * base,vuint64m8_t bindex,vfloat32m4_t v0,vfloat32m4_t v1,size_t vl)13866 void test_vsuxseg2ei64_v_f32m4_m (vbool8_t mask, float *base, vuint64m8_t bindex, vfloat32m4_t v0, vfloat32m4_t v1, size_t vl) {
13867   return vsuxseg2ei64_v_f32m4_m(mask, base, bindex, v0, v1, vl);
13868 }
13869 
13870 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_f64m1_m(
13871 // CHECK-RV64-NEXT:  entry:
13872 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13873 // CHECK-RV64-NEXT:    ret void
13874 //
test_vsuxseg2ei8_v_f64m1_m(vbool64_t mask,double * base,vuint8mf8_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,size_t vl)13875 void test_vsuxseg2ei8_v_f64m1_m (vbool64_t mask, double *base, vuint8mf8_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, size_t vl) {
13876   return vsuxseg2ei8_v_f64m1_m(mask, base, bindex, v0, v1, vl);
13877 }
13878 
13879 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_f64m1_m(
13880 // CHECK-RV64-NEXT:  entry:
13881 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], double* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13882 // CHECK-RV64-NEXT:    ret void
13883 //
test_vsuxseg3ei8_v_f64m1_m(vbool64_t mask,double * base,vuint8mf8_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,size_t vl)13884 void test_vsuxseg3ei8_v_f64m1_m (vbool64_t mask, double *base, vuint8mf8_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, size_t vl) {
13885   return vsuxseg3ei8_v_f64m1_m(mask, base, bindex, v0, v1, v2, vl);
13886 }
13887 
13888 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_f64m1_m(
13889 // CHECK-RV64-NEXT:  entry:
13890 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], double* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13891 // CHECK-RV64-NEXT:    ret void
13892 //
test_vsuxseg4ei8_v_f64m1_m(vbool64_t mask,double * base,vuint8mf8_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,size_t vl)13893 void test_vsuxseg4ei8_v_f64m1_m (vbool64_t mask, double *base, vuint8mf8_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, size_t vl) {
13894   return vsuxseg4ei8_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
13895 }
13896 
13897 // CHECK-RV64-LABEL: @test_vsuxseg5ei8_v_f64m1_m(
13898 // CHECK-RV64-NEXT:  entry:
13899 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], double* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13900 // CHECK-RV64-NEXT:    ret void
13901 //
test_vsuxseg5ei8_v_f64m1_m(vbool64_t mask,double * base,vuint8mf8_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,size_t vl)13902 void test_vsuxseg5ei8_v_f64m1_m (vbool64_t mask, double *base, vuint8mf8_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, size_t vl) {
13903   return vsuxseg5ei8_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
13904 }
13905 
13906 // CHECK-RV64-LABEL: @test_vsuxseg6ei8_v_f64m1_m(
13907 // CHECK-RV64-NEXT:  entry:
13908 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], double* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13909 // CHECK-RV64-NEXT:    ret void
13910 //
test_vsuxseg6ei8_v_f64m1_m(vbool64_t mask,double * base,vuint8mf8_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,size_t vl)13911 void test_vsuxseg6ei8_v_f64m1_m (vbool64_t mask, double *base, vuint8mf8_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, size_t vl) {
13912   return vsuxseg6ei8_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
13913 }
13914 
13915 // CHECK-RV64-LABEL: @test_vsuxseg7ei8_v_f64m1_m(
13916 // CHECK-RV64-NEXT:  entry:
13917 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], <vscale x 1 x double> [[V6:%.*]], double* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13918 // CHECK-RV64-NEXT:    ret void
13919 //
test_vsuxseg7ei8_v_f64m1_m(vbool64_t mask,double * base,vuint8mf8_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,vfloat64m1_t v6,size_t vl)13920 void test_vsuxseg7ei8_v_f64m1_m (vbool64_t mask, double *base, vuint8mf8_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, vfloat64m1_t v6, size_t vl) {
13921   return vsuxseg7ei8_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
13922 }
13923 
13924 // CHECK-RV64-LABEL: @test_vsuxseg8ei8_v_f64m1_m(
13925 // CHECK-RV64-NEXT:  entry:
13926 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], <vscale x 1 x double> [[V6:%.*]], <vscale x 1 x double> [[V7:%.*]], double* [[BASE:%.*]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13927 // CHECK-RV64-NEXT:    ret void
13928 //
test_vsuxseg8ei8_v_f64m1_m(vbool64_t mask,double * base,vuint8mf8_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,vfloat64m1_t v6,vfloat64m1_t v7,size_t vl)13929 void test_vsuxseg8ei8_v_f64m1_m (vbool64_t mask, double *base, vuint8mf8_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, vfloat64m1_t v6, vfloat64m1_t v7, size_t vl) {
13930   return vsuxseg8ei8_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
13931 }
13932 
13933 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_f64m2_m(
13934 // CHECK-RV64-NEXT:  entry:
13935 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2f64.nxv2i8.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13936 // CHECK-RV64-NEXT:    ret void
13937 //
test_vsuxseg2ei8_v_f64m2_m(vbool32_t mask,double * base,vuint8mf4_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,size_t vl)13938 void test_vsuxseg2ei8_v_f64m2_m (vbool32_t mask, double *base, vuint8mf4_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, size_t vl) {
13939   return vsuxseg2ei8_v_f64m2_m(mask, base, bindex, v0, v1, vl);
13940 }
13941 
13942 // CHECK-RV64-LABEL: @test_vsuxseg3ei8_v_f64m2_m(
13943 // CHECK-RV64-NEXT:  entry:
13944 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2f64.nxv2i8.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], <vscale x 2 x double> [[V2:%.*]], double* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13945 // CHECK-RV64-NEXT:    ret void
13946 //
test_vsuxseg3ei8_v_f64m2_m(vbool32_t mask,double * base,vuint8mf4_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,vfloat64m2_t v2,size_t vl)13947 void test_vsuxseg3ei8_v_f64m2_m (vbool32_t mask, double *base, vuint8mf4_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, vfloat64m2_t v2, size_t vl) {
13948   return vsuxseg3ei8_v_f64m2_m(mask, base, bindex, v0, v1, v2, vl);
13949 }
13950 
13951 // CHECK-RV64-LABEL: @test_vsuxseg4ei8_v_f64m2_m(
13952 // CHECK-RV64-NEXT:  entry:
13953 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2f64.nxv2i8.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], <vscale x 2 x double> [[V2:%.*]], <vscale x 2 x double> [[V3:%.*]], double* [[BASE:%.*]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13954 // CHECK-RV64-NEXT:    ret void
13955 //
test_vsuxseg4ei8_v_f64m2_m(vbool32_t mask,double * base,vuint8mf4_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,vfloat64m2_t v2,vfloat64m2_t v3,size_t vl)13956 void test_vsuxseg4ei8_v_f64m2_m (vbool32_t mask, double *base, vuint8mf4_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, vfloat64m2_t v2, vfloat64m2_t v3, size_t vl) {
13957   return vsuxseg4ei8_v_f64m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
13958 }
13959 
13960 // CHECK-RV64-LABEL: @test_vsuxseg2ei8_v_f64m4_m(
13961 // CHECK-RV64-NEXT:  entry:
13962 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4f64.nxv4i8.i64(<vscale x 4 x double> [[V0:%.*]], <vscale x 4 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13963 // CHECK-RV64-NEXT:    ret void
13964 //
test_vsuxseg2ei8_v_f64m4_m(vbool16_t mask,double * base,vuint8mf2_t bindex,vfloat64m4_t v0,vfloat64m4_t v1,size_t vl)13965 void test_vsuxseg2ei8_v_f64m4_m (vbool16_t mask, double *base, vuint8mf2_t bindex, vfloat64m4_t v0, vfloat64m4_t v1, size_t vl) {
13966   return vsuxseg2ei8_v_f64m4_m(mask, base, bindex, v0, v1, vl);
13967 }
13968 
13969 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_f64m1_m(
13970 // CHECK-RV64-NEXT:  entry:
13971 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13972 // CHECK-RV64-NEXT:    ret void
13973 //
test_vsuxseg2ei16_v_f64m1_m(vbool64_t mask,double * base,vuint16mf4_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,size_t vl)13974 void test_vsuxseg2ei16_v_f64m1_m (vbool64_t mask, double *base, vuint16mf4_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, size_t vl) {
13975   return vsuxseg2ei16_v_f64m1_m(mask, base, bindex, v0, v1, vl);
13976 }
13977 
13978 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_f64m1_m(
13979 // CHECK-RV64-NEXT:  entry:
13980 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], double* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13981 // CHECK-RV64-NEXT:    ret void
13982 //
test_vsuxseg3ei16_v_f64m1_m(vbool64_t mask,double * base,vuint16mf4_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,size_t vl)13983 void test_vsuxseg3ei16_v_f64m1_m (vbool64_t mask, double *base, vuint16mf4_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, size_t vl) {
13984   return vsuxseg3ei16_v_f64m1_m(mask, base, bindex, v0, v1, v2, vl);
13985 }
13986 
13987 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_f64m1_m(
13988 // CHECK-RV64-NEXT:  entry:
13989 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], double* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13990 // CHECK-RV64-NEXT:    ret void
13991 //
test_vsuxseg4ei16_v_f64m1_m(vbool64_t mask,double * base,vuint16mf4_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,size_t vl)13992 void test_vsuxseg4ei16_v_f64m1_m (vbool64_t mask, double *base, vuint16mf4_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, size_t vl) {
13993   return vsuxseg4ei16_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
13994 }
13995 
13996 // CHECK-RV64-LABEL: @test_vsuxseg5ei16_v_f64m1_m(
13997 // CHECK-RV64-NEXT:  entry:
13998 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], double* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
13999 // CHECK-RV64-NEXT:    ret void
14000 //
test_vsuxseg5ei16_v_f64m1_m(vbool64_t mask,double * base,vuint16mf4_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,size_t vl)14001 void test_vsuxseg5ei16_v_f64m1_m (vbool64_t mask, double *base, vuint16mf4_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, size_t vl) {
14002   return vsuxseg5ei16_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
14003 }
14004 
14005 // CHECK-RV64-LABEL: @test_vsuxseg6ei16_v_f64m1_m(
14006 // CHECK-RV64-NEXT:  entry:
14007 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], double* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14008 // CHECK-RV64-NEXT:    ret void
14009 //
test_vsuxseg6ei16_v_f64m1_m(vbool64_t mask,double * base,vuint16mf4_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,size_t vl)14010 void test_vsuxseg6ei16_v_f64m1_m (vbool64_t mask, double *base, vuint16mf4_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, size_t vl) {
14011   return vsuxseg6ei16_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
14012 }
14013 
14014 // CHECK-RV64-LABEL: @test_vsuxseg7ei16_v_f64m1_m(
14015 // CHECK-RV64-NEXT:  entry:
14016 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], <vscale x 1 x double> [[V6:%.*]], double* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14017 // CHECK-RV64-NEXT:    ret void
14018 //
test_vsuxseg7ei16_v_f64m1_m(vbool64_t mask,double * base,vuint16mf4_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,vfloat64m1_t v6,size_t vl)14019 void test_vsuxseg7ei16_v_f64m1_m (vbool64_t mask, double *base, vuint16mf4_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, vfloat64m1_t v6, size_t vl) {
14020   return vsuxseg7ei16_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
14021 }
14022 
14023 // CHECK-RV64-LABEL: @test_vsuxseg8ei16_v_f64m1_m(
14024 // CHECK-RV64-NEXT:  entry:
14025 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], <vscale x 1 x double> [[V6:%.*]], <vscale x 1 x double> [[V7:%.*]], double* [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14026 // CHECK-RV64-NEXT:    ret void
14027 //
test_vsuxseg8ei16_v_f64m1_m(vbool64_t mask,double * base,vuint16mf4_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,vfloat64m1_t v6,vfloat64m1_t v7,size_t vl)14028 void test_vsuxseg8ei16_v_f64m1_m (vbool64_t mask, double *base, vuint16mf4_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, vfloat64m1_t v6, vfloat64m1_t v7, size_t vl) {
14029   return vsuxseg8ei16_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
14030 }
14031 
14032 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_f64m2_m(
14033 // CHECK-RV64-NEXT:  entry:
14034 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2f64.nxv2i16.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14035 // CHECK-RV64-NEXT:    ret void
14036 //
test_vsuxseg2ei16_v_f64m2_m(vbool32_t mask,double * base,vuint16mf2_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,size_t vl)14037 void test_vsuxseg2ei16_v_f64m2_m (vbool32_t mask, double *base, vuint16mf2_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, size_t vl) {
14038   return vsuxseg2ei16_v_f64m2_m(mask, base, bindex, v0, v1, vl);
14039 }
14040 
14041 // CHECK-RV64-LABEL: @test_vsuxseg3ei16_v_f64m2_m(
14042 // CHECK-RV64-NEXT:  entry:
14043 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2f64.nxv2i16.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], <vscale x 2 x double> [[V2:%.*]], double* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14044 // CHECK-RV64-NEXT:    ret void
14045 //
test_vsuxseg3ei16_v_f64m2_m(vbool32_t mask,double * base,vuint16mf2_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,vfloat64m2_t v2,size_t vl)14046 void test_vsuxseg3ei16_v_f64m2_m (vbool32_t mask, double *base, vuint16mf2_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, vfloat64m2_t v2, size_t vl) {
14047   return vsuxseg3ei16_v_f64m2_m(mask, base, bindex, v0, v1, v2, vl);
14048 }
14049 
14050 // CHECK-RV64-LABEL: @test_vsuxseg4ei16_v_f64m2_m(
14051 // CHECK-RV64-NEXT:  entry:
14052 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2f64.nxv2i16.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], <vscale x 2 x double> [[V2:%.*]], <vscale x 2 x double> [[V3:%.*]], double* [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14053 // CHECK-RV64-NEXT:    ret void
14054 //
test_vsuxseg4ei16_v_f64m2_m(vbool32_t mask,double * base,vuint16mf2_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,vfloat64m2_t v2,vfloat64m2_t v3,size_t vl)14055 void test_vsuxseg4ei16_v_f64m2_m (vbool32_t mask, double *base, vuint16mf2_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, vfloat64m2_t v2, vfloat64m2_t v3, size_t vl) {
14056   return vsuxseg4ei16_v_f64m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
14057 }
14058 
14059 // CHECK-RV64-LABEL: @test_vsuxseg2ei16_v_f64m4_m(
14060 // CHECK-RV64-NEXT:  entry:
14061 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4f64.nxv4i16.i64(<vscale x 4 x double> [[V0:%.*]], <vscale x 4 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14062 // CHECK-RV64-NEXT:    ret void
14063 //
test_vsuxseg2ei16_v_f64m4_m(vbool16_t mask,double * base,vuint16m1_t bindex,vfloat64m4_t v0,vfloat64m4_t v1,size_t vl)14064 void test_vsuxseg2ei16_v_f64m4_m (vbool16_t mask, double *base, vuint16m1_t bindex, vfloat64m4_t v0, vfloat64m4_t v1, size_t vl) {
14065   return vsuxseg2ei16_v_f64m4_m(mask, base, bindex, v0, v1, vl);
14066 }
14067 
14068 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_f64m1_m(
14069 // CHECK-RV64-NEXT:  entry:
14070 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14071 // CHECK-RV64-NEXT:    ret void
14072 //
test_vsuxseg2ei32_v_f64m1_m(vbool64_t mask,double * base,vuint32mf2_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,size_t vl)14073 void test_vsuxseg2ei32_v_f64m1_m (vbool64_t mask, double *base, vuint32mf2_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, size_t vl) {
14074   return vsuxseg2ei32_v_f64m1_m(mask, base, bindex, v0, v1, vl);
14075 }
14076 
14077 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_f64m1_m(
14078 // CHECK-RV64-NEXT:  entry:
14079 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], double* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14080 // CHECK-RV64-NEXT:    ret void
14081 //
test_vsuxseg3ei32_v_f64m1_m(vbool64_t mask,double * base,vuint32mf2_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,size_t vl)14082 void test_vsuxseg3ei32_v_f64m1_m (vbool64_t mask, double *base, vuint32mf2_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, size_t vl) {
14083   return vsuxseg3ei32_v_f64m1_m(mask, base, bindex, v0, v1, v2, vl);
14084 }
14085 
14086 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_f64m1_m(
14087 // CHECK-RV64-NEXT:  entry:
14088 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], double* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14089 // CHECK-RV64-NEXT:    ret void
14090 //
test_vsuxseg4ei32_v_f64m1_m(vbool64_t mask,double * base,vuint32mf2_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,size_t vl)14091 void test_vsuxseg4ei32_v_f64m1_m (vbool64_t mask, double *base, vuint32mf2_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, size_t vl) {
14092   return vsuxseg4ei32_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
14093 }
14094 
14095 // CHECK-RV64-LABEL: @test_vsuxseg5ei32_v_f64m1_m(
14096 // CHECK-RV64-NEXT:  entry:
14097 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], double* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14098 // CHECK-RV64-NEXT:    ret void
14099 //
test_vsuxseg5ei32_v_f64m1_m(vbool64_t mask,double * base,vuint32mf2_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,size_t vl)14100 void test_vsuxseg5ei32_v_f64m1_m (vbool64_t mask, double *base, vuint32mf2_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, size_t vl) {
14101   return vsuxseg5ei32_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
14102 }
14103 
14104 // CHECK-RV64-LABEL: @test_vsuxseg6ei32_v_f64m1_m(
14105 // CHECK-RV64-NEXT:  entry:
14106 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], double* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14107 // CHECK-RV64-NEXT:    ret void
14108 //
test_vsuxseg6ei32_v_f64m1_m(vbool64_t mask,double * base,vuint32mf2_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,size_t vl)14109 void test_vsuxseg6ei32_v_f64m1_m (vbool64_t mask, double *base, vuint32mf2_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, size_t vl) {
14110   return vsuxseg6ei32_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
14111 }
14112 
14113 // CHECK-RV64-LABEL: @test_vsuxseg7ei32_v_f64m1_m(
14114 // CHECK-RV64-NEXT:  entry:
14115 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], <vscale x 1 x double> [[V6:%.*]], double* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14116 // CHECK-RV64-NEXT:    ret void
14117 //
test_vsuxseg7ei32_v_f64m1_m(vbool64_t mask,double * base,vuint32mf2_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,vfloat64m1_t v6,size_t vl)14118 void test_vsuxseg7ei32_v_f64m1_m (vbool64_t mask, double *base, vuint32mf2_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, vfloat64m1_t v6, size_t vl) {
14119   return vsuxseg7ei32_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
14120 }
14121 
14122 // CHECK-RV64-LABEL: @test_vsuxseg8ei32_v_f64m1_m(
14123 // CHECK-RV64-NEXT:  entry:
14124 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], <vscale x 1 x double> [[V6:%.*]], <vscale x 1 x double> [[V7:%.*]], double* [[BASE:%.*]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14125 // CHECK-RV64-NEXT:    ret void
14126 //
test_vsuxseg8ei32_v_f64m1_m(vbool64_t mask,double * base,vuint32mf2_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,vfloat64m1_t v6,vfloat64m1_t v7,size_t vl)14127 void test_vsuxseg8ei32_v_f64m1_m (vbool64_t mask, double *base, vuint32mf2_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, vfloat64m1_t v6, vfloat64m1_t v7, size_t vl) {
14128   return vsuxseg8ei32_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
14129 }
14130 
14131 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_f64m2_m(
14132 // CHECK-RV64-NEXT:  entry:
14133 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2f64.nxv2i32.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14134 // CHECK-RV64-NEXT:    ret void
14135 //
test_vsuxseg2ei32_v_f64m2_m(vbool32_t mask,double * base,vuint32m1_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,size_t vl)14136 void test_vsuxseg2ei32_v_f64m2_m (vbool32_t mask, double *base, vuint32m1_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, size_t vl) {
14137   return vsuxseg2ei32_v_f64m2_m(mask, base, bindex, v0, v1, vl);
14138 }
14139 
14140 // CHECK-RV64-LABEL: @test_vsuxseg3ei32_v_f64m2_m(
14141 // CHECK-RV64-NEXT:  entry:
14142 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2f64.nxv2i32.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], <vscale x 2 x double> [[V2:%.*]], double* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14143 // CHECK-RV64-NEXT:    ret void
14144 //
test_vsuxseg3ei32_v_f64m2_m(vbool32_t mask,double * base,vuint32m1_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,vfloat64m2_t v2,size_t vl)14145 void test_vsuxseg3ei32_v_f64m2_m (vbool32_t mask, double *base, vuint32m1_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, vfloat64m2_t v2, size_t vl) {
14146   return vsuxseg3ei32_v_f64m2_m(mask, base, bindex, v0, v1, v2, vl);
14147 }
14148 
14149 // CHECK-RV64-LABEL: @test_vsuxseg4ei32_v_f64m2_m(
14150 // CHECK-RV64-NEXT:  entry:
14151 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2f64.nxv2i32.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], <vscale x 2 x double> [[V2:%.*]], <vscale x 2 x double> [[V3:%.*]], double* [[BASE:%.*]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14152 // CHECK-RV64-NEXT:    ret void
14153 //
test_vsuxseg4ei32_v_f64m2_m(vbool32_t mask,double * base,vuint32m1_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,vfloat64m2_t v2,vfloat64m2_t v3,size_t vl)14154 void test_vsuxseg4ei32_v_f64m2_m (vbool32_t mask, double *base, vuint32m1_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, vfloat64m2_t v2, vfloat64m2_t v3, size_t vl) {
14155   return vsuxseg4ei32_v_f64m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
14156 }
14157 
14158 // CHECK-RV64-LABEL: @test_vsuxseg2ei32_v_f64m4_m(
14159 // CHECK-RV64-NEXT:  entry:
14160 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4f64.nxv4i32.i64(<vscale x 4 x double> [[V0:%.*]], <vscale x 4 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14161 // CHECK-RV64-NEXT:    ret void
14162 //
test_vsuxseg2ei32_v_f64m4_m(vbool16_t mask,double * base,vuint32m2_t bindex,vfloat64m4_t v0,vfloat64m4_t v1,size_t vl)14163 void test_vsuxseg2ei32_v_f64m4_m (vbool16_t mask, double *base, vuint32m2_t bindex, vfloat64m4_t v0, vfloat64m4_t v1, size_t vl) {
14164   return vsuxseg2ei32_v_f64m4_m(mask, base, bindex, v0, v1, vl);
14165 }
14166 
14167 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_f64m1_m(
14168 // CHECK-RV64-NEXT:  entry:
14169 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14170 // CHECK-RV64-NEXT:    ret void
14171 //
test_vsuxseg2ei64_v_f64m1_m(vbool64_t mask,double * base,vuint64m1_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,size_t vl)14172 void test_vsuxseg2ei64_v_f64m1_m (vbool64_t mask, double *base, vuint64m1_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, size_t vl) {
14173   return vsuxseg2ei64_v_f64m1_m(mask, base, bindex, v0, v1, vl);
14174 }
14175 
14176 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_f64m1_m(
14177 // CHECK-RV64-NEXT:  entry:
14178 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], double* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14179 // CHECK-RV64-NEXT:    ret void
14180 //
test_vsuxseg3ei64_v_f64m1_m(vbool64_t mask,double * base,vuint64m1_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,size_t vl)14181 void test_vsuxseg3ei64_v_f64m1_m (vbool64_t mask, double *base, vuint64m1_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, size_t vl) {
14182   return vsuxseg3ei64_v_f64m1_m(mask, base, bindex, v0, v1, v2, vl);
14183 }
14184 
14185 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_f64m1_m(
14186 // CHECK-RV64-NEXT:  entry:
14187 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], double* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14188 // CHECK-RV64-NEXT:    ret void
14189 //
test_vsuxseg4ei64_v_f64m1_m(vbool64_t mask,double * base,vuint64m1_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,size_t vl)14190 void test_vsuxseg4ei64_v_f64m1_m (vbool64_t mask, double *base, vuint64m1_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, size_t vl) {
14191   return vsuxseg4ei64_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, vl);
14192 }
14193 
14194 // CHECK-RV64-LABEL: @test_vsuxseg5ei64_v_f64m1_m(
14195 // CHECK-RV64-NEXT:  entry:
14196 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg5.mask.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], double* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14197 // CHECK-RV64-NEXT:    ret void
14198 //
test_vsuxseg5ei64_v_f64m1_m(vbool64_t mask,double * base,vuint64m1_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,size_t vl)14199 void test_vsuxseg5ei64_v_f64m1_m (vbool64_t mask, double *base, vuint64m1_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, size_t vl) {
14200   return vsuxseg5ei64_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, vl);
14201 }
14202 
14203 // CHECK-RV64-LABEL: @test_vsuxseg6ei64_v_f64m1_m(
14204 // CHECK-RV64-NEXT:  entry:
14205 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg6.mask.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], double* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14206 // CHECK-RV64-NEXT:    ret void
14207 //
test_vsuxseg6ei64_v_f64m1_m(vbool64_t mask,double * base,vuint64m1_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,size_t vl)14208 void test_vsuxseg6ei64_v_f64m1_m (vbool64_t mask, double *base, vuint64m1_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, size_t vl) {
14209   return vsuxseg6ei64_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, vl);
14210 }
14211 
14212 // CHECK-RV64-LABEL: @test_vsuxseg7ei64_v_f64m1_m(
14213 // CHECK-RV64-NEXT:  entry:
14214 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg7.mask.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], <vscale x 1 x double> [[V6:%.*]], double* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14215 // CHECK-RV64-NEXT:    ret void
14216 //
test_vsuxseg7ei64_v_f64m1_m(vbool64_t mask,double * base,vuint64m1_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,vfloat64m1_t v6,size_t vl)14217 void test_vsuxseg7ei64_v_f64m1_m (vbool64_t mask, double *base, vuint64m1_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, vfloat64m1_t v6, size_t vl) {
14218   return vsuxseg7ei64_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, vl);
14219 }
14220 
14221 // CHECK-RV64-LABEL: @test_vsuxseg8ei64_v_f64m1_m(
14222 // CHECK-RV64-NEXT:  entry:
14223 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg8.mask.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[V0:%.*]], <vscale x 1 x double> [[V1:%.*]], <vscale x 1 x double> [[V2:%.*]], <vscale x 1 x double> [[V3:%.*]], <vscale x 1 x double> [[V4:%.*]], <vscale x 1 x double> [[V5:%.*]], <vscale x 1 x double> [[V6:%.*]], <vscale x 1 x double> [[V7:%.*]], double* [[BASE:%.*]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14224 // CHECK-RV64-NEXT:    ret void
14225 //
test_vsuxseg8ei64_v_f64m1_m(vbool64_t mask,double * base,vuint64m1_t bindex,vfloat64m1_t v0,vfloat64m1_t v1,vfloat64m1_t v2,vfloat64m1_t v3,vfloat64m1_t v4,vfloat64m1_t v5,vfloat64m1_t v6,vfloat64m1_t v7,size_t vl)14226 void test_vsuxseg8ei64_v_f64m1_m (vbool64_t mask, double *base, vuint64m1_t bindex, vfloat64m1_t v0, vfloat64m1_t v1, vfloat64m1_t v2, vfloat64m1_t v3, vfloat64m1_t v4, vfloat64m1_t v5, vfloat64m1_t v6, vfloat64m1_t v7, size_t vl) {
14227   return vsuxseg8ei64_v_f64m1_m(mask, base, bindex, v0, v1, v2, v3, v4, v5, v6, v7, vl);
14228 }
14229 
14230 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_f64m2_m(
14231 // CHECK-RV64-NEXT:  entry:
14232 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv2f64.nxv2i64.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14233 // CHECK-RV64-NEXT:    ret void
14234 //
test_vsuxseg2ei64_v_f64m2_m(vbool32_t mask,double * base,vuint64m2_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,size_t vl)14235 void test_vsuxseg2ei64_v_f64m2_m (vbool32_t mask, double *base, vuint64m2_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, size_t vl) {
14236   return vsuxseg2ei64_v_f64m2_m(mask, base, bindex, v0, v1, vl);
14237 }
14238 
14239 // CHECK-RV64-LABEL: @test_vsuxseg3ei64_v_f64m2_m(
14240 // CHECK-RV64-NEXT:  entry:
14241 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg3.mask.nxv2f64.nxv2i64.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], <vscale x 2 x double> [[V2:%.*]], double* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14242 // CHECK-RV64-NEXT:    ret void
14243 //
test_vsuxseg3ei64_v_f64m2_m(vbool32_t mask,double * base,vuint64m2_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,vfloat64m2_t v2,size_t vl)14244 void test_vsuxseg3ei64_v_f64m2_m (vbool32_t mask, double *base, vuint64m2_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, vfloat64m2_t v2, size_t vl) {
14245   return vsuxseg3ei64_v_f64m2_m(mask, base, bindex, v0, v1, v2, vl);
14246 }
14247 
14248 // CHECK-RV64-LABEL: @test_vsuxseg4ei64_v_f64m2_m(
14249 // CHECK-RV64-NEXT:  entry:
14250 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg4.mask.nxv2f64.nxv2i64.i64(<vscale x 2 x double> [[V0:%.*]], <vscale x 2 x double> [[V1:%.*]], <vscale x 2 x double> [[V2:%.*]], <vscale x 2 x double> [[V3:%.*]], double* [[BASE:%.*]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14251 // CHECK-RV64-NEXT:    ret void
14252 //
test_vsuxseg4ei64_v_f64m2_m(vbool32_t mask,double * base,vuint64m2_t bindex,vfloat64m2_t v0,vfloat64m2_t v1,vfloat64m2_t v2,vfloat64m2_t v3,size_t vl)14253 void test_vsuxseg4ei64_v_f64m2_m (vbool32_t mask, double *base, vuint64m2_t bindex, vfloat64m2_t v0, vfloat64m2_t v1, vfloat64m2_t v2, vfloat64m2_t v3, size_t vl) {
14254   return vsuxseg4ei64_v_f64m2_m(mask, base, bindex, v0, v1, v2, v3, vl);
14255 }
14256 
14257 // CHECK-RV64-LABEL: @test_vsuxseg2ei64_v_f64m4_m(
14258 // CHECK-RV64-NEXT:  entry:
14259 // CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxseg2.mask.nxv4f64.nxv4i64.i64(<vscale x 4 x double> [[V0:%.*]], <vscale x 4 x double> [[V1:%.*]], double* [[BASE:%.*]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
14260 // CHECK-RV64-NEXT:    ret void
14261 //
test_vsuxseg2ei64_v_f64m4_m(vbool16_t mask,double * base,vuint64m4_t bindex,vfloat64m4_t v0,vfloat64m4_t v1,size_t vl)14262 void test_vsuxseg2ei64_v_f64m4_m (vbool16_t mask, double *base, vuint64m4_t bindex, vfloat64m4_t v0, vfloat64m4_t v1, size_t vl) {
14263   return vsuxseg2ei64_v_f64m4_m(mask, base, bindex, v0, v1, vl);
14264 }
14265 
14266