// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +experimental-v -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>

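// The tests below exercise the unmasked and masked forms of the vmsif
// (set-including-first mask bit) intrinsic for each vbool element ratio,
// checking that the expected @llvm.riscv.vmsif[.mask] calls are emitted.
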
//
// CHECK-RV64-LABEL: @test_vmsif_m_b1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.riscv.vmsif.nxv64i1.i64(<vscale x 64 x i1> [[OP1:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 64 x i1> [[TMP0]]
//
vbool1_t test_vmsif_m_b1(vbool1_t op1, size_t vl) {
  return vmsif_m_b1(op1, vl);
}

//
// CHECK-RV64-LABEL: @test_vmsif_m_b2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i1> @llvm.riscv.vmsif.nxv32i1.i64(<vscale x 32 x i1> [[OP1:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 32 x i1> [[TMP0]]
//
vbool2_t test_vmsif_m_b2(vbool2_t op1, size_t vl) {
  return vmsif_m_b2(op1, vl);
}

//
// CHECK-RV64-LABEL: @test_vmsif_m_b4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i1> @llvm.riscv.vmsif.nxv16i1.i64(<vscale x 16 x i1> [[OP1:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 16 x i1> [[TMP0]]
//
vbool4_t test_vmsif_m_b4(vbool4_t op1, size_t vl) {
  return vmsif_m_b4(op1, vl);
}

//
// CHECK-RV64-LABEL: @test_vmsif_m_b8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i1> @llvm.riscv.vmsif.nxv8i1.i64(<vscale x 8 x i1> [[OP1:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP0]]
//
vbool8_t test_vmsif_m_b8(vbool8_t op1, size_t vl) {
  return vmsif_m_b8(op1, vl);
}

//
// CHECK-RV64-LABEL: @test_vmsif_m_b16(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i1> @llvm.riscv.vmsif.nxv4i1.i64(<vscale x 4 x i1> [[OP1:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP0]]
//
vbool16_t test_vmsif_m_b16(vbool16_t op1, size_t vl) {
  return vmsif_m_b16(op1, vl);
}

//
// CHECK-RV64-LABEL: @test_vmsif_m_b32(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i1> @llvm.riscv.vmsif.nxv2i1.i64(<vscale x 2 x i1> [[OP1:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP0]]
//
vbool32_t test_vmsif_m_b32(vbool32_t op1, size_t vl) {
  return vmsif_m_b32(op1, vl);
}

//
// CHECK-RV64-LABEL: @test_vmsif_m_b64(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i1> @llvm.riscv.vmsif.nxv1i1.i64(<vscale x 1 x i1> [[OP1:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP0]]
//
vbool64_t test_vmsif_m_b64(vbool64_t op1, size_t vl) {
  return vmsif_m_b64(op1, vl);
}

//
// CHECK-RV64-LABEL: @test_vmsif_m_b1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.riscv.vmsif.mask.nxv64i1.i64(<vscale x 64 x i1> [[MASKEDOFF:%.*]], <vscale x 64 x i1> [[OP1:%.*]], <vscale x 64 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 64 x i1> [[TMP0]]
//
vbool1_t test_vmsif_m_b1_m(vbool1_t mask, vbool1_t maskedoff, vbool1_t op1,
                           size_t vl) {
  return vmsif_m_b1_m(mask, maskedoff, op1, vl);
}

//
// CHECK-RV64-LABEL: @test_vmsif_m_b2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i1> @llvm.riscv.vmsif.mask.nxv32i1.i64(<vscale x 32 x i1> [[MASKEDOFF:%.*]], <vscale x 32 x i1> [[OP1:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 32 x i1> [[TMP0]]
//
vbool2_t test_vmsif_m_b2_m(vbool2_t mask, vbool2_t maskedoff, vbool2_t op1,
                           size_t vl) {
  return vmsif_m_b2_m(mask, maskedoff, op1, vl);
}

//
// CHECK-RV64-LABEL: @test_vmsif_m_b4_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i1> @llvm.riscv.vmsif.mask.nxv16i1.i64(<vscale x 16 x i1> [[MASKEDOFF:%.*]], <vscale x 16 x i1> [[OP1:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 16 x i1> [[TMP0]]
//
vbool4_t test_vmsif_m_b4_m(vbool4_t mask, vbool4_t maskedoff, vbool4_t op1,
                           size_t vl) {
  return vmsif_m_b4_m(mask, maskedoff, op1, vl);
}

//
// CHECK-RV64-LABEL: @test_vmsif_m_b8_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i1> @llvm.riscv.vmsif.mask.nxv8i1.i64(<vscale x 8 x i1> [[MASKEDOFF:%.*]], <vscale x 8 x i1> [[OP1:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP0]]
//
vbool8_t test_vmsif_m_b8_m(vbool8_t mask, vbool8_t maskedoff, vbool8_t op1,
                           size_t vl) {
  return vmsif_m_b8_m(mask, maskedoff, op1, vl);
}

//
// CHECK-RV64-LABEL: @test_vmsif_m_b16_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i1> @llvm.riscv.vmsif.mask.nxv4i1.i64(<vscale x 4 x i1> [[MASKEDOFF:%.*]], <vscale x 4 x i1> [[OP1:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP0]]
//
vbool16_t test_vmsif_m_b16_m(vbool16_t mask, vbool16_t maskedoff, vbool16_t op1,
                             size_t vl) {
  return vmsif_m_b16_m(mask, maskedoff, op1, vl);
}

//
// CHECK-RV64-LABEL: @test_vmsif_m_b32_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i1> @llvm.riscv.vmsif.mask.nxv2i1.i64(<vscale x 2 x i1> [[MASKEDOFF:%.*]], <vscale x 2 x i1> [[OP1:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP0]]
//
vbool32_t test_vmsif_m_b32_m(vbool32_t mask, vbool32_t maskedoff, vbool32_t op1,
                             size_t vl) {
  return vmsif_m_b32_m(mask, maskedoff, op1, vl);
}

//
// CHECK-RV64-LABEL: @test_vmsif_m_b64_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i1> @llvm.riscv.vmsif.mask.nxv1i1.i64(<vscale x 1 x i1> [[MASKEDOFF:%.*]], <vscale x 1 x i1> [[OP1:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP0]]
//
vbool64_t test_vmsif_m_b64_m(vbool64_t mask, vbool64_t maskedoff, vbool64_t op1,
                             size_t vl) {
  return vmsif_m_b64_m(mask, maskedoff, op1, vl);
}