
Searched for refs:vl (Results 251 – 275 of 5439), sorted by relevance


/dports/devel/wasi-libcxx/llvm-project-13.0.1.src/clang/test/CodeGen/RISCV/rvv-intrinsics/
vor.c
14 return vor_vv_i8mf8(op1, op2, vl); in test_vor_vv_i8mf8()
24 return vor_vx_i8mf8(op1, op2, vl); in test_vor_vx_i8mf8()
34 return vor_vv_i8mf4(op1, op2, vl); in test_vor_vv_i8mf4()
44 return vor_vx_i8mf4(op1, op2, vl); in test_vor_vx_i8mf4()
54 return vor_vv_i8mf2(op1, op2, vl); in test_vor_vv_i8mf2()
64 return vor_vx_i8mf2(op1, op2, vl); in test_vor_vx_i8mf2()
74 return vor_vv_i8m1(op1, op2, vl); in test_vor_vv_i8m1()
84 return vor_vx_i8m1(op1, op2, vl); in test_vor_vx_i8m1()
94 return vor_vv_i8m2(op1, op2, vl); in test_vor_vv_i8m2()
104 return vor_vx_i8m2(op1, op2, vl); in test_vor_vx_i8m2()
[all …]
vsadd.c
14 return vsadd_vv_i8mf8(op1, op2, vl); in test_vsadd_vv_i8mf8()
24 return vsadd_vx_i8mf8(op1, op2, vl); in test_vsadd_vx_i8mf8()
34 return vsadd_vv_i8mf4(op1, op2, vl); in test_vsadd_vv_i8mf4()
44 return vsadd_vx_i8mf4(op1, op2, vl); in test_vsadd_vx_i8mf4()
54 return vsadd_vv_i8mf2(op1, op2, vl); in test_vsadd_vv_i8mf2()
64 return vsadd_vx_i8mf2(op1, op2, vl); in test_vsadd_vx_i8mf2()
74 return vsadd_vv_i8m1(op1, op2, vl); in test_vsadd_vv_i8m1()
84 return vsadd_vx_i8m1(op1, op2, vl); in test_vsadd_vx_i8m1()
94 return vsadd_vv_i8m2(op1, op2, vl); in test_vsadd_vv_i8m2()
104 return vsadd_vx_i8m2(op1, op2, vl); in test_vsadd_vx_i8m2()
[all …]
vmsge.c
14 return vmsge_vv_i8mf8_b64(op1, op2, vl); in test_vmsge_vv_i8mf8_b64()
24 return vmsge_vx_i8mf8_b64(op1, op2, vl); in test_vmsge_vx_i8mf8_b64()
34 return vmsge_vv_i8mf4_b32(op1, op2, vl); in test_vmsge_vv_i8mf4_b32()
44 return vmsge_vx_i8mf4_b32(op1, op2, vl); in test_vmsge_vx_i8mf4_b32()
74 return vmsge_vv_i8m1_b8(op1, op2, vl); in test_vmsge_vv_i8m1_b8()
84 return vmsge_vx_i8m1_b8(op1, op2, vl); in test_vmsge_vx_i8m1_b8()
94 return vmsge_vv_i8m2_b4(op1, op2, vl); in test_vmsge_vv_i8m2_b4()
104 return vmsge_vx_i8m2_b4(op1, op2, vl); in test_vmsge_vx_i8m2_b4()
114 return vmsge_vv_i8m4_b2(op1, op2, vl); in test_vmsge_vv_i8m4_b2()
124 return vmsge_vx_i8m4_b2(op1, op2, vl); in test_vmsge_vx_i8m4_b2()
[all …]
vmsle.c
14 return vmsle_vv_i8mf8_b64(op1, op2, vl); in test_vmsle_vv_i8mf8_b64()
24 return vmsle_vx_i8mf8_b64(op1, op2, vl); in test_vmsle_vx_i8mf8_b64()
34 return vmsle_vv_i8mf4_b32(op1, op2, vl); in test_vmsle_vv_i8mf4_b32()
44 return vmsle_vx_i8mf4_b32(op1, op2, vl); in test_vmsle_vx_i8mf4_b32()
74 return vmsle_vv_i8m1_b8(op1, op2, vl); in test_vmsle_vv_i8m1_b8()
84 return vmsle_vx_i8m1_b8(op1, op2, vl); in test_vmsle_vx_i8m1_b8()
94 return vmsle_vv_i8m2_b4(op1, op2, vl); in test_vmsle_vv_i8m2_b4()
104 return vmsle_vx_i8m2_b4(op1, op2, vl); in test_vmsle_vx_i8m2_b4()
114 return vmsle_vv_i8m4_b2(op1, op2, vl); in test_vmsle_vv_i8m4_b2()
124 return vmsle_vx_i8m4_b2(op1, op2, vl); in test_vmsle_vx_i8m4_b2()
[all …]
vrem.c
14 return vrem_vv_i8mf8(op1, op2, vl); in test_vrem_vv_i8mf8()
24 return vrem_vx_i8mf8(op1, op2, vl); in test_vrem_vx_i8mf8()
34 return vrem_vv_i8mf4(op1, op2, vl); in test_vrem_vv_i8mf4()
44 return vrem_vx_i8mf4(op1, op2, vl); in test_vrem_vx_i8mf4()
54 return vrem_vv_i8mf2(op1, op2, vl); in test_vrem_vv_i8mf2()
64 return vrem_vx_i8mf2(op1, op2, vl); in test_vrem_vx_i8mf2()
74 return vrem_vv_i8m1(op1, op2, vl); in test_vrem_vv_i8m1()
84 return vrem_vx_i8m1(op1, op2, vl); in test_vrem_vx_i8m1()
94 return vrem_vv_i8m2(op1, op2, vl); in test_vrem_vv_i8m2()
104 return vrem_vx_i8m2(op1, op2, vl); in test_vrem_vx_i8m2()
[all …]
vssub.c
14 return vssub_vv_i8mf8(op1, op2, vl); in test_vssub_vv_i8mf8()
24 return vssub_vx_i8mf8(op1, op2, vl); in test_vssub_vx_i8mf8()
34 return vssub_vv_i8mf4(op1, op2, vl); in test_vssub_vv_i8mf4()
44 return vssub_vx_i8mf4(op1, op2, vl); in test_vssub_vx_i8mf4()
54 return vssub_vv_i8mf2(op1, op2, vl); in test_vssub_vv_i8mf2()
64 return vssub_vx_i8mf2(op1, op2, vl); in test_vssub_vx_i8mf2()
74 return vssub_vv_i8m1(op1, op2, vl); in test_vssub_vv_i8m1()
84 return vssub_vx_i8m1(op1, op2, vl); in test_vssub_vx_i8m1()
94 return vssub_vv_i8m2(op1, op2, vl); in test_vssub_vv_i8m2()
104 return vssub_vx_i8m2(op1, op2, vl); in test_vssub_vx_i8m2()
[all …]
vadd.c
15 return vadd_vv_i8mf8(op1, op2, vl); in test_vadd_vv_i8mf8()
25 return vadd_vx_i8mf8(op1, op2, vl); in test_vadd_vx_i8mf8()
35 return vadd_vv_i8mf4(op1, op2, vl); in test_vadd_vv_i8mf4()
45 return vadd_vx_i8mf4(op1, op2, vl); in test_vadd_vx_i8mf4()
55 return vadd_vv_i8mf2(op1, op2, vl); in test_vadd_vv_i8mf2()
65 return vadd_vx_i8mf2(op1, op2, vl); in test_vadd_vx_i8mf2()
75 return vadd_vv_i8m1(op1, op2, vl); in test_vadd_vv_i8m1()
85 return vadd_vx_i8m1(op1, op2, vl); in test_vadd_vx_i8m1()
95 return vadd_vv_i8m2(op1, op2, vl); in test_vadd_vv_i8m2()
105 return vadd_vx_i8m2(op1, op2, vl); in test_vadd_vx_i8m2()
[all …]
vmin.c
14 return vmin_vv_i8mf8(op1, op2, vl); in test_vmin_vv_i8mf8()
24 return vmin_vx_i8mf8(op1, op2, vl); in test_vmin_vx_i8mf8()
34 return vmin_vv_i8mf4(op1, op2, vl); in test_vmin_vv_i8mf4()
44 return vmin_vx_i8mf4(op1, op2, vl); in test_vmin_vx_i8mf4()
54 return vmin_vv_i8mf2(op1, op2, vl); in test_vmin_vv_i8mf2()
64 return vmin_vx_i8mf2(op1, op2, vl); in test_vmin_vx_i8mf2()
74 return vmin_vv_i8m1(op1, op2, vl); in test_vmin_vv_i8m1()
84 return vmin_vx_i8m1(op1, op2, vl); in test_vmin_vx_i8m1()
94 return vmin_vv_i8m2(op1, op2, vl); in test_vmin_vv_i8m2()
104 return vmin_vx_i8m2(op1, op2, vl); in test_vmin_vx_i8m2()
[all …]
vmsgt.c
14 return vmsgt_vv_i8mf8_b64(op1, op2, vl); in test_vmsgt_vv_i8mf8_b64()
24 return vmsgt_vx_i8mf8_b64(op1, op2, vl); in test_vmsgt_vx_i8mf8_b64()
34 return vmsgt_vv_i8mf4_b32(op1, op2, vl); in test_vmsgt_vv_i8mf4_b32()
44 return vmsgt_vx_i8mf4_b32(op1, op2, vl); in test_vmsgt_vx_i8mf4_b32()
74 return vmsgt_vv_i8m1_b8(op1, op2, vl); in test_vmsgt_vv_i8m1_b8()
84 return vmsgt_vx_i8m1_b8(op1, op2, vl); in test_vmsgt_vx_i8m1_b8()
94 return vmsgt_vv_i8m2_b4(op1, op2, vl); in test_vmsgt_vv_i8m2_b4()
104 return vmsgt_vx_i8m2_b4(op1, op2, vl); in test_vmsgt_vx_i8m2_b4()
114 return vmsgt_vv_i8m4_b2(op1, op2, vl); in test_vmsgt_vv_i8m4_b2()
124 return vmsgt_vx_i8m4_b2(op1, op2, vl); in test_vmsgt_vx_i8m4_b2()
[all …]
vmslt.c
14 return vmslt_vv_i8mf8_b64(op1, op2, vl); in test_vmslt_vv_i8mf8_b64()
24 return vmslt_vx_i8mf8_b64(op1, op2, vl); in test_vmslt_vx_i8mf8_b64()
34 return vmslt_vv_i8mf4_b32(op1, op2, vl); in test_vmslt_vv_i8mf4_b32()
44 return vmslt_vx_i8mf4_b32(op1, op2, vl); in test_vmslt_vx_i8mf4_b32()
74 return vmslt_vv_i8m1_b8(op1, op2, vl); in test_vmslt_vv_i8m1_b8()
84 return vmslt_vx_i8m1_b8(op1, op2, vl); in test_vmslt_vx_i8m1_b8()
94 return vmslt_vv_i8m2_b4(op1, op2, vl); in test_vmslt_vv_i8m2_b4()
104 return vmslt_vx_i8m2_b4(op1, op2, vl); in test_vmslt_vx_i8m2_b4()
114 return vmslt_vv_i8m4_b2(op1, op2, vl); in test_vmslt_vv_i8m4_b2()
124 return vmslt_vx_i8m4_b2(op1, op2, vl); in test_vmslt_vx_i8m4_b2()
[all …]
vdiv.c
14 return vdiv_vv_i8mf8(op1, op2, vl); in test_vdiv_vv_i8mf8()
24 return vdiv_vx_i8mf8(op1, op2, vl); in test_vdiv_vx_i8mf8()
34 return vdiv_vv_i8mf4(op1, op2, vl); in test_vdiv_vv_i8mf4()
44 return vdiv_vx_i8mf4(op1, op2, vl); in test_vdiv_vx_i8mf4()
54 return vdiv_vv_i8mf2(op1, op2, vl); in test_vdiv_vv_i8mf2()
64 return vdiv_vx_i8mf2(op1, op2, vl); in test_vdiv_vx_i8mf2()
74 return vdiv_vv_i8m1(op1, op2, vl); in test_vdiv_vv_i8m1()
84 return vdiv_vx_i8m1(op1, op2, vl); in test_vdiv_vx_i8m1()
94 return vdiv_vv_i8m2(op1, op2, vl); in test_vdiv_vv_i8m2()
104 return vdiv_vx_i8m2(op1, op2, vl); in test_vdiv_vx_i8m2()
[all …]
vsll.c
14 return vsll_vv_i8mf8(op1, shift, vl); in test_vsll_vv_i8mf8()
24 return vsll_vx_i8mf8(op1, shift, vl); in test_vsll_vx_i8mf8()
34 return vsll_vv_i8mf4(op1, shift, vl); in test_vsll_vv_i8mf4()
44 return vsll_vx_i8mf4(op1, shift, vl); in test_vsll_vx_i8mf4()
54 return vsll_vv_i8mf2(op1, shift, vl); in test_vsll_vv_i8mf2()
64 return vsll_vx_i8mf2(op1, shift, vl); in test_vsll_vx_i8mf2()
74 return vsll_vv_i8m1(op1, shift, vl); in test_vsll_vv_i8m1()
84 return vsll_vx_i8m1(op1, shift, vl); in test_vsll_vx_i8m1()
94 return vsll_vv_i8m2(op1, shift, vl); in test_vsll_vv_i8m2()
104 return vsll_vx_i8m2(op1, shift, vl); in test_vsll_vx_i8m2()
[all …]
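
All of the matches in this directory exercise the explicitly typed RISC-V V intrinsics from <riscv_vector.h>: the element type and LMUL are encoded in the intrinsic name, and the active vector length is passed as the trailing vl argument. A minimal sketch of that calling pattern, assuming the clang 13-era (non-prefixed) intrinsic names these tests use; the helper name add_bytes is hypothetical, not taken from the files above:

#include <stddef.h>
#include <riscv_vector.h>

/* Adds the first vl elements of two i8m1 vectors using the explicitly
 * typed intrinsic; in real code vl would typically come from a
 * vsetvl_e8m1() call on the application vector length. */
vint8m1_t add_bytes(vint8m1_t op1, vint8m1_t op2, size_t vl) {
  return vadd_vv_i8m1(op1, op2, vl);
}
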
/dports/devel/wasi-libcxx/llvm-project-13.0.1.src/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/
vasub.c
14 return vasub(op1, op2, vl); in test_vasub_vv_i8mf8()
24 return vasub(op1, op2, vl); in test_vasub_vx_i8mf8()
34 return vasub(op1, op2, vl); in test_vasub_vv_i8mf4()
44 return vasub(op1, op2, vl); in test_vasub_vx_i8mf4()
54 return vasub(op1, op2, vl); in test_vasub_vv_i8mf2()
64 return vasub(op1, op2, vl); in test_vasub_vx_i8mf2()
74 return vasub(op1, op2, vl); in test_vasub_vv_i8m1()
84 return vasub(op1, op2, vl); in test_vasub_vx_i8m1()
94 return vasub(op1, op2, vl); in test_vasub_vv_i8m2()
104 return vasub(op1, op2, vl); in test_vasub_vx_i8m2()
[all …]
vmseq.c
14 return vmseq(op1, op2, vl); in test_vmseq_vv_i8mf8_b64()
24 return vmseq(op1, op2, vl); in test_vmseq_vx_i8mf8_b64()
34 return vmseq(op1, op2, vl); in test_vmseq_vv_i8mf4_b32()
44 return vmseq(op1, op2, vl); in test_vmseq_vx_i8mf4_b32()
54 return vmseq(op1, op2, vl); in test_vmseq_vv_i8mf2_b16()
64 return vmseq(op1, op2, vl); in test_vmseq_vx_i8mf2_b16()
74 return vmseq(op1, op2, vl); in test_vmseq_vv_i8m1_b8()
84 return vmseq(op1, op2, vl); in test_vmseq_vx_i8m1_b8()
94 return vmseq(op1, op2, vl); in test_vmseq_vv_i8m2_b4()
104 return vmseq(op1, op2, vl); in test_vmseq_vx_i8m2_b4()
[all …]
vmsgt.c
14 return vmsgt(op1, op2, vl); in test_vmsgt_vv_i8mf8_b64()
24 return vmsgt(op1, op2, vl); in test_vmsgt_vx_i8mf8_b64()
34 return vmsgt(op1, op2, vl); in test_vmsgt_vv_i8mf4_b32()
44 return vmsgt(op1, op2, vl); in test_vmsgt_vx_i8mf4_b32()
54 return vmsgt(op1, op2, vl); in test_vmsgt_vv_i8mf2_b16()
64 return vmsgt(op1, op2, vl); in test_vmsgt_vx_i8mf2_b16()
74 return vmsgt(op1, op2, vl); in test_vmsgt_vv_i8m1_b8()
84 return vmsgt(op1, op2, vl); in test_vmsgt_vx_i8m1_b8()
94 return vmsgt(op1, op2, vl); in test_vmsgt_vv_i8m2_b4()
104 return vmsgt(op1, op2, vl); in test_vmsgt_vx_i8m2_b4()
[all …]
vsll.c
14 return vsll(op1, shift, vl); in test_vsll_vv_i8mf8()
24 return vsll(op1, shift, vl); in test_vsll_vx_i8mf8()
34 return vsll(op1, shift, vl); in test_vsll_vv_i8mf4()
44 return vsll(op1, shift, vl); in test_vsll_vx_i8mf4()
54 return vsll(op1, shift, vl); in test_vsll_vv_i8mf2()
64 return vsll(op1, shift, vl); in test_vsll_vx_i8mf2()
74 return vsll(op1, shift, vl); in test_vsll_vv_i8m1()
84 return vsll(op1, shift, vl); in test_vsll_vx_i8m1()
94 return vsll(op1, shift, vl); in test_vsll_vv_i8m2()
104 return vsll(op1, shift, vl); in test_vsll_vx_i8m2()
[all …]
vsub.c
14 return vsub(op1, op2, vl); in test_vsub_vv_i8mf8()
24 return vsub(op1, op2, vl); in test_vsub_vx_i8mf8()
34 return vsub(op1, op2, vl); in test_vsub_vv_i8mf4()
44 return vsub(op1, op2, vl); in test_vsub_vx_i8mf4()
54 return vsub(op1, op2, vl); in test_vsub_vv_i8mf2()
64 return vsub(op1, op2, vl); in test_vsub_vx_i8mf2()
74 return vsub(op1, op2, vl); in test_vsub_vv_i8m1()
84 return vsub(op1, op2, vl); in test_vsub_vx_i8m1()
94 return vsub(op1, op2, vl); in test_vsub_vv_i8m2()
104 return vsub(op1, op2, vl); in test_vsub_vx_i8m2()
[all …]
vaadd.c
14 return vaadd(op1, op2, vl); in test_vaadd_vv_i8mf8()
24 return vaadd(op1, op2, vl); in test_vaadd_vx_i8mf8()
34 return vaadd(op1, op2, vl); in test_vaadd_vv_i8mf4()
44 return vaadd(op1, op2, vl); in test_vaadd_vx_i8mf4()
54 return vaadd(op1, op2, vl); in test_vaadd_vv_i8mf2()
64 return vaadd(op1, op2, vl); in test_vaadd_vx_i8mf2()
74 return vaadd(op1, op2, vl); in test_vaadd_vv_i8m1()
84 return vaadd(op1, op2, vl); in test_vaadd_vx_i8m1()
94 return vaadd(op1, op2, vl); in test_vaadd_vv_i8m2()
104 return vaadd(op1, op2, vl); in test_vaadd_vx_i8m2()
[all …]
vadd.c
15 return vadd(op1, op2, vl); in test_vadd_vv_i8mf8()
25 return vadd(op1, op2, vl); in test_vadd_vx_i8mf8()
35 return vadd(op1, op2, vl); in test_vadd_vv_i8mf4()
45 return vadd(op1, op2, vl); in test_vadd_vx_i8mf4()
55 return vadd(op1, op2, vl); in test_vadd_vv_i8mf2()
65 return vadd(op1, op2, vl); in test_vadd_vx_i8mf2()
75 return vadd(op1, op2, vl); in test_vadd_vv_i8m1()
85 return vadd(op1, op2, vl); in test_vadd_vx_i8m1()
95 return vadd(op1, op2, vl); in test_vadd_vv_i8m2()
105 return vadd(op1, op2, vl); in test_vadd_vx_i8m2()
[all …]
vand.c
14 return vand(op1, op2, vl); in test_vand_vv_i8mf8()
24 return vand(op1, op2, vl); in test_vand_vx_i8mf8()
34 return vand(op1, op2, vl); in test_vand_vv_i8mf4()
44 return vand(op1, op2, vl); in test_vand_vx_i8mf4()
54 return vand(op1, op2, vl); in test_vand_vv_i8mf2()
64 return vand(op1, op2, vl); in test_vand_vx_i8mf2()
74 return vand(op1, op2, vl); in test_vand_vv_i8m1()
84 return vand(op1, op2, vl); in test_vand_vx_i8m1()
94 return vand(op1, op2, vl); in test_vand_vv_i8m2()
104 return vand(op1, op2, vl); in test_vand_vx_i8m2()
[all …]
vor.c
14 return vor(op1, op2, vl); in test_vor_vv_i8mf8()
24 return vor(op1, op2, vl); in test_vor_vx_i8mf8()
34 return vor(op1, op2, vl); in test_vor_vv_i8mf4()
44 return vor(op1, op2, vl); in test_vor_vx_i8mf4()
54 return vor(op1, op2, vl); in test_vor_vv_i8mf2()
64 return vor(op1, op2, vl); in test_vor_vx_i8mf2()
74 return vor(op1, op2, vl); in test_vor_vv_i8m1()
84 return vor(op1, op2, vl); in test_vor_vx_i8m1()
94 return vor(op1, op2, vl); in test_vor_vv_i8m2()
104 return vor(op1, op2, vl); in test_vor_vx_i8m2()
[all …]
vrem.c
14 return vrem(op1, op2, vl); in test_vrem_vv_i8mf8()
24 return vrem(op1, op2, vl); in test_vrem_vx_i8mf8()
34 return vrem(op1, op2, vl); in test_vrem_vv_i8mf4()
44 return vrem(op1, op2, vl); in test_vrem_vx_i8mf4()
54 return vrem(op1, op2, vl); in test_vrem_vv_i8mf2()
64 return vrem(op1, op2, vl); in test_vrem_vx_i8mf2()
74 return vrem(op1, op2, vl); in test_vrem_vv_i8m1()
84 return vrem(op1, op2, vl); in test_vrem_vx_i8m1()
94 return vrem(op1, op2, vl); in test_vrem_vv_i8m2()
104 return vrem(op1, op2, vl); in test_vrem_vx_i8m2()
[all …]
vsadd.c
14 return vsadd(op1, op2, vl); in test_vsadd_vv_i8mf8()
24 return vsadd(op1, op2, vl); in test_vsadd_vx_i8mf8()
34 return vsadd(op1, op2, vl); in test_vsadd_vv_i8mf4()
44 return vsadd(op1, op2, vl); in test_vsadd_vx_i8mf4()
54 return vsadd(op1, op2, vl); in test_vsadd_vv_i8mf2()
64 return vsadd(op1, op2, vl); in test_vsadd_vx_i8mf2()
74 return vsadd(op1, op2, vl); in test_vsadd_vv_i8m1()
84 return vsadd(op1, op2, vl); in test_vsadd_vx_i8m1()
94 return vsadd(op1, op2, vl); in test_vsadd_vv_i8m2()
104 return vsadd(op1, op2, vl); in test_vsadd_vx_i8m2()
[all …]
vdiv.c
14 return vdiv(op1, op2, vl); in test_vdiv_vv_i8mf8()
24 return vdiv(op1, op2, vl); in test_vdiv_vx_i8mf8()
34 return vdiv(op1, op2, vl); in test_vdiv_vv_i8mf4()
44 return vdiv(op1, op2, vl); in test_vdiv_vx_i8mf4()
54 return vdiv(op1, op2, vl); in test_vdiv_vv_i8mf2()
64 return vdiv(op1, op2, vl); in test_vdiv_vx_i8mf2()
74 return vdiv(op1, op2, vl); in test_vdiv_vv_i8m1()
84 return vdiv(op1, op2, vl); in test_vdiv_vx_i8m1()
94 return vdiv(op1, op2, vl); in test_vdiv_vv_i8m2()
104 return vdiv(op1, op2, vl); in test_vdiv_vx_i8m2()
[all …]
vmin.c
14 return vmin(op1, op2, vl); in test_vmin_vv_i8mf8()
24 return vmin(op1, op2, vl); in test_vmin_vx_i8mf8()
34 return vmin(op1, op2, vl); in test_vmin_vv_i8mf4()
44 return vmin(op1, op2, vl); in test_vmin_vx_i8mf4()
54 return vmin(op1, op2, vl); in test_vmin_vv_i8mf2()
64 return vmin(op1, op2, vl); in test_vmin_vx_i8mf2()
74 return vmin(op1, op2, vl); in test_vmin_vv_i8m1()
84 return vmin(op1, op2, vl); in test_vmin_vx_i8m1()
94 return vmin(op1, op2, vl); in test_vmin_vv_i8m2()
104 return vmin(op1, op2, vl); in test_vmin_vx_i8m2()
[all …]
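
The rvv-intrinsics-overloaded matches above cover the same operations through the overloaded C API: the _vv/_vx and type suffixes are dropped and the compiler selects the intrinsic from the operand types, while the trailing vl argument stays the same. A minimal sketch of the overloaded form, again with a hypothetical helper name:

#include <stddef.h>
#include <riscv_vector.h>

/* Same operation as the earlier sketch, but through the overloaded entry
 * point: vadd() resolves to vadd_vv_i8m1 from the vint8m1_t operand types. */
vint8m1_t add_bytes_overloaded(vint8m1_t op1, vint8m1_t op2, size_t vl) {
  return vadd(op1, op2, vl);
}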
