; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -verify-machineinstrs -mtriple=powerpc64le-unknown-linux-gnu \
; RUN:   -mcpu=pwr10 -ppc-asm-full-reg-names -ppc-vsr-nums-as-vr < %s | \
; RUN:   FileCheck %s

; These test cases exercise the bit manipulation operations introduced in Power10.

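; For reference, the intrinsics exercised below map to the Power10 (Power ISA 3.1)
; bit manipulation instructions checked in the assertions:
;   vpdepd/vpextd, pdepd/pextd     - parallel bits deposit/extract doubleword
;   vcfuged, cfuged                - centrifuge doubleword
;   vgnb                           - vector gather every Nth bit
;   xxeval                         - VSX vector evaluate (programmable logical op)
;   vclzdm/vctzdm, cntlzdm/cnttzdm - count leading/trailing zeros doubleword
;                                    under bit mask
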
declare <2 x i64> @llvm.ppc.altivec.vpdepd(<2 x i64>, <2 x i64>)
declare <2 x i64> @llvm.ppc.altivec.vpextd(<2 x i64>, <2 x i64>)
declare i64 @llvm.ppc.pdepd(i64, i64)
declare i64 @llvm.ppc.pextd(i64, i64)
declare <2 x i64> @llvm.ppc.altivec.vcfuged(<2 x i64>, <2 x i64>)
declare i64 @llvm.ppc.cfuged(i64, i64)
declare i64 @llvm.ppc.altivec.vgnb(<1 x i128>, i32)
declare <2 x i64> @llvm.ppc.vsx.xxeval(<2 x i64>, <2 x i64>, <2 x i64>, i32)
declare <2 x i64> @llvm.ppc.altivec.vclzdm(<2 x i64>, <2 x i64>)
declare <2 x i64> @llvm.ppc.altivec.vctzdm(<2 x i64>, <2 x i64>)
declare i64 @llvm.ppc.cntlzdm(i64, i64)
declare i64 @llvm.ppc.cnttzdm(i64, i64)

define <2 x i64> @test_vpdepd(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: test_vpdepd:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vpdepd v2, v2, v3
; CHECK-NEXT:    blr
entry:
  %tmp = tail call <2 x i64> @llvm.ppc.altivec.vpdepd(<2 x i64> %a, <2 x i64> %b)
  ret <2 x i64> %tmp
}

define <2 x i64> @test_vpextd(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: test_vpextd:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vpextd v2, v2, v3
; CHECK-NEXT:    blr
entry:
  %tmp = tail call <2 x i64> @llvm.ppc.altivec.vpextd(<2 x i64> %a, <2 x i64> %b)
  ret <2 x i64> %tmp
}

define i64 @test_pdepd(i64 %a, i64 %b) {
; CHECK-LABEL: test_pdepd:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    pdepd r3, r3, r4
; CHECK-NEXT:    blr
entry:
  %tmp = tail call i64 @llvm.ppc.pdepd(i64 %a, i64 %b)
  ret i64 %tmp
}

define i64 @test_pextd(i64 %a, i64 %b) {
; CHECK-LABEL: test_pextd:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    pextd r3, r3, r4
; CHECK-NEXT:    blr
entry:
  %tmp = tail call i64 @llvm.ppc.pextd(i64 %a, i64 %b)
  ret i64 %tmp
}

define <2 x i64> @test_vcfuged(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: test_vcfuged:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vcfuged v2, v2, v3
; CHECK-NEXT:    blr
entry:
  %tmp = tail call <2 x i64> @llvm.ppc.altivec.vcfuged(<2 x i64> %a, <2 x i64> %b)
  ret <2 x i64> %tmp
}

define i64 @test_cfuged(i64 %a, i64 %b) {
; CHECK-LABEL: test_cfuged:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    cfuged r3, r3, r4
; CHECK-NEXT:    blr
entry:
  %tmp = tail call i64 @llvm.ppc.cfuged(i64 %a, i64 %b)
  ret i64 %tmp
}

define i64 @test_vgnb_1(<1 x i128> %a) {
; CHECK-LABEL: test_vgnb_1:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vgnb r3, v2, 2
; CHECK-NEXT:    blr
entry:
  %tmp = tail call i64 @llvm.ppc.altivec.vgnb(<1 x i128> %a, i32 2)
  ret i64 %tmp
}

define i64 @test_vgnb_2(<1 x i128> %a) {
; CHECK-LABEL: test_vgnb_2:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vgnb r3, v2, 7
; CHECK-NEXT:    blr
entry:
  %tmp = tail call i64 @llvm.ppc.altivec.vgnb(<1 x i128> %a, i32 7)
  ret i64 %tmp
}

define i64 @test_vgnb_3(<1 x i128> %a) {
; CHECK-LABEL: test_vgnb_3:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vgnb r3, v2, 5
; CHECK-NEXT:    blr
entry:
  %tmp = tail call i64 @llvm.ppc.altivec.vgnb(<1 x i128> %a, i32 5)
  ret i64 %tmp
}

define <2 x i64> @test_xxeval(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c) {
; CHECK-LABEL: test_xxeval:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    xxeval v2, v2, v3, v4, 255
; CHECK-NEXT:    blr
entry:
  %tmp = tail call <2 x i64> @llvm.ppc.vsx.xxeval(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c, i32 255)
  ret <2 x i64> %tmp
}

define <2 x i64> @test_vclzdm(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: test_vclzdm:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vclzdm v2, v2, v3
; CHECK-NEXT:    blr
entry:
  %tmp = tail call <2 x i64> @llvm.ppc.altivec.vclzdm(<2 x i64> %a, <2 x i64> %b)
  ret <2 x i64> %tmp
}

define <2 x i64> @test_vctzdm(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: test_vctzdm:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vctzdm v2, v2, v3
; CHECK-NEXT:    blr
entry:
  %tmp = tail call <2 x i64> @llvm.ppc.altivec.vctzdm(<2 x i64> %a, <2 x i64> %b)
  ret <2 x i64> %tmp
}

define i64 @test_cntlzdm(i64 %a, i64 %b) {
; CHECK-LABEL: test_cntlzdm:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    cntlzdm r3, r3, r4
; CHECK-NEXT:    blr
entry:
  %tmp = tail call i64 @llvm.ppc.cntlzdm(i64 %a, i64 %b)
  ret i64 %tmp
}

define i64 @test_cnttzdm(i64 %a, i64 %b) {
; CHECK-LABEL: test_cnttzdm:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    cnttzdm r3, r3, r4
; CHECK-NEXT:    blr
entry:
  %tmp = tail call i64 @llvm.ppc.cnttzdm(i64 %a, i64 %b)
  ret i64 %tmp
}