; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi | FileCheck %s --check-prefixes=CHECK,BEXTR-SLOW,BMI1-SLOW
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi,+bmi2 | FileCheck %s --check-prefixes=CHECK,BEXTR-SLOW,BMI2-SLOW
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi,+fast-bextr | FileCheck %s --check-prefixes=CHECK,BEXTR-FAST
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi,+bmi2,+fast-bextr | FileCheck %s --check-prefixes=CHECK,BEXTR-FAST

; Declaration of the 64-bit BEXTR (bit field extract) intrinsic;
; second operand packs start bit (bits 7:0) and length (bits 15:8).
declare i64 @llvm.x86.bmi.bextr.64(i64, i64)

; A direct call of the intrinsic should lower to a single bextrq.
define i64 @bextr64(i64 %x, i64 %y)   {
; CHECK-LABEL: bextr64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    bextrq %rsi, %rdi, %rax
; CHECK-NEXT:    retq
  %tmp = tail call i64 @llvm.x86.bmi.bextr.64(i64 %x, i64 %y)
  ret i64 %tmp
}

; (x >> 4) & 0xFFF: formed into a bextr with an immediate-in-register control
; only when BEXTR is fast; otherwise plain shr+and is kept.
define i64 @bextr64b(i64 %x)  uwtable  ssp {
; BEXTR-SLOW-LABEL: bextr64b:
; BEXTR-SLOW:       # %bb.0:
; BEXTR-SLOW-NEXT:    movq %rdi, %rax
; BEXTR-SLOW-NEXT:    shrl $4, %eax
; BEXTR-SLOW-NEXT:    andl $4095, %eax # imm = 0xFFF
; BEXTR-SLOW-NEXT:    retq
;
; BEXTR-FAST-LABEL: bextr64b:
; BEXTR-FAST:       # %bb.0:
; BEXTR-FAST-NEXT:    movl $3076, %eax # imm = 0xC04
; BEXTR-FAST-NEXT:    bextrl %eax, %edi, %eax
; BEXTR-FAST-NEXT:    retq
  %1 = lshr i64 %x, 4
  %2 = and i64 %1, 4095
  ret i64 %2
}

; Make sure we still use the AH subreg trick to extract 15:8
; ((x >> 8) & 0xFF should stay a movzbl of %ah, never a bextr).
define i64 @bextr64_subreg(i64 %x)  uwtable  ssp {
; CHECK-LABEL: bextr64_subreg:
; CHECK:       # %bb.0:
; CHECK-NEXT:    movq %rdi, %rax
; CHECK-NEXT:    movzbl %ah, %eax
; CHECK-NEXT:    retq
  %1 = lshr i64 %x, 8
  %2 = and i64 %1, 255
  ret i64 %2
}

; Same pattern as bextr64b but with a loaded operand; the fast-bextr form
; should fold the load into the bextrl memory operand.
define i64 @bextr64b_load(i64* %x) {
; BEXTR-SLOW-LABEL: bextr64b_load:
; BEXTR-SLOW:       # %bb.0:
; BEXTR-SLOW-NEXT:    movl (%rdi), %eax
; BEXTR-SLOW-NEXT:    shrl $4, %eax
; BEXTR-SLOW-NEXT:    andl $4095, %eax # imm = 0xFFF
; BEXTR-SLOW-NEXT:    retq
;
; BEXTR-FAST-LABEL: bextr64b_load:
; BEXTR-FAST:       # %bb.0:
; BEXTR-FAST-NEXT:    movl $3076, %eax # imm = 0xC04
; BEXTR-FAST-NEXT:    bextrl %eax, (%rdi), %eax
; BEXTR-FAST-NEXT:    retq
  %1 = load i64, i64* %x, align 8
  %2 = lshr i64 %1, 4
  %3 = and i64 %2, 4095
  ret i64 %3
}

; PR34042
; The i32 control operand is sign-extended; only the implicit subreg
; promotion (the kill comment) is expected, not a separate extend.
define i64 @bextr64c(i64 %x, i32 %y) {
; CHECK-LABEL: bextr64c:
; CHECK:       # %bb.0:
; CHECK-NEXT:    # kill: def $esi killed $esi def $rsi
; CHECK-NEXT:    bextrq %rsi, %rdi, %rax
; CHECK-NEXT:    retq
  %tmp0 = sext i32 %y to i64
  %tmp1 = tail call i64 @llvm.x86.bmi.bextr.64(i64 %x, i64 %tmp0)
  ret i64 %tmp1
}

; (a >> 2) & 0x1FFFFFFFF: a 33-bit mask too wide for a 32-bit AND immediate.
; BMI2 prefers bzhi+shr, BMI1 uses shr+bextr, and fast-bextr folds the
; whole shift+mask into one bextrq.
define i64 @bextr64d(i64 %a) {
; BMI1-SLOW-LABEL: bextr64d:
; BMI1-SLOW:       # %bb.0: # %entry
; BMI1-SLOW-NEXT:    shrq $2, %rdi
; BMI1-SLOW-NEXT:    movl $8448, %eax # imm = 0x2100
; BMI1-SLOW-NEXT:    bextrq %rax, %rdi, %rax
; BMI1-SLOW-NEXT:    retq
;
; BMI2-SLOW-LABEL: bextr64d:
; BMI2-SLOW:       # %bb.0: # %entry
; BMI2-SLOW-NEXT:    movl $35, %eax
; BMI2-SLOW-NEXT:    bzhiq %rax, %rdi, %rax
; BMI2-SLOW-NEXT:    shrq $2, %rax
; BMI2-SLOW-NEXT:    retq
;
; BEXTR-FAST-LABEL: bextr64d:
; BEXTR-FAST:       # %bb.0: # %entry
; BEXTR-FAST-NEXT:    movl $8450, %eax # imm = 0x2102
; BEXTR-FAST-NEXT:    bextrq %rax, %rdi, %rax
; BEXTR-FAST-NEXT:    retq
entry:
  %shr = lshr i64 %a, 2
  %and = and i64 %shr, 8589934591
  ret i64 %and
}

; Same 33-bit shift+mask as bextr64d but with a loaded operand; BMI2 and
; fast-bextr should fold the load into bzhiq/bextrq respectively.
define i64 @bextr64d_load(i64* %aptr) {
; BMI1-SLOW-LABEL: bextr64d_load:
; BMI1-SLOW:       # %bb.0: # %entry
; BMI1-SLOW-NEXT:    movq (%rdi), %rax
; BMI1-SLOW-NEXT:    shrq $2, %rax
; BMI1-SLOW-NEXT:    movl $8448, %ecx # imm = 0x2100
; BMI1-SLOW-NEXT:    bextrq %rcx, %rax, %rax
; BMI1-SLOW-NEXT:    retq
;
; BMI2-SLOW-LABEL: bextr64d_load:
; BMI2-SLOW:       # %bb.0: # %entry
; BMI2-SLOW-NEXT:    movl $35, %eax
; BMI2-SLOW-NEXT:    bzhiq %rax, (%rdi), %rax
; BMI2-SLOW-NEXT:    shrq $2, %rax
; BMI2-SLOW-NEXT:    retq
;
; BEXTR-FAST-LABEL: bextr64d_load:
; BEXTR-FAST:       # %bb.0: # %entry
; BEXTR-FAST-NEXT:    movl $8450, %eax # imm = 0x2102
; BEXTR-FAST-NEXT:    bextrq %rax, (%rdi), %rax
; BEXTR-FAST-NEXT:    retq
entry:
  %a = load i64, i64* %aptr, align 8
  %shr = lshr i64 %a, 2
  %and = and i64 %shr, 8589934591
  ret i64 %and
}

; Negative test: 0x1FFFFFFFE clears bit 0, so it is not a contiguous
; low-bit mask and must NOT be turned into a bextr on any subtarget.
define i64 @non_bextr64(i64 %x) {
; CHECK-LABEL: non_bextr64:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    shrq $2, %rdi
; CHECK-NEXT:    movabsq $8589934590, %rax # imm = 0x1FFFFFFFE
; CHECK-NEXT:    andq %rdi, %rax
; CHECK-NEXT:    retq
entry:
  %shr = lshr i64 %x, 2
  %and = and i64 %shr, 8589934590
  ret i64 %and
}
