; RUN: llc < %s -march=x86-64 -mcpu=athlon | FileCheck %s
; RUN: llc < %s -march=x86-64 -mcpu=athlon-tbird | FileCheck %s
; RUN: llc < %s -march=x86-64 -mcpu=athlon-4 | FileCheck %s
; RUN: llc < %s -march=x86-64 -mcpu=athlon-xp | FileCheck %s
; RUN: llc < %s -march=x86-64 -mcpu=athlon-mp | FileCheck %s
; RUN: llc < %s -march=x86-64 -mcpu=k8 | FileCheck %s
; RUN: llc < %s -march=x86-64 -mcpu=opteron | FileCheck %s
; RUN: llc < %s -march=x86-64 -mcpu=athlon64 | FileCheck %s
; RUN: llc < %s -march=x86-64 -mcpu=athlon-fx | FileCheck %s
; RUN: llc < %s -march=x86-64 -mcpu=k8-sse3 | FileCheck %s
; RUN: llc < %s -march=x86-64 -mcpu=opteron-sse3 | FileCheck %s
; RUN: llc < %s -march=x86-64 -mcpu=athlon64-sse3 | FileCheck %s
; RUN: llc < %s -march=x86-64 -mcpu=amdfam10 | FileCheck %s
; RUN: llc < %s -march=x86-64 -mcpu=btver1 | FileCheck %s
; RUN: llc < %s -march=x86-64 -mcpu=btver2 | FileCheck %s
; RUN: llc < %s -march=x86-64 -mcpu=bdver1 | FileCheck %s
; RUN: llc < %s -march=x86-64 -mcpu=bdver2 | FileCheck %s

; Verify that for the X86_64 processors that are known to have poor latency
; double precision shift instructions we do not generate 'shld' or 'shrd'
; instructions.

;uint64_t lshift(uint64_t a, uint64_t b, int c)
;{
;    return (a << c) | (b >> (64-c));
;}

; Funnel-style left shift: (a << c) | (b >> (64 - c)).
; The CHECK-NOT ensures the backend does not fold this into 'shld' on the
; CPUs listed in the RUN lines above, where double-shift latency is poor.
define i64 @lshift(i64 %a, i64 %b, i32 %c) nounwind readnone {
entry:
; CHECK-NOT: shld
  %sh_prom = zext i32 %c to i64
  %shl = shl i64 %a, %sh_prom
  %sub = sub nsw i32 64, %c
  %sh_prom1 = zext i32 %sub to i64
  %shr = lshr i64 %b, %sh_prom1
  %or = or i64 %shr, %shl
  ret i64 %or
}

;uint64_t rshift(uint64_t a, uint64_t b, int c)
;{
;    return (a >> c) | (b << (64-c));
;}

; Funnel-style right shift: (a >> c) | (b << (64 - c)).
; The CHECK-NOT ensures the backend does not fold this into 'shrd' on the
; CPUs listed in the RUN lines above, where double-shift latency is poor.
define i64 @rshift(i64 %a, i64 %b, i32 %c) nounwind readnone {
entry:
; CHECK-NOT: shrd
  %sh_prom = zext i32 %c to i64
  %shr = lshr i64 %a, %sh_prom
  %sub = sub nsw i32 64, %c
  %sh_prom1 = zext i32 %sub to i64
  %shl = shl i64 %b, %sh_prom1
  %or = or i64 %shl, %shr
  ret i64 %or
}

