1; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
2; RUN: opt -inline -o - -S %s | FileCheck %s
3; RUN: opt -passes='cgscc(inline)' %s -S | FileCheck %s
4; RUN: opt -always-inline -o - -S %s | FileCheck %s
5; RUN: opt -passes=always-inline -o - -S %s | FileCheck %s
6
7declare dso_local void @foo(i8*)
8
9; Not interesting to test.
; Fixture callee carrying sspstrong (attribute group #0). It spills its
; argument, then makes a dynamically-sized i8 alloca and hands it to @foo;
; the dynamic alloca is why the inliner must insert stacksave/stackrestore
; whenever this body does get inlined (see the CHECK lines in @nossp).
define dso_local void @ssp(i64 %0) #0 {
  %2 = alloca i64, align 8
  store i64 %0, i64* %2, align 8
  %3 = load i64, i64* %2, align 8
  %4 = alloca i8, i64 %3, align 16 ; dynamically sized -> needs stack save/restore when inlined
  call void @foo(i8* %4)
  ret void
}
18
19; Not interesting to test.
; Same body as @ssp, but attribute group #1 adds alwaysinline on top of
; sspstrong, so the inliner must inline it even into callers whose
; stack-protector attributes do not match.
define dso_local void @ssp_alwaysinline(i64 %0) #1 {
  %2 = alloca i64, align 8
  store i64 %0, i64* %2, align 8
  %3 = load i64, i64* %2, align 8
  %4 = alloca i8, i64 %3, align 16 ; dynamically sized -> needs stack save/restore when inlined
  call void @foo(i8* %4)
  ret void
}
28
; @ssp should not be inlined due to its mismatched stack-protector attribute.
; @ssp_alwaysinline should be inlined because alwaysinline overrides that check.
; Caller with no stack-protector attribute. The autogenerated assertions below
; show the expected result: the call to @ssp survives un-inlined, while
; @ssp_alwaysinline's body is spliced in, wrapped in stacksave/stackrestore
; and lifetime markers because of its dynamic alloca.
define dso_local void @nossp() {
; CHECK-LABEL: @nossp(
; CHECK-NEXT:    [[TMP1:%.*]] = alloca i64, align 8
; CHECK-NEXT:    call void @ssp(i64 1024)
; CHECK-NEXT:    [[SAVEDSTACK:%.*]] = call i8* @llvm.stacksave()
; CHECK-NEXT:    [[TMP2:%.*]] = bitcast i64* [[TMP1]] to i8*
; CHECK-NEXT:    call void @llvm.lifetime.start.p0i8(i64 8, i8* [[TMP2]])
; CHECK-NEXT:    store i64 1024, i64* [[TMP1]], align 8
; CHECK-NEXT:    [[TMP3:%.*]] = load i64, i64* [[TMP1]], align 8
; CHECK-NEXT:    [[TMP4:%.*]] = alloca i8, i64 [[TMP3]], align 16
; CHECK-NEXT:    call void @foo(i8* [[TMP4]])
; CHECK-NEXT:    [[TMP5:%.*]] = bitcast i64* [[TMP1]] to i8*
; CHECK-NEXT:    call void @llvm.lifetime.end.p0i8(i64 8, i8* [[TMP5]])
; CHECK-NEXT:    call void @llvm.stackrestore(i8* [[SAVEDSTACK]])
; CHECK-NEXT:    ret void
;
  call void @ssp(i64 1024) ; not inlined: sspstrong callee into no-ssp caller
  call void @ssp_alwaysinline(i64 1024) ; inlined: alwaysinline wins over the ssp mismatch
  ret void
}
51
52; This is the same case as @nossp above. That the caller has alwaysinline is
53; irrelevant.  Not interesting to test.
; Same call pattern as @nossp, but this caller is itself alwaysinline
; (attribute group #2, no ssp) so it in turn gets inlined into
; @nossp_caller below.
define dso_local void @nossp_alwaysinline() #2 {
  call void @ssp(i64 1024) ; not inlined: sspstrong callee into no-ssp caller
  call void @ssp_alwaysinline(i64 1024) ; inlined: alwaysinline wins over the ssp mismatch
  ret void
}
59
; @nossp_alwaysinline should be inlined due to alwaysinline.
; @ssp should not be inlined due to its mismatched stack-protector attribute.
; @ssp_alwaysinline should be inlined because alwaysinline overrides that check.
; Two levels of inlining: @nossp_alwaysinline is inlined here, and within
; that inlined body @ssp_alwaysinline is inlined too (note the suffixed
; [[SAVEDSTACK_I]] from the inner inlining), while the call to @ssp is
; still left alone.
define dso_local void @nossp_caller() {
; CHECK-LABEL: @nossp_caller(
; CHECK-NEXT:    [[TMP1:%.*]] = alloca i64, align 8
; CHECK-NEXT:    [[SAVEDSTACK:%.*]] = call i8* @llvm.stacksave()
; CHECK-NEXT:    call void @ssp(i64 1024)
; CHECK-NEXT:    [[SAVEDSTACK_I:%.*]] = call i8* @llvm.stacksave()
; CHECK-NEXT:    [[TMP2:%.*]] = bitcast i64* [[TMP1]] to i8*
; CHECK-NEXT:    call void @llvm.lifetime.start.p0i8(i64 8, i8* [[TMP2]])
; CHECK-NEXT:    store i64 1024, i64* [[TMP1]], align 8
; CHECK-NEXT:    [[TMP3:%.*]] = load i64, i64* [[TMP1]], align 8
; CHECK-NEXT:    [[TMP4:%.*]] = alloca i8, i64 [[TMP3]], align 16
; CHECK-NEXT:    call void @foo(i8* [[TMP4]])
; CHECK-NEXT:    [[TMP5:%.*]] = bitcast i64* [[TMP1]] to i8*
; CHECK-NEXT:    call void @llvm.lifetime.end.p0i8(i64 8, i8* [[TMP5]])
; CHECK-NEXT:    call void @llvm.stackrestore(i8* [[SAVEDSTACK_I]])
; CHECK-NEXT:    call void @llvm.stackrestore(i8* [[SAVEDSTACK]])
; CHECK-NEXT:    ret void
;
  call void @nossp_alwaysinline() ; inlined (alwaysinline), pulling @ssp_alwaysinline along
  ret void
}
84
; @nossp should not be inlined due to its mismatched stack-protector attribute.
; Mismatch in the opposite direction: an sspstrong caller must not inline a
; callee with no stack protector, so the call to @nossp must remain.
define dso_local void @ssp2() #0 {
; CHECK-LABEL: @ssp2(
; CHECK-NEXT:    call void @nossp()
; CHECK-NEXT:    ret void
;
  call void @nossp() ; not inlined: no-ssp callee into sspstrong caller
  ret void
}
94
; Attribute groups used by the functions above.
attributes #0 = { sspstrong } ; stack protector only
attributes #1 = { sspstrong alwaysinline } ; stack protector + mandatory inlining
attributes #2 = { alwaysinline} ; mandatory inlining, no stack protector
98