1; RUN: llc < %s -march=x86 -tailcallopt | grep TAILCALL | count 7
2
3; With -tailcallopt, CodeGen guarantees a tail call optimization
4; for all of these.
5
6declare fastcc i32 @tailcallee(i32 %a1, i32 %a2, i32 %a3, i32 %a4)
7
; Basic case: a fastcc caller tail-calling a fastcc callee, forwarding its
; two incoming arguments twice to fill the callee's four i32 parameters.
; With -tailcallopt this must be emitted as a TAILCALL (counted by the RUN line).
8define fastcc i32 @tailcaller(i32 %in1, i32 %in2) nounwind {
9entry:
10  %tmp11 = tail call fastcc i32 @tailcallee(i32 %in1, i32 %in2, i32 %in1, i32 %in2)
11  ret i32 %tmp11
12}
13
14declare fastcc i8* @alias_callee()
15
; The caller declares a noalias return attribute while the callee does not.
; NOTE(review): presumably this checks that the return-attribute mismatch does
; not block tail-call optimization — the header comment says all cases here
; must be tail-call optimized.
16define fastcc noalias i8* @noalias_caller() nounwind {
17  %p = tail call fastcc i8* @alias_callee()
18  ret i8* %p
19}
20
21declare fastcc noalias i8* @noalias_callee()
22
; Mirror of @noalias_caller: here the callee (and the call site) carry the
; noalias return attribute while the caller's return type does not. The tail
; call must still be optimized despite the attribute asymmetry.
23define fastcc i8* @alias_caller() nounwind {
24  %p = tail call fastcc noalias i8* @noalias_callee()
25  ret i8* %p
26}
27
28declare fastcc i32 @i32_callee()
29
; The call result %p is computed but discarded; the function returns undef
; instead. Tail-call optimization must still fire even though the caller's
; return value is not the callee's result.
30define fastcc i32 @ret_undef() nounwind {
31  %p = tail call fastcc i32 @i32_callee()
32  ret i32 undef
33}
34
35declare fastcc void @does_not_return()
36
; The tail call is followed by 'unreachable' rather than a 'ret' — the callee
; never returns. The call must still be lowered as a TAILCALL even though the
; caller returns i32 and the callee returns void.
37define fastcc i32 @noret() nounwind {
38  tail call fastcc void @does_not_return()
39  unreachable
40}
41
; Self-recursive void tail call forwarding all four unnamed i32 arguments
; (%0..%3) unchanged; must be emitted as a TAILCALL.
42define fastcc void @void_test(i32, i32, i32, i32) {
43  entry:
44   tail call fastcc void @void_test( i32 %0, i32 %1, i32 %2, i32 %3)
45   ret void
46}
47
; Same self-recursive shape as @void_test but with an i1 return value that is
; passed straight through; checks tail-call optimization with a sub-register
; sized (i1) return type.
48define fastcc i1 @i1test(i32, i32, i32, i32) {
49  entry:
50  %4 = tail call fastcc i1 @i1test( i32 %0, i32 %1, i32 %2, i32 %3)
51  ret i1 %4
52}
53