; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown | FileCheck %s -check-prefix=X64
; RUN: llc < %s -mtriple=i686-unknown-unknown   | FileCheck %s -check-prefix=X32

; With -tailcallopt, CodeGen guarantees a tail call optimization
; for all of these.

declare dso_local tailcc i32 @tailcallee(i32 %a1, i32 %a2, i32 %a3, i32 %a4)

; Basic case: tailcc caller forwards its two arguments (duplicated to four)
; to a tailcc callee; the call must be emitted as a jmp (TAILCALL).
define dso_local tailcc i32 @tailcaller(i32 %in1, i32 %in2) nounwind {
; X64-LABEL: tailcaller:
; X64:       # %bb.0: # %entry
; X64-NEXT:    pushq %rax
; X64-NEXT:    movl %edi, %edx
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    popq %rax
; X64-NEXT:    jmp tailcallee # TAILCALL
;
; X32-LABEL: tailcaller:
; X32:       # %bb.0: # %entry
; X32-NEXT:    subl $16, %esp
; X32-NEXT:    movl %ecx, {{[0-9]+}}(%esp)
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl %edx, {{[0-9]+}}(%esp)
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp)
; X32-NEXT:    addl $8, %esp
; X32-NEXT:    jmp tailcallee # TAILCALL
entry:
  %tmp11 = tail call tailcc i32 @tailcallee(i32 %in1, i32 %in2, i32 %in1, i32 %in2)
  ret i32 %tmp11
}

declare dso_local tailcc i8* @alias_callee()

; A noalias return attribute on the caller must not block tail-call lowering
; when the callee's return lacks the attribute.
define tailcc noalias i8* @noalias_caller() nounwind {
; X64-LABEL: noalias_caller:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rax
; X64-NEXT:    popq %rax
; X64-NEXT:    jmp alias_callee # TAILCALL
;
; X32-LABEL: noalias_caller:
; X32:       # %bb.0:
; X32-NEXT:    jmp alias_callee # TAILCALL
  %p = tail call tailcc i8* @alias_callee()
  ret i8* %p
}

declare dso_local tailcc noalias i8* @noalias_callee()

; Converse of the case above: callee returns noalias, caller does not;
; the tail call must still be lowered to a jmp.
define dso_local tailcc i8* @alias_caller() nounwind {
; X64-LABEL: alias_caller:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rax
; X64-NEXT:    popq %rax
; X64-NEXT:    jmp noalias_callee # TAILCALL
;
; X32-LABEL: alias_caller:
; X32:       # %bb.0:
; X32-NEXT:    jmp noalias_callee # TAILCALL
  %p = tail call tailcc noalias i8* @noalias_callee()
  ret i8* %p
}

declare dso_local tailcc i32 @i32_callee()

; Returning undef after the call (rather than the call's result) must not
; prevent tail-call lowering.
define dso_local tailcc i32 @ret_undef() nounwind {
; X64-LABEL: ret_undef:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rax
; X64-NEXT:    popq %rax
; X64-NEXT:    jmp i32_callee # TAILCALL
;
; X32-LABEL: ret_undef:
; X32:       # %bb.0:
; X32-NEXT:    jmp i32_callee # TAILCALL
  %p = tail call tailcc i32 @i32_callee()
  ret i32 undef
}

declare dso_local tailcc void @does_not_return()

; A tail call followed by unreachable must still be lowered as a tail call.
define dso_local tailcc i32 @noret() nounwind {
; X64-LABEL: noret:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rax
; X64-NEXT:    popq %rax
; X64-NEXT:    jmp does_not_return # TAILCALL
;
; X32-LABEL: noret:
; X32:       # %bb.0:
; X32-NEXT:    jmp does_not_return # TAILCALL
  tail call tailcc void @does_not_return()
  unreachable
}

; Self-recursive tailcc call with a void return and four stack/register
; arguments; must be lowered as a tail call (jmp void_test).
define dso_local tailcc void @void_test(i32, i32, i32, i32) {
; X64-LABEL: void_test:
; X64:       # %bb.0: # %entry
; X64-NEXT:    pushq %rax
; X64-NEXT:    .cfi_def_cfa_offset 16
; X64-NEXT:    popq %rax
; X64-NEXT:    .cfi_def_cfa_offset 8
; X64-NEXT:    jmp void_test # TAILCALL
;
; X32-LABEL: void_test:
; X32:       # %bb.0: # %entry
; X32-NEXT:    pushl %esi
; X32-NEXT:    .cfi_def_cfa_offset 8
; X32-NEXT:    subl $8, %esp
; X32-NEXT:    .cfi_def_cfa_offset 16
; X32-NEXT:    .cfi_offset %esi, -8
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl %esi, {{[0-9]+}}(%esp)
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp)
; X32-NEXT:    addl $8, %esp
; X32-NEXT:    .cfi_def_cfa_offset 8
; X32-NEXT:    popl %esi
; X32-NEXT:    .cfi_def_cfa_offset 4
; X32-NEXT:    jmp void_test # TAILCALL
  entry:
   tail call tailcc void @void_test( i32 %0, i32 %1, i32 %2, i32 %3)
   ret void
}

; Same self-recursive shape as void_test, but with an i1 return value
; forwarded from the recursive call; must still lower to a tail call.
define dso_local tailcc i1 @i1test(i32, i32, i32, i32) {
; X64-LABEL: i1test:
; X64:       # %bb.0: # %entry
; X64-NEXT:    pushq %rax
; X64-NEXT:    .cfi_def_cfa_offset 16
; X64-NEXT:    popq %rax
; X64-NEXT:    .cfi_def_cfa_offset 8
; X64-NEXT:    jmp i1test # TAILCALL
;
; X32-LABEL: i1test:
; X32:       # %bb.0: # %entry
; X32-NEXT:    pushl %esi
; X32-NEXT:    .cfi_def_cfa_offset 8
; X32-NEXT:    subl $8, %esp
; X32-NEXT:    .cfi_def_cfa_offset 16
; X32-NEXT:    .cfi_offset %esi, -8
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl %esi, {{[0-9]+}}(%esp)
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp)
; X32-NEXT:    addl $8, %esp
; X32-NEXT:    .cfi_def_cfa_offset 8
; X32-NEXT:    popl %esi
; X32-NEXT:    .cfi_def_cfa_offset 4
; X32-NEXT:    jmp i1test # TAILCALL
  entry:
  %4 = tail call tailcc i1 @i1test( i32 %0, i32 %1, i32 %2, i32 %3)
  ret i1 %4
}
