xref: /openbsd/gnu/usr.bin/gcc/gcc/sibcall.c (revision c87b03e5)
1*c87b03e5Sespie /* Generic sibling call optimization support
2*c87b03e5Sespie    Copyright (C) 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
3*c87b03e5Sespie 
4*c87b03e5Sespie This file is part of GCC.
5*c87b03e5Sespie 
6*c87b03e5Sespie GCC is free software; you can redistribute it and/or modify it under
7*c87b03e5Sespie the terms of the GNU General Public License as published by the Free
8*c87b03e5Sespie Software Foundation; either version 2, or (at your option) any later
9*c87b03e5Sespie version.
10*c87b03e5Sespie 
11*c87b03e5Sespie GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12*c87b03e5Sespie WARRANTY; without even the implied warranty of MERCHANTABILITY or
13*c87b03e5Sespie FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14*c87b03e5Sespie for more details.
15*c87b03e5Sespie 
16*c87b03e5Sespie You should have received a copy of the GNU General Public License
17*c87b03e5Sespie along with GCC; see the file COPYING.  If not, write to the Free
18*c87b03e5Sespie Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19*c87b03e5Sespie 02111-1307, USA.  */
20*c87b03e5Sespie 
21*c87b03e5Sespie #include "config.h"
22*c87b03e5Sespie #include "system.h"
23*c87b03e5Sespie 
24*c87b03e5Sespie #include "rtl.h"
25*c87b03e5Sespie #include "regs.h"
26*c87b03e5Sespie #include "function.h"
27*c87b03e5Sespie #include "hard-reg-set.h"
28*c87b03e5Sespie #include "flags.h"
29*c87b03e5Sespie #include "insn-config.h"
30*c87b03e5Sespie #include "recog.h"
31*c87b03e5Sespie #include "basic-block.h"
32*c87b03e5Sespie #include "output.h"
33*c87b03e5Sespie #include "except.h"
34*c87b03e5Sespie #include "tree.h"
35*c87b03e5Sespie 
/* In case alternate_exit_block contains copy from pseudo, to return value,
   record the pseudo here.  In such case the pseudo must be set to function
   return in the sibcall sequence.  */
static rtx return_value_pseudo;

/* Forward declarations for the file-local helpers defined below.  */
static int identify_call_return_value	PARAMS ((rtx, rtx *, rtx *));
static rtx skip_copy_to_return_value	PARAMS ((rtx));
static rtx skip_use_of_return_value	PARAMS ((rtx, enum rtx_code));
static rtx skip_stack_adjustment	PARAMS ((rtx));
static rtx skip_pic_restore		PARAMS ((rtx));
static rtx skip_jump_insn		PARAMS ((rtx));
static int call_ends_block_p		PARAMS ((rtx, rtx));
static int uses_addressof		PARAMS ((rtx));
static int sequence_uses_addressof	PARAMS ((rtx));
static void purge_reg_equiv_notes	PARAMS ((void));
static void purge_mem_unchanging_flag	PARAMS ((rtx));
static rtx skip_unreturned_value 	PARAMS ((rtx));
53*c87b03e5Sespie 
/* Examine a CALL_PLACEHOLDER pattern and determine where the call's
   return value is located.  P_HARD_RETURN receives the hard register
   that the function used; P_SOFT_RETURN receives the pseudo register
   that the sequence used.  Return nonzero if the values were located.  */

static int
identify_call_return_value (cp, p_hard_return, p_soft_return)
     rtx cp;
     rtx *p_hard_return, *p_soft_return;
{
  rtx insn, set, hard, soft;

  /* XEXP (cp, 0) is the "normal call" insn sequence of the placeholder.  */
  insn = XEXP (cp, 0);
  /* Search backward through the "normal" call sequence to the CALL insn.  */
  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);
  while (GET_CODE (insn) != CALL_INSN)
    insn = PREV_INSN (insn);

  /* Assume the pattern is (set (dest) (call ...)), or that the first
     member of a parallel is.  This is the hard return register used
     by the function.  */
  if (GET_CODE (PATTERN (insn)) == SET
      && GET_CODE (SET_SRC (PATTERN (insn))) == CALL)
    hard = SET_DEST (PATTERN (insn));
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (PATTERN (insn), 0, 0))) == CALL)
    hard = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
  else
    return 0;

  /* If we didn't get a single hard register (e.g. a parallel), give up.  */
  if (GET_CODE (hard) != REG)
    return 0;

  /* Stack adjustment done after call may appear here.  */
  insn = skip_stack_adjustment (insn);
  if (! insn)
    return 0;

  /* Restore of GP register may appear here.  */
  insn = skip_pic_restore (insn);
  if (! insn)
    return 0;

  /* If there's nothing after, there's no soft return value.  */
  insn = NEXT_INSN (insn);
  if (! insn)
    return 0;

  /* We're looking for a source of the hard return register.  The copy
     into a pseudo is what makes the value usable after the call.  */
  set = single_set (insn);
  if (! set || SET_SRC (set) != hard)
    return 0;

  soft = SET_DEST (set);
  insn = NEXT_INSN (insn);

  /* Allow this first destination to be copied to a second register,
     as might happen if the first register wasn't the particular pseudo
     we'd been expecting.  */
  if (insn
      && (set = single_set (insn)) != NULL_RTX
      && SET_SRC (set) == soft)
    {
      soft = SET_DEST (set);
      insn = NEXT_INSN (insn);
    }

  /* Don't fool with anything but pseudo registers.  */
  if (GET_CODE (soft) != REG || REGNO (soft) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* This value must not be modified before the end of the sequence.  */
  if (reg_set_between_p (soft, insn, NULL_RTX))
    return 0;

  *p_hard_return = hard;
  *p_soft_return = soft;

  return 1;
}
137*c87b03e5Sespie 
/* If the first real insn after ORIG_INSN copies to this function's
   return value from RETVAL, then return the insn which performs the
   copy.  Otherwise return ORIG_INSN.  */

static rtx
skip_copy_to_return_value (orig_insn)
     rtx orig_insn;
{
  rtx insn, set = NULL_RTX;
  rtx hardret, softret;

  /* If there is no return value, we have nothing to do.  */
  if (! identify_call_return_value (PATTERN (orig_insn), &hardret, &softret))
    return orig_insn;

  insn = next_nonnote_insn (orig_insn);
  if (! insn)
    return orig_insn;

  set = single_set (insn);
  if (! set)
    return orig_insn;

  /* When the exit block copies a pseudo into the hard return register,
     the call's value must land in exactly that pseudo.  */
  if (return_value_pseudo)
    {
      if (SET_DEST (set) == return_value_pseudo
	  && SET_SRC (set) == softret)
	return insn;
      return orig_insn;
    }

  /* The destination must be the same as the called function's return
     value to ensure that any return value is put in the same place by the
     current function and the function we're calling.

     Further, the source must be the same as the pseudo into which the
     called function's return value was copied.  Otherwise we're returning
     some other value.  */

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) (N)
#endif

  if (SET_DEST (set) == current_function_return_rtx
      && REG_P (SET_DEST (set))
      && OUTGOING_REGNO (REGNO (SET_DEST (set))) == REGNO (hardret)
      && SET_SRC (set) == softret)
    return insn;

  /* Recognize the situation when the called function's return value
     is copied in two steps: first into an intermediate pseudo, then
     the into the calling functions return value register.  */

  if (REG_P (SET_DEST (set))
      && SET_SRC (set) == softret)
    {
      rtx x = SET_DEST (set);

      insn = next_nonnote_insn (insn);
      if (! insn)
	return orig_insn;

      set = single_set (insn);
      if (! set)
	return orig_insn;

      if (SET_DEST (set) == current_function_return_rtx
	  && REG_P (SET_DEST (set))
	  && OUTGOING_REGNO (REGNO (SET_DEST (set))) == REGNO (hardret)
	  && SET_SRC (set) == x)
	return insn;
    }

  /* It did not look like a copy of the return value, so return the
     same insn we were passed.  */
  return orig_insn;
}
215*c87b03e5Sespie 
216*c87b03e5Sespie /* If the first real insn after ORIG_INSN is a CODE of this function's return
217*c87b03e5Sespie    value, return insn.  Otherwise return ORIG_INSN.  */
218*c87b03e5Sespie 
219*c87b03e5Sespie static rtx
skip_use_of_return_value(orig_insn,code)220*c87b03e5Sespie skip_use_of_return_value (orig_insn, code)
221*c87b03e5Sespie      rtx orig_insn;
222*c87b03e5Sespie      enum rtx_code code;
223*c87b03e5Sespie {
224*c87b03e5Sespie   rtx insn;
225*c87b03e5Sespie 
226*c87b03e5Sespie   insn = next_nonnote_insn (orig_insn);
227*c87b03e5Sespie 
228*c87b03e5Sespie   if (insn
229*c87b03e5Sespie       && GET_CODE (insn) == INSN
230*c87b03e5Sespie       && GET_CODE (PATTERN (insn)) == code
231*c87b03e5Sespie       && (XEXP (PATTERN (insn), 0) == current_function_return_rtx
232*c87b03e5Sespie 	  || XEXP (PATTERN (insn), 0) == const0_rtx))
233*c87b03e5Sespie     return insn;
234*c87b03e5Sespie 
235*c87b03e5Sespie   return orig_insn;
236*c87b03e5Sespie }
237*c87b03e5Sespie 
238*c87b03e5Sespie /* In case function does not return value,  we get clobber of pseudo followed
239*c87b03e5Sespie    by set to hard return value.  */
240*c87b03e5Sespie static rtx
skip_unreturned_value(orig_insn)241*c87b03e5Sespie skip_unreturned_value (orig_insn)
242*c87b03e5Sespie      rtx orig_insn;
243*c87b03e5Sespie {
244*c87b03e5Sespie   rtx insn = next_nonnote_insn (orig_insn);
245*c87b03e5Sespie 
246*c87b03e5Sespie   /* Skip possible clobber of pseudo return register.  */
247*c87b03e5Sespie   if (insn
248*c87b03e5Sespie       && GET_CODE (insn) == INSN
249*c87b03e5Sespie       && GET_CODE (PATTERN (insn)) == CLOBBER
250*c87b03e5Sespie       && REG_P (XEXP (PATTERN (insn), 0))
251*c87b03e5Sespie       && (REGNO (XEXP (PATTERN (insn), 0)) >= FIRST_PSEUDO_REGISTER))
252*c87b03e5Sespie     {
253*c87b03e5Sespie       rtx set_insn = next_nonnote_insn (insn);
254*c87b03e5Sespie       rtx set;
255*c87b03e5Sespie       if (!set_insn)
256*c87b03e5Sespie 	return insn;
257*c87b03e5Sespie       set = single_set (set_insn);
258*c87b03e5Sespie       if (!set
259*c87b03e5Sespie 	  || SET_SRC (set) != XEXP (PATTERN (insn), 0)
260*c87b03e5Sespie 	  || SET_DEST (set) != current_function_return_rtx)
261*c87b03e5Sespie 	return insn;
262*c87b03e5Sespie       return set_insn;
263*c87b03e5Sespie     }
264*c87b03e5Sespie   return orig_insn;
265*c87b03e5Sespie }
266*c87b03e5Sespie 
267*c87b03e5Sespie /* If the first real insn after ORIG_INSN adjusts the stack pointer
268*c87b03e5Sespie    by a constant, return the insn with the stack pointer adjustment.
269*c87b03e5Sespie    Otherwise return ORIG_INSN.  */
270*c87b03e5Sespie 
271*c87b03e5Sespie static rtx
skip_stack_adjustment(orig_insn)272*c87b03e5Sespie skip_stack_adjustment (orig_insn)
273*c87b03e5Sespie      rtx orig_insn;
274*c87b03e5Sespie {
275*c87b03e5Sespie   rtx insn, set = NULL_RTX;
276*c87b03e5Sespie 
277*c87b03e5Sespie   insn = next_nonnote_insn (orig_insn);
278*c87b03e5Sespie 
279*c87b03e5Sespie   if (insn)
280*c87b03e5Sespie     set = single_set (insn);
281*c87b03e5Sespie 
282*c87b03e5Sespie   if (insn
283*c87b03e5Sespie       && set
284*c87b03e5Sespie       && GET_CODE (SET_SRC (set)) == PLUS
285*c87b03e5Sespie       && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
286*c87b03e5Sespie       && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT
287*c87b03e5Sespie       && SET_DEST (set) == stack_pointer_rtx)
288*c87b03e5Sespie     return insn;
289*c87b03e5Sespie 
290*c87b03e5Sespie   return orig_insn;
291*c87b03e5Sespie }
292*c87b03e5Sespie 
293*c87b03e5Sespie /* If the first real insn after ORIG_INSN sets the pic register,
294*c87b03e5Sespie    return it.  Otherwise return ORIG_INSN.  */
295*c87b03e5Sespie 
296*c87b03e5Sespie static rtx
skip_pic_restore(orig_insn)297*c87b03e5Sespie skip_pic_restore (orig_insn)
298*c87b03e5Sespie      rtx orig_insn;
299*c87b03e5Sespie {
300*c87b03e5Sespie   rtx insn, set = NULL_RTX;
301*c87b03e5Sespie 
302*c87b03e5Sespie   insn = next_nonnote_insn (orig_insn);
303*c87b03e5Sespie 
304*c87b03e5Sespie   if (insn)
305*c87b03e5Sespie     set = single_set (insn);
306*c87b03e5Sespie 
307*c87b03e5Sespie   if (insn && set && SET_DEST (set) == pic_offset_table_rtx)
308*c87b03e5Sespie     return insn;
309*c87b03e5Sespie 
310*c87b03e5Sespie   return orig_insn;
311*c87b03e5Sespie }
312*c87b03e5Sespie 
313*c87b03e5Sespie /* If the first real insn after ORIG_INSN is a jump, return the JUMP_INSN.
314*c87b03e5Sespie    Otherwise return ORIG_INSN.  */
315*c87b03e5Sespie 
316*c87b03e5Sespie static rtx
skip_jump_insn(orig_insn)317*c87b03e5Sespie skip_jump_insn (orig_insn)
318*c87b03e5Sespie      rtx orig_insn;
319*c87b03e5Sespie {
320*c87b03e5Sespie   rtx insn;
321*c87b03e5Sespie 
322*c87b03e5Sespie   insn = next_nonnote_insn (orig_insn);
323*c87b03e5Sespie 
324*c87b03e5Sespie   if (insn
325*c87b03e5Sespie       && GET_CODE (insn) == JUMP_INSN
326*c87b03e5Sespie       && any_uncondjump_p (insn))
327*c87b03e5Sespie     return insn;
328*c87b03e5Sespie 
329*c87b03e5Sespie   return orig_insn;
330*c87b03e5Sespie }
331*c87b03e5Sespie 
/* Using the above functions, see if INSN, skipping any of the above,
   goes all the way to END, the end of a basic block.  Return 1 if so.  */

static int
call_ends_block_p (insn, end)
     rtx insn;
     rtx end;
{
  rtx new_insn;
  /* END might be a note, so get the last nonnote insn of the block.  */
  end = next_nonnote_insn (PREV_INSN (end));

  /* If the call was the end of the block, then we're OK.  */
  if (insn == end)
    return 1;

  /* Skip over copying from the call's return value pseudo into
     this function's hard return register and if that's the end
     of the block, we're OK.  */
  new_insn = skip_copy_to_return_value (insn);

  /* In case we return value in pseudo, we must set the pseudo to
     return value of called function, otherwise we are returning
     something else.  */
  if (return_value_pseudo && insn == new_insn)
    return 0;
  insn = new_insn;

  if (insn == end)
    return 1;

  /* Skip any stack adjustment.  */
  insn = skip_stack_adjustment (insn);
  if (insn == end)
    return 1;

  /* Skip over a CLOBBER of the return value as a hard reg.  */
  insn = skip_use_of_return_value (insn, CLOBBER);
  if (insn == end)
    return 1;

  /* Skip over a CLOBBER of a pseudo return register, possibly followed
     by a copy into the hard return register (a function whose value
     goes unreturned).  */
  insn = skip_unreturned_value (insn);
  if (insn == end)
    return 1;

  /* Skip over a USE of the return value (as a hard reg).  */
  insn = skip_use_of_return_value (insn, USE);
  if (insn == end)
    return 1;

  /* Skip over a JUMP_INSN at the end of the block.  If that doesn't end the
     block, the original CALL_INSN didn't.  */
  insn = skip_jump_insn (insn);
  return insn == end;
}
388*c87b03e5Sespie 
389*c87b03e5Sespie /* Scan the rtx X for ADDRESSOF expressions or
390*c87b03e5Sespie    current_function_internal_arg_pointer registers.
391*c87b03e5Sespie    Return nonzero if an ADDRESSOF or current_function_internal_arg_pointer
392*c87b03e5Sespie    is found outside of some MEM expression, else return zero.  */
393*c87b03e5Sespie 
394*c87b03e5Sespie static int
uses_addressof(x)395*c87b03e5Sespie uses_addressof (x)
396*c87b03e5Sespie      rtx x;
397*c87b03e5Sespie {
398*c87b03e5Sespie   RTX_CODE code;
399*c87b03e5Sespie   int i, j;
400*c87b03e5Sespie   const char *fmt;
401*c87b03e5Sespie 
402*c87b03e5Sespie   if (x == NULL_RTX)
403*c87b03e5Sespie     return 0;
404*c87b03e5Sespie 
405*c87b03e5Sespie   code = GET_CODE (x);
406*c87b03e5Sespie 
407*c87b03e5Sespie   if (code == ADDRESSOF || x == current_function_internal_arg_pointer)
408*c87b03e5Sespie     return 1;
409*c87b03e5Sespie 
410*c87b03e5Sespie   if (code == MEM)
411*c87b03e5Sespie     return 0;
412*c87b03e5Sespie 
413*c87b03e5Sespie   /* Scan all subexpressions.  */
414*c87b03e5Sespie   fmt = GET_RTX_FORMAT (code);
415*c87b03e5Sespie   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
416*c87b03e5Sespie     {
417*c87b03e5Sespie       if (*fmt == 'e')
418*c87b03e5Sespie 	{
419*c87b03e5Sespie 	  if (uses_addressof (XEXP (x, i)))
420*c87b03e5Sespie 	    return 1;
421*c87b03e5Sespie 	}
422*c87b03e5Sespie       else if (*fmt == 'E')
423*c87b03e5Sespie 	{
424*c87b03e5Sespie 	  for (j = 0; j < XVECLEN (x, i); j++)
425*c87b03e5Sespie 	    if (uses_addressof (XVECEXP (x, i, j)))
426*c87b03e5Sespie 	      return 1;
427*c87b03e5Sespie 	}
428*c87b03e5Sespie     }
429*c87b03e5Sespie   return 0;
430*c87b03e5Sespie }
431*c87b03e5Sespie 
432*c87b03e5Sespie /* Scan the sequence of insns in SEQ to see if any have an ADDRESSOF
433*c87b03e5Sespie    rtl expression or current_function_internal_arg_pointer occurrences
434*c87b03e5Sespie    not enclosed within a MEM.  If an ADDRESSOF expression or
435*c87b03e5Sespie    current_function_internal_arg_pointer is found, return nonzero, otherwise
436*c87b03e5Sespie    return zero.
437*c87b03e5Sespie 
438*c87b03e5Sespie    This function handles CALL_PLACEHOLDERs which contain multiple sequences
439*c87b03e5Sespie    of insns.  */
440*c87b03e5Sespie 
441*c87b03e5Sespie static int
sequence_uses_addressof(seq)442*c87b03e5Sespie sequence_uses_addressof (seq)
443*c87b03e5Sespie      rtx seq;
444*c87b03e5Sespie {
445*c87b03e5Sespie   rtx insn;
446*c87b03e5Sespie 
447*c87b03e5Sespie   for (insn = seq; insn; insn = NEXT_INSN (insn))
448*c87b03e5Sespie     if (INSN_P (insn))
449*c87b03e5Sespie       {
450*c87b03e5Sespie 	/* If this is a CALL_PLACEHOLDER, then recursively call ourselves
451*c87b03e5Sespie 	   with each nonempty sequence attached to the CALL_PLACEHOLDER.  */
452*c87b03e5Sespie 	if (GET_CODE (insn) == CALL_INSN
453*c87b03e5Sespie 	    && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
454*c87b03e5Sespie 	  {
455*c87b03e5Sespie 	    if (XEXP (PATTERN (insn), 0) != NULL_RTX
456*c87b03e5Sespie 		&& sequence_uses_addressof (XEXP (PATTERN (insn), 0)))
457*c87b03e5Sespie 	      return 1;
458*c87b03e5Sespie 	    if (XEXP (PATTERN (insn), 1) != NULL_RTX
459*c87b03e5Sespie 		&& sequence_uses_addressof (XEXP (PATTERN (insn), 1)))
460*c87b03e5Sespie 	      return 1;
461*c87b03e5Sespie 	    if (XEXP (PATTERN (insn), 2) != NULL_RTX
462*c87b03e5Sespie 		&& sequence_uses_addressof (XEXP (PATTERN (insn), 2)))
463*c87b03e5Sespie 	      return 1;
464*c87b03e5Sespie 	  }
465*c87b03e5Sespie 	else if (uses_addressof (PATTERN (insn))
466*c87b03e5Sespie 		 || (REG_NOTES (insn) && uses_addressof (REG_NOTES (insn))))
467*c87b03e5Sespie 	  return 1;
468*c87b03e5Sespie       }
469*c87b03e5Sespie   return 0;
470*c87b03e5Sespie }
471*c87b03e5Sespie 
472*c87b03e5Sespie /* Remove all REG_EQUIV notes found in the insn chain.  */
473*c87b03e5Sespie 
474*c87b03e5Sespie static void
purge_reg_equiv_notes()475*c87b03e5Sespie purge_reg_equiv_notes ()
476*c87b03e5Sespie {
477*c87b03e5Sespie   rtx insn;
478*c87b03e5Sespie 
479*c87b03e5Sespie   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
480*c87b03e5Sespie     {
481*c87b03e5Sespie       while (1)
482*c87b03e5Sespie 	{
483*c87b03e5Sespie 	  rtx note = find_reg_note (insn, REG_EQUIV, 0);
484*c87b03e5Sespie 	  if (note)
485*c87b03e5Sespie 	    {
486*c87b03e5Sespie 	      /* Remove the note and keep looking at the notes for
487*c87b03e5Sespie 		 this insn.  */
488*c87b03e5Sespie 	      remove_note (insn, note);
489*c87b03e5Sespie 	      continue;
490*c87b03e5Sespie 	    }
491*c87b03e5Sespie 	  break;
492*c87b03e5Sespie 	}
493*c87b03e5Sespie     }
494*c87b03e5Sespie }
495*c87b03e5Sespie 
496*c87b03e5Sespie /* Clear RTX_UNCHANGING_P flag of incoming argument MEMs.  */
497*c87b03e5Sespie 
498*c87b03e5Sespie static void
purge_mem_unchanging_flag(x)499*c87b03e5Sespie purge_mem_unchanging_flag (x)
500*c87b03e5Sespie      rtx x;
501*c87b03e5Sespie {
502*c87b03e5Sespie   RTX_CODE code;
503*c87b03e5Sespie   int i, j;
504*c87b03e5Sespie   const char *fmt;
505*c87b03e5Sespie 
506*c87b03e5Sespie   if (x == NULL_RTX)
507*c87b03e5Sespie     return;
508*c87b03e5Sespie 
509*c87b03e5Sespie   code = GET_CODE (x);
510*c87b03e5Sespie 
511*c87b03e5Sespie   if (code == MEM)
512*c87b03e5Sespie     {
513*c87b03e5Sespie       if (RTX_UNCHANGING_P (x)
514*c87b03e5Sespie 	  && (XEXP (x, 0) == current_function_internal_arg_pointer
515*c87b03e5Sespie 	      || (GET_CODE (XEXP (x, 0)) == PLUS
516*c87b03e5Sespie 		  && XEXP (XEXP (x, 0), 0) ==
517*c87b03e5Sespie 		     current_function_internal_arg_pointer
518*c87b03e5Sespie 		  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
519*c87b03e5Sespie 	RTX_UNCHANGING_P (x) = 0;
520*c87b03e5Sespie       return;
521*c87b03e5Sespie     }
522*c87b03e5Sespie 
523*c87b03e5Sespie   /* Scan all subexpressions.  */
524*c87b03e5Sespie   fmt = GET_RTX_FORMAT (code);
525*c87b03e5Sespie   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
526*c87b03e5Sespie     {
527*c87b03e5Sespie       if (*fmt == 'e')
528*c87b03e5Sespie 	purge_mem_unchanging_flag (XEXP (x, i));
529*c87b03e5Sespie       else if (*fmt == 'E')
530*c87b03e5Sespie 	for (j = 0; j < XVECLEN (x, i); j++)
531*c87b03e5Sespie 	  purge_mem_unchanging_flag (XVECEXP (x, i, j));
532*c87b03e5Sespie     }
533*c87b03e5Sespie }
534*c87b03e5Sespie 
535*c87b03e5Sespie /* Replace the CALL_PLACEHOLDER with one of its children.  INSN should be
536*c87b03e5Sespie    the CALL_PLACEHOLDER insn; USE tells which child to use.  */
537*c87b03e5Sespie 
538*c87b03e5Sespie void
replace_call_placeholder(insn,use)539*c87b03e5Sespie replace_call_placeholder (insn, use)
540*c87b03e5Sespie      rtx insn;
541*c87b03e5Sespie      sibcall_use_t use;
542*c87b03e5Sespie {
543*c87b03e5Sespie   if (use == sibcall_use_tail_recursion)
544*c87b03e5Sespie     emit_insn_before (XEXP (PATTERN (insn), 2), insn);
545*c87b03e5Sespie   else if (use == sibcall_use_sibcall)
546*c87b03e5Sespie     emit_insn_before (XEXP (PATTERN (insn), 1), insn);
547*c87b03e5Sespie   else if (use == sibcall_use_normal)
548*c87b03e5Sespie     emit_insn_before (XEXP (PATTERN (insn), 0), insn);
549*c87b03e5Sespie   else
550*c87b03e5Sespie     abort ();
551*c87b03e5Sespie 
552*c87b03e5Sespie   /* Turn off LABEL_PRESERVE_P for the tail recursion label if it
553*c87b03e5Sespie      exists.  We only had to set it long enough to keep the jump
554*c87b03e5Sespie      pass above from deleting it as unused.  */
555*c87b03e5Sespie   if (XEXP (PATTERN (insn), 3))
556*c87b03e5Sespie     LABEL_PRESERVE_P (XEXP (PATTERN (insn), 3)) = 0;
557*c87b03e5Sespie 
558*c87b03e5Sespie   /* "Delete" the placeholder insn.  */
559*c87b03e5Sespie   remove_insn (insn);
560*c87b03e5Sespie }
561*c87b03e5Sespie 
562*c87b03e5Sespie /* Given a (possibly empty) set of potential sibling or tail recursion call
563*c87b03e5Sespie    sites, determine if optimization is possible.
564*c87b03e5Sespie 
565*c87b03e5Sespie    Potential sibling or tail recursion calls are marked with CALL_PLACEHOLDER
566*c87b03e5Sespie    insns.  The CALL_PLACEHOLDER insn holds chains of insns to implement a
567*c87b03e5Sespie    normal call, sibling call or tail recursive call.
568*c87b03e5Sespie 
569*c87b03e5Sespie    Replace the CALL_PLACEHOLDER with an appropriate insn chain.  */
570*c87b03e5Sespie 
571*c87b03e5Sespie void
optimize_sibling_and_tail_recursive_calls()572*c87b03e5Sespie optimize_sibling_and_tail_recursive_calls ()
573*c87b03e5Sespie {
574*c87b03e5Sespie   rtx insn, insns;
575*c87b03e5Sespie   basic_block alternate_exit = EXIT_BLOCK_PTR;
576*c87b03e5Sespie   bool no_sibcalls_this_function = false;
577*c87b03e5Sespie   bool successful_replacement = false;
578*c87b03e5Sespie   bool replaced_call_placeholder = false;
579*c87b03e5Sespie   edge e;
580*c87b03e5Sespie 
581*c87b03e5Sespie   insns = get_insns ();
582*c87b03e5Sespie 
583*c87b03e5Sespie   cleanup_cfg (CLEANUP_PRE_SIBCALL | CLEANUP_PRE_LOOP);
584*c87b03e5Sespie 
585*c87b03e5Sespie   /* If there are no basic blocks, then there is nothing to do.  */
586*c87b03e5Sespie   if (n_basic_blocks == 0)
587*c87b03e5Sespie     return;
588*c87b03e5Sespie 
589*c87b03e5Sespie   /* If we are using sjlj exceptions, we may need to add a call to
590*c87b03e5Sespie      _Unwind_SjLj_Unregister at exit of the function.  Which means
591*c87b03e5Sespie      that we cannot do any sibcall transformations.  */
592*c87b03e5Sespie   if (USING_SJLJ_EXCEPTIONS && current_function_has_exception_handlers ())
593*c87b03e5Sespie     no_sibcalls_this_function = true;
594*c87b03e5Sespie 
595*c87b03e5Sespie   return_value_pseudo = NULL_RTX;
596*c87b03e5Sespie 
597*c87b03e5Sespie   /* Find the exit block.
598*c87b03e5Sespie 
599*c87b03e5Sespie      It is possible that we have blocks which can reach the exit block
600*c87b03e5Sespie      directly.  However, most of the time a block will jump (or fall into)
601*c87b03e5Sespie      N_BASIC_BLOCKS - 1, which in turn falls into the exit block.  */
602*c87b03e5Sespie   for (e = EXIT_BLOCK_PTR->pred;
603*c87b03e5Sespie        e && alternate_exit == EXIT_BLOCK_PTR;
604*c87b03e5Sespie        e = e->pred_next)
605*c87b03e5Sespie     {
606*c87b03e5Sespie       rtx insn;
607*c87b03e5Sespie 
608*c87b03e5Sespie       if (e->dest != EXIT_BLOCK_PTR || e->succ_next != NULL)
609*c87b03e5Sespie 	continue;
610*c87b03e5Sespie 
611*c87b03e5Sespie       /* Walk forwards through the last normal block and see if it
612*c87b03e5Sespie 	 does nothing except fall into the exit block.  */
613*c87b03e5Sespie       for (insn = EXIT_BLOCK_PTR->prev_bb->head;
614*c87b03e5Sespie 	   insn;
615*c87b03e5Sespie 	   insn = NEXT_INSN (insn))
616*c87b03e5Sespie 	{
617*c87b03e5Sespie 	  rtx set;
618*c87b03e5Sespie 	  /* This should only happen once, at the start of this block.  */
619*c87b03e5Sespie 	  if (GET_CODE (insn) == CODE_LABEL)
620*c87b03e5Sespie 	    continue;
621*c87b03e5Sespie 
622*c87b03e5Sespie 	  if (GET_CODE (insn) == NOTE)
623*c87b03e5Sespie 	    continue;
624*c87b03e5Sespie 
625*c87b03e5Sespie 	  if (GET_CODE (insn) == INSN
626*c87b03e5Sespie 	      && GET_CODE (PATTERN (insn)) == USE)
627*c87b03e5Sespie 	    continue;
628*c87b03e5Sespie 
629*c87b03e5Sespie 	  /* Exit block also may contain copy from pseudo containing
630*c87b03e5Sespie 	     return value to hard register.  */
631*c87b03e5Sespie 	  if (GET_CODE (insn) == INSN
632*c87b03e5Sespie 	      && (set = single_set (insn))
633*c87b03e5Sespie 	      && SET_DEST (set) == current_function_return_rtx
634*c87b03e5Sespie 	      && REG_P (SET_SRC (set))
635*c87b03e5Sespie 	      && !return_value_pseudo)
636*c87b03e5Sespie 	    {
637*c87b03e5Sespie 	      return_value_pseudo = SET_SRC (set);
638*c87b03e5Sespie 	      continue;
639*c87b03e5Sespie 	    }
640*c87b03e5Sespie 
641*c87b03e5Sespie 	  break;
642*c87b03e5Sespie 	}
643*c87b03e5Sespie 
644*c87b03e5Sespie       /* If INSN is zero, then the search walked all the way through the
645*c87b03e5Sespie 	 block without hitting anything interesting.  This block is a
646*c87b03e5Sespie 	 valid alternate exit block.  */
647*c87b03e5Sespie       if (insn == NULL)
648*c87b03e5Sespie 	alternate_exit = e->src;
649*c87b03e5Sespie       else
650*c87b03e5Sespie 	return_value_pseudo = NULL;
651*c87b03e5Sespie     }
652*c87b03e5Sespie 
653*c87b03e5Sespie   /* If the function uses ADDRESSOF, we can't (easily) determine
654*c87b03e5Sespie      at this point if the value will end up on the stack.  */
655*c87b03e5Sespie   no_sibcalls_this_function |= sequence_uses_addressof (insns);
656*c87b03e5Sespie 
657*c87b03e5Sespie   /* Walk the insn chain and find any CALL_PLACEHOLDER insns.  We need to
658*c87b03e5Sespie      select one of the insn sequences attached to each CALL_PLACEHOLDER.
659*c87b03e5Sespie 
660*c87b03e5Sespie      The different sequences represent different ways to implement the call,
661*c87b03e5Sespie      ie, tail recursion, sibling call or normal call.
662*c87b03e5Sespie 
663*c87b03e5Sespie      Since we do not create nested CALL_PLACEHOLDERs, the scan
664*c87b03e5Sespie      continues with the insn that was after a replaced CALL_PLACEHOLDER;
665*c87b03e5Sespie      we don't rescan the replacement insns.  */
666*c87b03e5Sespie   for (insn = insns; insn; insn = NEXT_INSN (insn))
667*c87b03e5Sespie     {
668*c87b03e5Sespie       if (GET_CODE (insn) == CALL_INSN
669*c87b03e5Sespie 	  && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
670*c87b03e5Sespie 	{
671*c87b03e5Sespie 	  int sibcall = (XEXP (PATTERN (insn), 1) != NULL_RTX);
672*c87b03e5Sespie 	  int tailrecursion = (XEXP (PATTERN (insn), 2) != NULL_RTX);
673*c87b03e5Sespie 	  basic_block call_block = BLOCK_FOR_INSN (insn);
674*c87b03e5Sespie 
675*c87b03e5Sespie 	  /* alloca (until we have stack slot life analysis) inhibits
676*c87b03e5Sespie 	     sibling call optimizations, but not tail recursion.
677*c87b03e5Sespie 	     Similarly if we use varargs or stdarg since they implicitly
678*c87b03e5Sespie 	     may take the address of an argument.  */
679*c87b03e5Sespie 	  if (current_function_calls_alloca || current_function_stdarg)
680*c87b03e5Sespie 	    sibcall = 0;
681*c87b03e5Sespie 
682*c87b03e5Sespie 	  /* See if there are any reasons we can't perform either sibling or
683*c87b03e5Sespie 	     tail call optimizations.  We must be careful with stack slots
684*c87b03e5Sespie 	     which are live at potential optimization sites.  */
685*c87b03e5Sespie 	  if (no_sibcalls_this_function
686*c87b03e5Sespie 	      /* ??? Overly conservative.  */
687*c87b03e5Sespie 	      || frame_offset
688*c87b03e5Sespie 	      /* Any function that calls setjmp might have longjmp called from
689*c87b03e5Sespie 		 any called function.  ??? We really should represent this
690*c87b03e5Sespie 		 properly in the CFG so that this needn't be special cased.  */
691*c87b03e5Sespie 	      || current_function_calls_setjmp
692*c87b03e5Sespie 	      /* Can't if more than one successor or single successor is not
693*c87b03e5Sespie 		 exit block.  These two tests prevent tail call optimization
694*c87b03e5Sespie 		 in the presence of active exception handlers.  */
695*c87b03e5Sespie 	      || call_block->succ == NULL
696*c87b03e5Sespie 	      || call_block->succ->succ_next != NULL
697*c87b03e5Sespie 	      || (call_block->succ->dest != EXIT_BLOCK_PTR
698*c87b03e5Sespie 		  && call_block->succ->dest != alternate_exit)
699*c87b03e5Sespie 	      /* If this call doesn't end the block, there are operations at
700*c87b03e5Sespie 		 the end of the block which we must execute after returning.  */
701*c87b03e5Sespie 	      || ! call_ends_block_p (insn, call_block->end))
702*c87b03e5Sespie 	    sibcall = 0, tailrecursion = 0;
703*c87b03e5Sespie 
704*c87b03e5Sespie 	  /* Select a set of insns to implement the call and emit them.
705*c87b03e5Sespie 	     Tail recursion is the most efficient, so select it over
706*c87b03e5Sespie 	     a tail/sibling call.  */
707*c87b03e5Sespie 
708*c87b03e5Sespie 	  if (sibcall || tailrecursion)
709*c87b03e5Sespie 	    successful_replacement = true;
710*c87b03e5Sespie 	  replaced_call_placeholder = true;
711*c87b03e5Sespie 
712*c87b03e5Sespie 	  replace_call_placeholder (insn,
713*c87b03e5Sespie 				    tailrecursion != 0
714*c87b03e5Sespie 				      ? sibcall_use_tail_recursion
715*c87b03e5Sespie 				      : sibcall != 0
716*c87b03e5Sespie 					 ? sibcall_use_sibcall
717*c87b03e5Sespie 					 : sibcall_use_normal);
718*c87b03e5Sespie 	}
719*c87b03e5Sespie     }
720*c87b03e5Sespie 
721*c87b03e5Sespie   if (successful_replacement)
722*c87b03e5Sespie     {
723*c87b03e5Sespie       rtx insn;
724*c87b03e5Sespie       tree arg;
725*c87b03e5Sespie 
726*c87b03e5Sespie       /* A sibling call sequence invalidates any REG_EQUIV notes made for
727*c87b03e5Sespie 	 this function's incoming arguments.
728*c87b03e5Sespie 
729*c87b03e5Sespie 	 At the start of RTL generation we know the only REG_EQUIV notes
730*c87b03e5Sespie 	 in the rtl chain are those for incoming arguments, so we can safely
731*c87b03e5Sespie 	 flush any REG_EQUIV note.
732*c87b03e5Sespie 
733*c87b03e5Sespie 	 This is (slight) overkill.  We could keep track of the highest
734*c87b03e5Sespie 	 argument we clobber and be more selective in removing notes, but it
735*c87b03e5Sespie 	 does not seem to be worth the effort.  */
736*c87b03e5Sespie       purge_reg_equiv_notes ();
737*c87b03e5Sespie 
738*c87b03e5Sespie       /* A sibling call sequence also may invalidate RTX_UNCHANGING_P
739*c87b03e5Sespie 	 flag of some incoming arguments MEM RTLs, because it can write into
740*c87b03e5Sespie 	 those slots.  We clear all those bits now.
741*c87b03e5Sespie 
742*c87b03e5Sespie 	 This is (slight) overkill, we could keep track of which arguments
743*c87b03e5Sespie 	 we actually write into.  */
744*c87b03e5Sespie       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
745*c87b03e5Sespie 	{
746*c87b03e5Sespie 	  if (INSN_P (insn))
747*c87b03e5Sespie 	    purge_mem_unchanging_flag (PATTERN (insn));
748*c87b03e5Sespie 	}
749*c87b03e5Sespie 
750*c87b03e5Sespie       /* Similarly, invalidate RTX_UNCHANGING_P for any incoming
751*c87b03e5Sespie 	 arguments passed in registers.  */
752*c87b03e5Sespie       for (arg = DECL_ARGUMENTS (current_function_decl);
753*c87b03e5Sespie 	   arg;
754*c87b03e5Sespie 	   arg = TREE_CHAIN (arg))
755*c87b03e5Sespie 	{
756*c87b03e5Sespie 	  if (REG_P (DECL_RTL (arg)))
757*c87b03e5Sespie 	    RTX_UNCHANGING_P (DECL_RTL (arg)) = false;
758*c87b03e5Sespie 	}
759*c87b03e5Sespie     }
760*c87b03e5Sespie 
761*c87b03e5Sespie   /* There may have been NOTE_INSN_BLOCK_{BEGIN,END} notes in the
762*c87b03e5Sespie      CALL_PLACEHOLDER alternatives that we didn't emit.  Rebuild the
763*c87b03e5Sespie      lexical block tree to correspond to the notes that still exist.  */
764*c87b03e5Sespie   if (replaced_call_placeholder)
765*c87b03e5Sespie     reorder_blocks ();
766*c87b03e5Sespie 
767*c87b03e5Sespie   /* This information will be invalid after inline expansion.  Kill it now.  */
768*c87b03e5Sespie   free_basic_block_vars (0);
769*c87b03e5Sespie   free_EXPR_LIST_list (&tail_recursion_label_list);
770*c87b03e5Sespie }
771