1 /* Tail call optimization on trees.
2    Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
3    Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "function.h"
29 #include "tree-flow.h"
30 #include "tree-dump.h"
31 #include "gimple-pretty-print.h"
32 #include "except.h"
33 #include "tree-pass.h"
34 #include "flags.h"
35 #include "langhooks.h"
36 #include "dbgcnt.h"
37 #include "target.h"
38 #include "common/common-target.h"
39 
/* This file implements tail recursion elimination.  It is also used to
41    analyze the tail calls in general, passing the results to the rtl level
42    where they are used for sibcall optimization.
43 
44    In addition to the standard tail recursion elimination, we handle the most
45    trivial cases of making the call tail recursive by creating accumulators.
   For example, the following function
47 
48    int sum (int n)
49    {
50      if (n > 0)
51        return n + sum (n - 1);
52      else
53        return 0;
54    }
55 
56    is transformed into
57 
58    int sum (int n)
59    {
60      int acc = 0;
61 
62      while (n > 0)
63        acc += n--;
64 
65      return acc;
66    }
67 
   To do this, we maintain two accumulators (a_acc and m_acc) indicating
   that when we reach a return x statement, we should return
   a_acc + x * m_acc instead.  They are initialized to 0 and 1, respectively,
   so the semantics of the function is obviously preserved.  If we are
   guaranteed that the value of an accumulator never changes, we
   omit that accumulator.
74 
   There are three ways in which the function may exit.  The first one is
76    handled in adjust_return_value, the other two in adjust_accumulator_values
77    (the second case is actually a special case of the third one and we
78    present it separately just for clarity):
79 
80    1) Just return x, where x is not in any of the remaining special shapes.
81       We rewrite this to a gimple equivalent of return m_acc * x + a_acc.
82 
   2) return f (...), where f is the current function, is rewritten in the
      classical tail-recursion elimination way, into an assignment of the
      arguments and a jump to the start of the function.  The values of the
      accumulators are unchanged.
87 
   3) return a + m * f(...), where a and m do not depend on the call to f.
89       To preserve the semantics described before we want this to be rewritten
90       in such a way that we finally return
91 
92       a_acc + (a + m * f(...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f(...).
93 
94       I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
95       eliminate the tail call to f.  Special cases when the value is just
96       added or just multiplied are obtained by setting a = 0 or m = 1.
97 
98    TODO -- it is possible to do similar tricks for other operations.  */
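
/* As a further illustrative sketch of case 3 (this example is not part of
   the implementation or of any particular testcase), a recursion such as

   int fact (int n)
   {
     if (n > 1)
       return n * fact (n - 1);
     else
       return 1;
   }

   has a = 0 and m = n at the recursive return, so only the multiplicative
   accumulator is needed and the function is conceptually turned into

   int fact (int n)
   {
     int m_acc = 1;

     while (n > 1)
       m_acc *= n--;

     return m_acc;
   }  */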
99 
100 /* A structure that describes the tailcall.  */
101 
102 struct tailcall
103 {
104   /* The iterator pointing to the call statement.  */
105   gimple_stmt_iterator call_gsi;
106 
107   /* True if it is a call to the current function.  */
108   bool tail_recursion;
109 
110   /* The return value of the caller is mult * f + add, where f is the return
111      value of the call.  */
112   tree mult, add;
113 
114   /* Next tailcall in the chain.  */
115   struct tailcall *next;
116 };
117 
/* The variables holding the values of the multiplicative and additive
   accumulators.  */
120 static tree m_acc, a_acc;
121 
122 static bool suitable_for_tail_opt_p (void);
123 static bool optimize_tail_call (struct tailcall *, bool);
124 static void eliminate_tail_call (struct tailcall *);
125 static void find_tail_calls (basic_block, struct tailcall **);
126 
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */
129 
130 static bool
131 suitable_for_tail_opt_p (void)
132 {
133   if (cfun->stdarg)
134     return false;
135 
136   return true;
137 }

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it calls alloca or setjmp).  This test must pass
   in addition to suitable_for_tail_opt_p in order for tail call discovery
   to happen.  */
142 
143 static bool
144 suitable_for_tail_call_opt_p (void)
145 {
146   tree param;
147 
148   /* alloca (until we have stack slot life analysis) inhibits
149      sibling call optimizations, but not tail recursion.  */
150   if (cfun->calls_alloca)
151     return false;
152 
153   /* If we are using sjlj exceptions, we may need to add a call to
154      _Unwind_SjLj_Unregister at exit of the function.  Which means
155      that we cannot do any sibcall transformations.  */
156   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
157       && current_function_has_exception_handlers ())
158     return false;
159 
160   /* Any function that calls setjmp might have longjmp called from
161      any called function.  ??? We really should represent this
162      properly in the CFG so that this needn't be special cased.  */
163   if (cfun->calls_setjmp)
164     return false;
165 
166   /* ??? It is OK if the argument of a function is taken in some cases,
167      but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
168   for (param = DECL_ARGUMENTS (current_function_decl);
169        param;
170        param = DECL_CHAIN (param))
171     if (TREE_ADDRESSABLE (param))
172       return false;
173 
174   return true;
175 }
176 
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in the sense that we already know EXPR's
   value at GSI).  We use the fact that we are only called from within a
   chain of basic blocks that have only a single successor.  Returns the
   expression containing the value of EXPR at GSI.  */
182 
183 static tree
184 independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
185 {
186   basic_block bb, call_bb, at_bb;
187   edge e;
188   edge_iterator ei;
189 
190   if (is_gimple_min_invariant (expr))
191     return expr;
192 
193   if (TREE_CODE (expr) != SSA_NAME)
194     return NULL_TREE;
195 
196   /* Mark the blocks in the chain leading to the end.  */
197   at_bb = gimple_bb (at);
198   call_bb = gimple_bb (gsi_stmt (gsi));
199   for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
200     bb->aux = &bb->aux;
201   bb->aux = &bb->aux;
202 
203   while (1)
204     {
205       at = SSA_NAME_DEF_STMT (expr);
206       bb = gimple_bb (at);
207 
      /* The default definition, or a definition before the chain.  */
209       if (!bb || !bb->aux)
210 	break;
211 
212       if (bb == call_bb)
213 	{
214 	  for (; !gsi_end_p (gsi); gsi_next (&gsi))
215 	    if (gsi_stmt (gsi) == at)
216 	      break;
217 
218 	  if (!gsi_end_p (gsi))
219 	    expr = NULL_TREE;
220 	  break;
221 	}
222 
223       if (gimple_code (at) != GIMPLE_PHI)
224 	{
225 	  expr = NULL_TREE;
226 	  break;
227 	}
228 
229       FOR_EACH_EDGE (e, ei, bb->preds)
230 	if (e->src->aux)
231 	  break;
232       gcc_assert (e);
233 
234       expr = PHI_ARG_DEF_FROM_EDGE (at, e);
235       if (TREE_CODE (expr) != SSA_NAME)
236 	{
237 	  /* The value is a constant.  */
238 	  break;
239 	}
240     }
241 
242   /* Unmark the blocks.  */
243   for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
244     bb->aux = NULL;
245   bb->aux = NULL;
246 
247   return expr;
248 }
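
/* An illustrative sketch of the walk above: assume a chain
   CALL_BB -> MID_BB -> AT_BB in which every block has a single successor.
   The loop marks the aux fields of all blocks in the chain; the walk then
   follows the definition of EXPR backwards, replacing EXPR by the PHI
   argument coming from the marked predecessor whenever it hits a PHI node
   in the chain, and stops either with the expression containing the value
   of EXPR at GSI or with NULL_TREE when that value is not known at GSI.  */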
249 
/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL whose result is currently held in *ASS_VAR.  M and A are
   the multiplicative and additive factors for the real return value.  */
253 
254 static bool
255 process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
256 		    tree *a, tree *ass_var)
257 {
258   tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
259   tree dest = gimple_assign_lhs (stmt);
260   enum tree_code code = gimple_assign_rhs_code (stmt);
261   enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
262   tree src_var = gimple_assign_rhs1 (stmt);
263 
264   /* See if this is a simple copy operation of an SSA name to the function
265      result.  In that case we may have a simple tail call.  Ignore type
266      conversions that can never produce extra code between the function
267      call and the function return.  */
268   if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
269       && (TREE_CODE (src_var) == SSA_NAME))
270     {
271       /* Reject a tailcall if the type conversion might need
272 	 additional code.  */
273       if (gimple_assign_cast_p (stmt)
274 	  && TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
275 	return false;
276 
277       if (src_var != *ass_var)
278 	return false;
279 
280       *ass_var = dest;
281       return true;
282     }
283 
284   switch (rhs_class)
285     {
286     case GIMPLE_BINARY_RHS:
287       op1 = gimple_assign_rhs2 (stmt);
288 
289       /* Fall through.  */
290 
291     case GIMPLE_UNARY_RHS:
292       op0 = gimple_assign_rhs1 (stmt);
293       break;
294 
295     default:
296       return false;
297     }
298 
299   /* Accumulator optimizations will reverse the order of operations.
300      We can only do that for floating-point types if we're assuming
301      that addition and multiplication are associative.  */
302   if (!flag_associative_math)
303     if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
304       return false;
305 
306   if (rhs_class == GIMPLE_UNARY_RHS)
307     ;
308   else if (op0 == *ass_var
309       && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
310     ;
311   else if (op1 == *ass_var
312 	   && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
313     ;
314   else
315     return false;
316 
317   switch (code)
318     {
319     case PLUS_EXPR:
320       *a = non_ass_var;
321       *ass_var = dest;
322       return true;
323 
324     case MULT_EXPR:
325       *m = non_ass_var;
326       *ass_var = dest;
327       return true;
328 
329     case NEGATE_EXPR:
330       if (FLOAT_TYPE_P (TREE_TYPE (op0)))
331         *m = build_real (TREE_TYPE (op0), dconstm1);
332       else if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
333         *m = build_int_cst (TREE_TYPE (op0), -1);
334       else
335         return false;
336 
337       *ass_var = dest;
338       return true;
339 
340     case MINUS_EXPR:
341       if (*ass_var == op0)
342         *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
343       else
344         {
345           if (FLOAT_TYPE_P (TREE_TYPE (non_ass_var)))
346             *m = build_real (TREE_TYPE (non_ass_var), dconstm1);
347           else if (INTEGRAL_TYPE_P (TREE_TYPE (non_ass_var)))
348             *m = build_int_cst (TREE_TYPE (non_ass_var), -1);
349 	  else
350 	    return false;
351 
352           *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
353         }
354 
355       *ass_var = dest;
356       return true;
357 
358       /* TODO -- Handle POINTER_PLUS_EXPR.  */
359 
360     default:
361       return false;
362     }
363 }
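
/* As an example of how the factors are meant to combine (illustrative only;
   the combination itself is done in find_tail_calls): for the statement
   chain

     t1 = f (n - 1);
     t2 = t1 * 3;
     t3 = t2 + 5;
     return t3;

   process_assignment yields m = 3 for the multiplication and a = 5 for the
   addition, so the value returned is 3 * f (n - 1) + 5.  */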
364 
365 /* Propagate VAR through phis on edge E.  */
366 
367 static tree
368 propagate_through_phis (tree var, edge e)
369 {
370   basic_block dest = e->dest;
371   gimple_stmt_iterator gsi;
372 
373   for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
374     {
375       gimple phi = gsi_stmt (gsi);
376       if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
377         return PHI_RESULT (phi);
378     }
379   return var;
380 }
381 
/* Finds tailcalls falling into basic block BB.  Found tailcalls are
   prepended to the list pointed to by RET.  */
384 
385 static void
386 find_tail_calls (basic_block bb, struct tailcall **ret)
387 {
388   tree ass_var = NULL_TREE, ret_var, func, param;
389   gimple stmt, call = NULL;
390   gimple_stmt_iterator gsi, agsi;
391   bool tail_recursion;
392   struct tailcall *nw;
393   edge e;
394   tree m, a;
395   basic_block abb;
396   size_t idx;
397   tree var;
398   referenced_var_iterator rvi;
399 
400   if (!single_succ_p (bb))
401     return;
402 
403   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
404     {
405       stmt = gsi_stmt (gsi);
406 
407       /* Ignore labels, returns, clobbers and debug stmts.  */
408       if (gimple_code (stmt) == GIMPLE_LABEL
409 	  || gimple_code (stmt) == GIMPLE_RETURN
410 	  || gimple_clobber_p (stmt)
411 	  || is_gimple_debug (stmt))
412 	continue;
413 
414       /* Check for a call.  */
415       if (is_gimple_call (stmt))
416 	{
417 	  call = stmt;
418 	  ass_var = gimple_call_lhs (stmt);
419 	  break;
420 	}
421 
422       /* If the statement references memory or volatile operands, fail.  */
423       if (gimple_references_memory_p (stmt)
424 	  || gimple_has_volatile_ops (stmt))
425 	return;
426     }
427 
428   if (gsi_end_p (gsi))
429     {
430       edge_iterator ei;
431       /* Recurse to the predecessors.  */
432       FOR_EACH_EDGE (e, ei, bb->preds)
433 	find_tail_calls (e->src, ret);
434 
435       return;
436     }
437 
  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens in,
     e.g., "*p = foo ()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so a tailcall is impossible.
443 
444      ??? In some situations (when the struct is returned in memory via
445      invisible argument) we could deal with this, e.g. by passing 'p'
446      itself as that argument to foo, but it's too early to do this here,
447      and expand_call() will not handle it anyway.  If it ever can, then
448      we need to revisit this here, to allow that situation.  */
449   if (ass_var && !is_gimple_reg (ass_var))
450     return;
451 
452   /* We found the call, check whether it is suitable.  */
453   tail_recursion = false;
454   func = gimple_call_fndecl (call);
455   if (func == current_function_decl)
456     {
457       tree arg;
458 
459       for (param = DECL_ARGUMENTS (func), idx = 0;
460 	   param && idx < gimple_call_num_args (call);
461 	   param = DECL_CHAIN (param), idx ++)
462 	{
463 	  arg = gimple_call_arg (call, idx);
464 	  if (param != arg)
465 	    {
	      /* Make sure there are no problems with copying.  The parameter
	         must have a copyable type and the parameter and the argument
	         must have reasonably equivalent types.  The latter requirement
	         could be relaxed if we emitted a suitable type conversion
	         statement.  */
470 	      if (!is_gimple_reg_type (TREE_TYPE (param))
471 		  || !useless_type_conversion_p (TREE_TYPE (param),
472 					         TREE_TYPE (arg)))
473 		break;
474 
	      /* The parameter should be a real operand, so that the phi node
		 created for it at the start of the function has the meaning
		 of copying the value.  This test implies is_gimple_reg_type
		 from the previous condition; however, this one could be
		 relaxed by being more careful with copying the new value
		 of the parameter (emitting an appropriate GIMPLE_ASSIGN and
		 updating the virtual operands).  */
482 	      if (!is_gimple_reg (param))
483 		break;
484 	    }
485 	}
486       if (idx == gimple_call_num_args (call) && !param)
487 	tail_recursion = true;
488     }
489 
490   /* Make sure the tail invocation of this function does not refer
491      to local variables.  */
492   FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
493     {
494       if (TREE_CODE (var) != PARM_DECL
495 	  && auto_var_in_fn_p (var, cfun->decl)
496 	  && (ref_maybe_used_by_stmt_p (call, var)
497 	      || call_may_clobber_ref_p (call, var)))
498 	return;
499     }
500 
501   /* Now check the statements after the call.  None of them has virtual
502      operands, so they may only depend on the call through its return
503      value.  The return value should also be dependent on each of them,
504      since we are running after dce.  */
505   m = NULL_TREE;
506   a = NULL_TREE;
507 
508   abb = bb;
509   agsi = gsi;
510   while (1)
511     {
512       tree tmp_a = NULL_TREE;
513       tree tmp_m = NULL_TREE;
514       gsi_next (&agsi);
515 
516       while (gsi_end_p (agsi))
517 	{
518 	  ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
519 	  abb = single_succ (abb);
520 	  agsi = gsi_start_bb (abb);
521 	}
522 
523       stmt = gsi_stmt (agsi);
524 
525       if (gimple_code (stmt) == GIMPLE_LABEL)
526 	continue;
527 
528       if (gimple_code (stmt) == GIMPLE_RETURN)
529 	break;
530 
531       if (gimple_clobber_p (stmt))
532 	continue;
533 
534       if (is_gimple_debug (stmt))
535 	continue;
536 
537       if (gimple_code (stmt) != GIMPLE_ASSIGN)
538 	return;
539 
      /* This is a GIMPLE assignment.  */
541       if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
542 	return;
543 
544       if (tmp_a)
545 	{
546 	  tree type = TREE_TYPE (tmp_a);
547 	  if (a)
548 	    a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
549 	  else
550 	    a = tmp_a;
551 	}
552       if (tmp_m)
553 	{
554 	  tree type = TREE_TYPE (tmp_m);
555 	  if (m)
556 	    m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
557 	  else
558 	    m = tmp_m;
559 
560 	  if (a)
561 	    a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
562 	}
563     }
564 
565   /* See if this is a tail call we can handle.  */
566   ret_var = gimple_return_retval (stmt);
567 
568   /* We may proceed if there either is no return value, or the return value
569      is identical to the call's return.  */
570   if (ret_var
571       && (ret_var != ass_var))
572     return;
573 
574   /* If this is not a tail recursive call, we cannot handle addends or
575      multiplicands.  */
576   if (!tail_recursion && (m || a))
577     return;
578 
579   /* For pointers don't allow additions or multiplications.  */
580   if ((m || a)
581       && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
582     return;
583 
584   nw = XNEW (struct tailcall);
585 
586   nw->call_gsi = gsi;
587 
588   nw->tail_recursion = tail_recursion;
589 
590   nw->mult = m;
591   nw->add = a;
592 
593   nw->next = *ret;
594   *ret = nw;
595 }
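
/* For illustration: for the sum function from the overview comment at the
   top of this file, the return n + sum (n - 1) path is recorded as a
   tailcall with tail_recursion set, add equal to (the SSA name of) n and
   mult left NULL_TREE, i.e. the returned value is 1 * sum (n - 1) + n.  */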
596 
/* Helper to insert PHI_ARG into the phi node of VAR in the destination of
   edge E.  */
598 
599 static void
600 add_successor_phi_arg (edge e, tree var, tree phi_arg)
601 {
602   gimple_stmt_iterator gsi;
603 
604   for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
605     if (PHI_RESULT (gsi_stmt (gsi)) == var)
606       break;
607 
608   gcc_assert (!gsi_end_p (gsi));
609   add_phi_arg (gsi_stmt (gsi), phi_arg, e, UNKNOWN_LOCATION);
610 }
611 
/* Creates a GIMPLE statement which computes the operation specified by
   CODE on ACC and OP1 into a new variable named LABEL and inserts the
   statement at the position specified by GSI.  Returns the
   tree node of the statement's result.  */
616 
617 static tree
618 adjust_return_value_with_ops (enum tree_code code, const char *label,
619 			      tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
623   tree tmp = create_tmp_reg (ret_type, label);
624   gimple stmt;
625   tree result;
626 
627   add_referenced_var (tmp);
628 
629   if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
630     stmt = gimple_build_assign_with_ops (code, tmp, acc, op1);
631   else
632     {
633       tree rhs = fold_convert (TREE_TYPE (acc),
634 			       fold_build2 (code,
635 					    TREE_TYPE (op1),
636 					    fold_convert (TREE_TYPE (op1), acc),
637 					    op1));
638       rhs = force_gimple_operand_gsi (&gsi, rhs,
639 				      false, NULL, true, GSI_SAME_STMT);
640       stmt = gimple_build_assign (NULL_TREE, rhs);
641     }
642 
643   result = make_ssa_name (tmp, stmt);
644   gimple_assign_set_lhs (stmt, result);
645   update_stmt (stmt);
646   gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
647   return result;
648 }
649 
/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC
   by the computation specified by CODE and OP1 and inserts the statement
   at the position specified by GSI.  Returns the new SSA name of the
   updated accumulator.  */
654 
655 static tree
656 update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
657 			     gimple_stmt_iterator gsi)
658 {
659   gimple stmt;
660   tree var;
661   if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
662     stmt = gimple_build_assign_with_ops (code, SSA_NAME_VAR (acc), acc, op1);
663   else
664     {
665       tree rhs = fold_convert (TREE_TYPE (acc),
666 			       fold_build2 (code,
667 					    TREE_TYPE (op1),
668 					    fold_convert (TREE_TYPE (op1), acc),
669 					    op1));
670       rhs = force_gimple_operand_gsi (&gsi, rhs,
671 				      false, NULL, false, GSI_CONTINUE_LINKING);
672       stmt = gimple_build_assign (NULL_TREE, rhs);
673     }
674   var = make_ssa_name (SSA_NAME_VAR (acc), stmt);
675   gimple_assign_set_lhs (stmt, var);
676   update_stmt (stmt);
677   gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
678   return var;
679 }
680 
681 /* Adjust the accumulator values according to A and M after GSI, and update
682    the phi nodes on edge BACK.  */
683 
684 static void
685 adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
686 {
687   tree var, a_acc_arg, m_acc_arg;
688 
689   if (m)
690     m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
691   if (a)
692     a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);
693 
694   a_acc_arg = a_acc;
695   m_acc_arg = m_acc;
696   if (a)
697     {
698       if (m_acc)
699 	{
700 	  if (integer_onep (a))
701 	    var = m_acc;
702 	  else
703 	    var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
704 						a, gsi);
705 	}
706       else
707 	var = a;
708 
709       a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
710     }
711 
712   if (m)
713     m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);
714 
715   if (a_acc)
716     add_successor_phi_arg (back, a_acc, a_acc_arg);
717 
718   if (m_acc)
719     add_successor_phi_arg (back, m_acc, m_acc_arg);
720 }
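
/* A sketch of the algebra performed by adjust_accumulator_values: if the
   eliminated call had the form return a + m * f (...), the accumulator
   values passed along the back edge are

     a_acc' = a_acc + a * m_acc
     m_acc' = m_acc * m

   which matches the identity from the overview comment,
   a_acc + (a + m * f (...)) * m_acc = a_acc' + m_acc' * f (...).  */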
721 
/* Adjust the value of the return statement at the end of BB according to the
   M and A accumulators.  */
724 
725 static void
726 adjust_return_value (basic_block bb, tree m, tree a)
727 {
728   tree retval;
729   gimple ret_stmt = gimple_seq_last_stmt (bb_seq (bb));
730   gimple_stmt_iterator gsi = gsi_last_bb (bb);
731 
732   gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);
733 
734   retval = gimple_return_retval (ret_stmt);
735   if (!retval || retval == error_mark_node)
736     return;
737 
738   if (m)
739     retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
740 					   gsi);
741   if (a)
742     retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
743 					   gsi);
744   gimple_return_set_retval (ret_stmt, retval);
745   update_stmt (ret_stmt);
746 }
747 
/* Subtract COUNT and FREQUENCY from basic block BB and its
   outgoing edge.  */
750 static void
751 decrease_profile (basic_block bb, gcov_type count, int frequency)
752 {
753   edge e;
754   bb->count -= count;
755   if (bb->count < 0)
756     bb->count = 0;
757   bb->frequency -= frequency;
758   if (bb->frequency < 0)
759     bb->frequency = 0;
760   if (!single_succ_p (bb))
761     {
762       gcc_assert (!EDGE_COUNT (bb->succs));
763       return;
764     }
765   e = single_succ_edge (bb);
766   e->count -= count;
767   if (e->count < 0)
768     e->count = 0;
769 }
770 
771 /* Returns true if argument PARAM of the tail recursive call needs to be copied
772    when the call is eliminated.  */
773 
774 static bool
775 arg_needs_copy_p (tree param)
776 {
777   tree def;
778 
779   if (!is_gimple_reg (param) || !var_ann (param))
780     return false;
781 
782   /* Parameters that are only defined but never used need not be copied.  */
783   def = gimple_default_def (cfun, param);
784   if (!def)
785     return false;
786 
787   return true;
788 }
789 
/* Eliminates the tail call described by T.  */
792 
793 static void
794 eliminate_tail_call (struct tailcall *t)
795 {
796   tree param, rslt;
797   gimple stmt, call;
798   tree arg;
799   size_t idx;
800   basic_block bb, first;
801   edge e;
802   gimple phi;
803   gimple_stmt_iterator gsi;
804   gimple orig_stmt;
805 
806   stmt = orig_stmt = gsi_stmt (t->call_gsi);
807   bb = gsi_bb (t->call_gsi);
808 
809   if (dump_file && (dump_flags & TDF_DETAILS))
810     {
811       fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
812 	       bb->index);
813       print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
814       fprintf (dump_file, "\n");
815     }
816 
817   gcc_assert (is_gimple_call (stmt));
818 
819   first = single_succ (ENTRY_BLOCK_PTR);
820 
821   /* Remove the code after call_gsi that will become unreachable.  The
822      possibly unreachable code in other blocks is removed later in
823      cfg cleanup.  */
824   gsi = t->call_gsi;
825   gsi_next (&gsi);
826   while (!gsi_end_p (gsi))
827     {
828       gimple t = gsi_stmt (gsi);
829       /* Do not remove the return statement, so that redirect_edge_and_branch
830 	 sees how the block ends.  */
831       if (gimple_code (t) == GIMPLE_RETURN)
832 	break;
833 
834       gsi_remove (&gsi, true);
835       release_defs (t);
836     }
837 
  /* The number of executions of the function has been reduced by the
     tailcall.  */
839   e = single_succ_edge (gsi_bb (t->call_gsi));
840   decrease_profile (EXIT_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
841   decrease_profile (ENTRY_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
842   if (e->dest != EXIT_BLOCK_PTR)
843     decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));
844 
845   /* Replace the call by a jump to the start of function.  */
846   e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
847 				first);
848   gcc_assert (e);
849   PENDING_STMT (e) = NULL;
850 
851   /* Add phi node entries for arguments.  The ordering of the phi nodes should
852      be the same as the ordering of the arguments.  */
853   for (param = DECL_ARGUMENTS (current_function_decl),
854 	 idx = 0, gsi = gsi_start_phis (first);
855        param;
856        param = DECL_CHAIN (param), idx++)
857     {
858       if (!arg_needs_copy_p (param))
859 	continue;
860 
861       arg = gimple_call_arg (stmt, idx);
862       phi = gsi_stmt (gsi);
863       gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));
864 
865       add_phi_arg (phi, arg, e, gimple_location (stmt));
866       gsi_next (&gsi);
867     }
868 
869   /* Update the values of accumulators.  */
870   adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);
871 
872   call = gsi_stmt (t->call_gsi);
873   rslt = gimple_call_lhs (call);
874   if (rslt != NULL_TREE)
875     {
      /* The result of the call will no longer be defined, so adjust the
	 SSA_NAME_DEF_STMT accordingly.  */
878       SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
879     }
880 
881   gsi_remove (&t->call_gsi, true);
882   release_defs (call);
883 }
884 
885 /* Add phi nodes for the virtual operands defined in the function to the
886    header of the loop created by tail recursion elimination.
887 
888    Originally, we used to add phi nodes only for call clobbered variables,
889    as the value of the non-call clobbered ones obviously cannot be used
890    or changed within the recursive call.  However, the local variables
891    from multiple calls now share the same location, so the virtual ssa form
892    requires us to say that the location dies on further iterations of the loop,
893    which requires adding phi nodes.
894 */
895 static void
896 add_virtual_phis (void)
897 {
898   referenced_var_iterator rvi;
899   tree var;
900 
  /* The problematic part is that there is no way to know what
     to put into the phi nodes (in fact, a suitable ssa name does not
     even have to be available).  A solution would be to have an artificial
     use/kill for all virtual operands in the EXIT node.  Unless we have
     this, we cannot do much better than to rebuild the ssa form for
     possibly affected virtual ssa names from scratch.  */
907 
908   FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
909     {
910       if (!is_gimple_reg (var) && gimple_default_def (cfun, var) != NULL_TREE)
911 	mark_sym_for_renaming (var);
912     }
913 }
914 
915 /* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
916    mark the tailcalls for the sibcall optimization.  */
917 
918 static bool
919 optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
920 {
921   if (t->tail_recursion)
922     {
923       eliminate_tail_call (t);
924       return true;
925     }
926 
927   if (opt_tailcalls)
928     {
929       gimple stmt = gsi_stmt (t->call_gsi);
930 
931       gimple_call_set_tail (stmt, true);
932       if (dump_file && (dump_flags & TDF_DETAILS))
933         {
934 	  fprintf (dump_file, "Found tail call ");
935 	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
936 	  fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
937 	}
938     }
939 
940   return false;
941 }
942 
/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used to create the temporary
   variable for the accumulator.  The accumulator will be inserted into the
   phis of basic block BB, which must have a single predecessor, with an
   initial value INIT converted to the current function's return type.  */
948 
949 static tree
950 create_tailcall_accumulator (const char *label, basic_block bb, tree init)
951 {
952   tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
953   tree tmp = create_tmp_reg (ret_type, label);
954   gimple phi;
955 
956   add_referenced_var (tmp);
957   phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
959   add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
960 	       UNKNOWN_LOCATION);
961   return PHI_RESULT (phi);
962 }
963 
964 /* Optimizes tail calls in the function, turning the tail recursion
965    into iteration.  */
966 
967 static unsigned int
968 tree_optimize_tail_calls_1 (bool opt_tailcalls)
969 {
970   edge e;
971   bool phis_constructed = false;
972   struct tailcall *tailcalls = NULL, *act, *next;
973   bool changed = false;
974   basic_block first = single_succ (ENTRY_BLOCK_PTR);
975   tree param;
976   gimple stmt;
977   edge_iterator ei;
978 
979   if (!suitable_for_tail_opt_p ())
980     return 0;
981   if (opt_tailcalls)
982     opt_tailcalls = suitable_for_tail_call_opt_p ();
983 
984   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
985     {
      /* Only traverse the normal exits, i.e. those that end with a return
	 statement.  */
988       stmt = last_stmt (e->src);
989 
990       if (stmt
991 	  && gimple_code (stmt) == GIMPLE_RETURN)
992 	find_tail_calls (e->src, &tailcalls);
993     }
994 
995   /* Construct the phi nodes and accumulators if necessary.  */
996   a_acc = m_acc = NULL_TREE;
997   for (act = tailcalls; act; act = act->next)
998     {
999       if (!act->tail_recursion)
1000 	continue;
1001 
1002       if (!phis_constructed)
1003 	{
	  /* If the block has more than one predecessor or already contains
	     PHI nodes, split the edge from the entry block.  */
1006 	  if (!single_pred_p (first)
1007 	      || !gimple_seq_empty_p (phi_nodes (first)))
1008 	    first = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
1009 
1010 	  /* Copy the args if needed.  */
1011 	  for (param = DECL_ARGUMENTS (current_function_decl);
1012 	       param;
1013 	       param = DECL_CHAIN (param))
1014 	    if (arg_needs_copy_p (param))
1015 	      {
1016 		tree name = gimple_default_def (cfun, param);
1017 		tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
1018 		gimple phi;
1019 
1020 		set_default_def (param, new_name);
1021 		phi = create_phi_node (name, first);
1022 		SSA_NAME_DEF_STMT (name) = phi;
1023 		add_phi_arg (phi, new_name, single_pred_edge (first),
1024 			     EXPR_LOCATION (param));
1025 	      }
1026 	  phis_constructed = true;
1027 	}
1028 
1029       if (act->add && !a_acc)
1030 	a_acc = create_tailcall_accumulator ("add_acc", first,
1031 					     integer_zero_node);
1032 
1033       if (act->mult && !m_acc)
1034 	m_acc = create_tailcall_accumulator ("mult_acc", first,
1035 					     integer_one_node);
1036     }
1037 
1038   if (a_acc || m_acc)
1039     {
1040       /* When the tail call elimination using accumulators is performed,
1041 	 statements adding the accumulated value are inserted at all exits.
1042 	 This turns all other tail calls to non-tail ones.  */
1043       opt_tailcalls = false;
1044     }
1045 
1046   for (; tailcalls; tailcalls = next)
1047     {
1048       next = tailcalls->next;
1049       changed |= optimize_tail_call (tailcalls, opt_tailcalls);
1050       free (tailcalls);
1051     }
1052 
1053   if (a_acc || m_acc)
1054     {
1055       /* Modify the remaining return statements.  */
1056       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
1057 	{
1058 	  stmt = last_stmt (e->src);
1059 
1060 	  if (stmt
1061 	      && gimple_code (stmt) == GIMPLE_RETURN)
1062 	    adjust_return_value (e->src, m_acc, a_acc);
1063 	}
1064     }
1065 
1066   if (changed)
1067     free_dominance_info (CDI_DOMINATORS);
1068 
1069   if (phis_constructed)
1070     add_virtual_phis ();
1071   if (changed)
1072     return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
1073   return 0;
1074 }
1075 
1076 static unsigned int
1077 execute_tail_recursion (void)
1078 {
1079   return tree_optimize_tail_calls_1 (false);
1080 }
1081 
1082 static bool
1083 gate_tail_calls (void)
1084 {
1085   return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
1086 }
1087 
1088 static unsigned int
1089 execute_tail_calls (void)
1090 {
1091   return tree_optimize_tail_calls_1 (true);
1092 }
1093 
1094 struct gimple_opt_pass pass_tail_recursion =
1095 {
1096  {
1097   GIMPLE_PASS,
1098   "tailr",				/* name */
1099   gate_tail_calls,			/* gate */
1100   execute_tail_recursion,		/* execute */
1101   NULL,					/* sub */
1102   NULL,					/* next */
1103   0,					/* static_pass_number */
1104   TV_NONE,				/* tv_id */
1105   PROP_cfg | PROP_ssa,			/* properties_required */
1106   0,					/* properties_provided */
1107   0,					/* properties_destroyed */
1108   0,					/* todo_flags_start */
1109   TODO_verify_ssa	                /* todo_flags_finish */
1110  }
1111 };
1112 
1113 struct gimple_opt_pass pass_tail_calls =
1114 {
1115  {
1116   GIMPLE_PASS,
1117   "tailc",				/* name */
1118   gate_tail_calls,			/* gate */
1119   execute_tail_calls,			/* execute */
1120   NULL,					/* sub */
1121   NULL,					/* next */
1122   0,					/* static_pass_number */
1123   TV_NONE,				/* tv_id */
1124   PROP_cfg | PROP_ssa,			/* properties_required */
1125   0,					/* properties_provided */
1126   0,					/* properties_destroyed */
1127   0,					/* todo_flags_start */
1128   TODO_verify_ssa	                /* todo_flags_finish */
1129  }
1130 };
1131