1 /* Generic SSA value propagation engine.
2    Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
3    Free Software Foundation, Inc.
4    Contributed by Diego Novillo <dnovillo@redhat.com>
5 
6    This file is part of GCC.
7 
8    GCC is free software; you can redistribute it and/or modify it
9    under the terms of the GNU General Public License as published by the
10    Free Software Foundation; either version 3, or (at your option) any
11    later version.
12 
13    GCC is distributed in the hope that it will be useful, but WITHOUT
14    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16    for more details.
17 
18    You should have received a copy of the GNU General Public License
19    along with GCC; see the file COPYING3.  If not see
20    <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "flags.h"
28 #include "tm_p.h"
29 #include "basic-block.h"
30 #include "output.h"
31 #include "function.h"
32 #include "gimple-pretty-print.h"
33 #include "timevar.h"
34 #include "tree-dump.h"
35 #include "tree-flow.h"
36 #include "tree-pass.h"
37 #include "tree-ssa-propagate.h"
38 #include "langhooks.h"
39 #include "vec.h"
40 #include "value-prof.h"
41 #include "gimple.h"
42 
43 /* This file implements a generic value propagation engine based on
44    the same propagation used by the SSA-CCP algorithm [1].
45 
46    Propagation is performed by simulating the execution of every
47    statement that produces the value being propagated.  Simulation
48    proceeds as follows:
49 
50    1- Initially, all edges of the CFG are marked not executable and
51       the CFG worklist is seeded with all the statements in the entry
52       basic block (block 0).
53 
54    2- Every statement S is simulated with a call to the call-back
55       function SSA_PROP_VISIT_STMT.  This evaluation may produce 3
56       results:
57 
58       	SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
59 	    interest and does not affect any of the work lists.
60 
61 	SSA_PROP_VARYING: The value produced by S cannot be determined
62 	    at compile time.  Further simulation of S is not required.
63 	    If S is a conditional jump, all the outgoing edges for the
64 	    block are considered executable and added to the work
65 	    list.
66 
67 	SSA_PROP_INTERESTING: S produces a value that can be computed
68 	    at compile time.  Its result can be propagated into the
69 	    statements that feed from S.  Furthermore, if S is a
70 	    conditional jump, only the edge known to be taken is added
71 	    to the work list.  Edges that are known not to execute are
72 	    never simulated.
73 
74    3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI.  The
75       return value from SSA_PROP_VISIT_PHI has the same semantics as
76       described in #2.
77 
78    4- Three work lists are kept.  Statements are only added to these
79       lists if they produce one of SSA_PROP_INTERESTING or
80       SSA_PROP_VARYING.
81 
82    	CFG_BLOCKS contains the list of blocks to be simulated.
83 	    Blocks are added to this list if their incoming edges are
84 	    found executable.
85 
86 	VARYING_SSA_EDGES contains the list of statements that feed
87 	    from statements that produce an SSA_PROP_VARYING result.
88 	    These are simulated first to speed up processing.
89 
90 	INTERESTING_SSA_EDGES contains the list of statements that
91 	    feed from statements that produce an SSA_PROP_INTERESTING
92 	    result.
93 
94    5- Simulation terminates when all three work lists are drained.
95 
96    Before calling ssa_propagate, it is important to clear
97    prop_simulate_again_p for all the statements in the program that
98    should be simulated.  This initialization allows an implementation
99    to specify which statements should never be simulated.
100 
101    It is also important to compute def-use information before calling
102    ssa_propagate.
103 
104    References:
105 
106      [1] Constant propagation with conditional branches,
107          Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
108 
109      [2] Building an Optimizing Compiler,
110 	 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
111 
112      [3] Advanced Compiler Design and Implementation,
113 	 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
114 
115 /* Function pointers used to parameterize the propagation engine.  */
116 static ssa_prop_visit_stmt_fn ssa_prop_visit_stmt;
117 static ssa_prop_visit_phi_fn ssa_prop_visit_phi;
118 
119 /* Keep track of statements that have been added to one of the SSA
120    edges worklists.  This flag is used to avoid visiting statements
121    unnecessarily when draining an SSA edge worklist.  If while
122    simulating a basic block, we find a statement with
123    STMT_IN_SSA_EDGE_WORKLIST set, we clear it to prevent SSA edge
124    processing from visiting it again.
125 
126    NOTE: users of the propagation engine are not allowed to use
127    the GF_PLF_1 flag.  */
128 #define STMT_IN_SSA_EDGE_WORKLIST	GF_PLF_1
129 
130 /* A bitmap to keep track of executable blocks in the CFG.  */
131 static sbitmap executable_blocks;
132 
133 /* Array of control flow edges on the worklist.  */
134 static VEC(basic_block,heap) *cfg_blocks;
135 
136 static unsigned int cfg_blocks_num = 0;
137 static int cfg_blocks_tail;
138 static int cfg_blocks_head;
139 
140 static sbitmap bb_in_list;
141 
142 /* Worklist of SSA edges which will need reexamination as their
143    definition has changed.  SSA edges are def-use edges in the SSA
144    web.  For each D-U edge, we store the target statement or PHI node
145    U.  */
146 static GTY(()) VEC(gimple,gc) *interesting_ssa_edges;
147 
148 /* Identical to INTERESTING_SSA_EDGES.  For performance reasons, the
149    list of SSA edges is split into two.  One contains all SSA edges
150    who need to be reexamined because their lattice value changed to
151    varying (this worklist), and the other contains all other SSA edges
152    to be reexamined (INTERESTING_SSA_EDGES).
153 
154    Since most values in the program are VARYING, the ideal situation
155    is to move them to that lattice value as quickly as possible.
156    Thus, it doesn't make sense to process any other type of lattice
157    value until all VARYING values are propagated fully, which is one
158    thing using the VARYING worklist achieves.  In addition, if we
159    don't use a separate worklist for VARYING edges, we end up with
160    situations where lattice values move from
161    UNDEFINED->INTERESTING->VARYING instead of UNDEFINED->VARYING.  */
162 static GTY(()) VEC(gimple,gc) *varying_ssa_edges;
163 
164 
165 /* Return true if the block worklist empty.  */
166 
167 static inline bool
168 cfg_blocks_empty_p (void)
169 {
170   return (cfg_blocks_num == 0);
171 }
172 
173 
/* Add a basic block to the worklist.  The block must not be already
   in the worklist, and it must not be the ENTRY or EXIT block.

   The worklist is a circular buffer inside the CFG_BLOCKS vector,
   delimited by CFG_BLOCKS_HEAD and CFG_BLOCKS_TAIL; CFG_BLOCKS_NUM
   is the number of queued blocks.  */

static void
cfg_blocks_add (basic_block bb)
{
  bool head = false;

  gcc_assert (bb != ENTRY_BLOCK_PTR && bb != EXIT_BLOCK_PTR);
  gcc_assert (!TEST_BIT (bb_in_list, bb->index));

  if (cfg_blocks_empty_p ())
    {
      cfg_blocks_tail = cfg_blocks_head = 0;
      cfg_blocks_num = 1;
    }
  else
    {
      cfg_blocks_num++;
      if (cfg_blocks_num > VEC_length (basic_block, cfg_blocks))
	{
	  /* We have to grow the array now.  Adjust the queue to occupy
	     the full space of the original array.  We do not need to
	     initialize the newly allocated portion of the array
	     because we keep track of CFG_BLOCKS_HEAD and
	     CFG_BLOCKS_TAIL.  */
	  cfg_blocks_tail = VEC_length (basic_block, cfg_blocks);
	  cfg_blocks_head = 0;
	  VEC_safe_grow (basic_block, heap, cfg_blocks, 2 * cfg_blocks_tail);
	}
      /* Minor optimization: we prefer to see blocks with more
	 predecessors later, because there is more of a chance that
	 the incoming edges will be executable.  */
      else if (EDGE_COUNT (bb->preds)
	       >= EDGE_COUNT (VEC_index (basic_block, cfg_blocks,
					 cfg_blocks_head)->preds))
	cfg_blocks_tail = ((cfg_blocks_tail + 1)
			   % VEC_length (basic_block, cfg_blocks));
      else
	{
	  /* Fewer predecessors than the current head: insert at the
	     front of the queue, wrapping the head index around the
	     circular buffer if necessary.  */
	  if (cfg_blocks_head == 0)
	    cfg_blocks_head = VEC_length (basic_block, cfg_blocks);
	  --cfg_blocks_head;
	  head = true;
	}
    }

  VEC_replace (basic_block, cfg_blocks,
	       head ? cfg_blocks_head : cfg_blocks_tail,
	       bb);
  SET_BIT (bb_in_list, bb->index);
}
226 
227 
228 /* Remove a block from the worklist.  */
229 
230 static basic_block
231 cfg_blocks_get (void)
232 {
233   basic_block bb;
234 
235   bb = VEC_index (basic_block, cfg_blocks, cfg_blocks_head);
236 
237   gcc_assert (!cfg_blocks_empty_p ());
238   gcc_assert (bb);
239 
240   cfg_blocks_head = ((cfg_blocks_head + 1)
241 		     % VEC_length (basic_block, cfg_blocks));
242   --cfg_blocks_num;
243   RESET_BIT (bb_in_list, bb->index);
244 
245   return bb;
246 }
247 
248 
/* We have just defined a new value for VAR.  If IS_VARYING is true,
   add all immediate uses of VAR to VARYING_SSA_EDGES, otherwise add
   them to INTERESTING_SSA_EDGES.  */

static void
add_ssa_edge (tree var, bool is_varying)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      gimple use_stmt = USE_STMT (use_p);

      /* Only queue statements the propagator is still willing to
	 simulate, and only once: the PLF flag records membership in
	 either SSA edge worklist.  */
      if (prop_simulate_again_p (use_stmt)
	  && !gimple_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST))
	{
	  gimple_set_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST, true);
	  if (is_varying)
	    VEC_safe_push (gimple, gc, varying_ssa_edges, use_stmt);
	  else
	    VEC_safe_push (gimple, gc, interesting_ssa_edges, use_stmt);
	}
    }
}
274 
275 
276 /* Add edge E to the control flow worklist.  */
277 
278 static void
279 add_control_edge (edge e)
280 {
281   basic_block bb = e->dest;
282   if (bb == EXIT_BLOCK_PTR)
283     return;
284 
285   /* If the edge had already been executed, skip it.  */
286   if (e->flags & EDGE_EXECUTABLE)
287     return;
288 
289   e->flags |= EDGE_EXECUTABLE;
290 
291   /* If the block is already in the list, we're done.  */
292   if (TEST_BIT (bb_in_list, bb->index))
293     return;
294 
295   cfg_blocks_add (bb);
296 
297   if (dump_file && (dump_flags & TDF_DETAILS))
298     fprintf (dump_file, "Adding Destination of edge (%d -> %d) to worklist\n\n",
299 	e->src->index, e->dest->index);
300 }
301 
302 
/* Simulate the execution of STMT and update the work lists accordingly.  */

static void
simulate_stmt (gimple stmt)
{
  enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
  edge taken_edge = NULL;
  tree output_name = NULL_TREE;

  /* Don't bother visiting statements that are already
     considered varying by the propagator.  */
  if (!prop_simulate_again_p (stmt))
    return;

  /* PHI nodes go through the SSA_PROP_VISIT_PHI call-back; all other
     statements through SSA_PROP_VISIT_STMT, which may also report a
     known taken edge and the SSA name it defines.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      val = ssa_prop_visit_phi (stmt);
      output_name = gimple_phi_result (stmt);
    }
  else
    val = ssa_prop_visit_stmt (stmt, &taken_edge, &output_name);

  if (val == SSA_PROP_VARYING)
    {
      /* A varying value can never change again, so do not simulate
	 this statement any further.  */
      prop_set_simulate_again (stmt, false);

      /* If the statement produced a new varying value, add the SSA
	 edges coming out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name, true);

      /* If STMT transfers control out of its basic block, add
	 all outgoing edges to the work list.  */
      if (stmt_ends_bb_p (stmt))
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    add_control_edge (e);
	}
    }
  else if (val == SSA_PROP_INTERESTING)
    {
      /* If the statement produced new value, add the SSA edges coming
	 out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name, false);

      /* If we know which edge is going to be taken out of this block,
	 add it to the CFG work list.  */
      if (taken_edge)
	add_control_edge (taken_edge);
    }
}
358 
/* Process an SSA edge worklist.  WORKLIST is the SSA edge worklist to
   drain.  This pops statements off the given WORKLIST and processes
   them until there are no more statements on WORKLIST.
   We take a pointer to WORKLIST because it may be reallocated when an
   SSA edge is added to it in simulate_stmt.  */

static void
process_ssa_edge_worklist (VEC(gimple,gc) **worklist)
{
  /* Drain the entire worklist.  */
  while (VEC_length (gimple, *worklist) > 0)
    {
      basic_block bb;

      /* Pull the statement to simulate off the worklist.  */
      gimple stmt = VEC_pop (gimple, *worklist);

      /* If this statement was already visited by simulate_block, then
	 we don't need to visit it again here.  simulate_block clears
	 the PLF flag to "cancel" stale worklist entries.  */
      if (!gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
	continue;

      /* STMT is no longer in a worklist.  */
      gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	}

      bb = gimple_bb (stmt);

      /* PHI nodes are always visited, regardless of whether or not
	 the destination block is executable.  Otherwise, visit the
	 statement only if its block is marked executable.  */
      if (gimple_code (stmt) == GIMPLE_PHI
	  || TEST_BIT (executable_blocks, bb->index))
	simulate_stmt (stmt);
    }
}
400 
401 
/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

static void
simulate_block (basic_block block)
{
  gimple_stmt_iterator gsi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
    simulate_stmt (gsi_stmt (gsi));

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (!TEST_BIT (executable_blocks, block->index))
    {
      gimple_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;
      edge_iterator ei;

      /* Note that we have simulated this block.  */
      SET_BIT (executable_blocks, block->index);

      for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
	{
	  gimple stmt = gsi_stmt (j);

	  /* If this statement is already in the worklist then
	     "cancel" it.  The reevaluation implied by the worklist
	     entry will produce the same value we generate here and
	     thus reevaluating it again from the worklist is
	     pointless.  */
	  if (gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
	    gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);

	  simulate_stmt (stmt);
	}

      /* We can not predict when abnormal and EH edges will be executed, so
	 once a block is considered executable, we consider any
	 outgoing abnormal edges as executable.

	 TODO: This is not exactly true.  Simplifying statement might
	 prove it non-throwing and also computed goto can be handled
	 when destination is known.

	 At the same time, if this block has only one successor that is
	 reached by non-abnormal edges, then add that successor to the
	 worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
	    add_control_edge (e);
	  else
	    {
	      normal_edge_count++;
	      normal_edge = e;
	    }
	}

      /* A single normal successor must be taken; queue it without
	 waiting for a conditional jump to prove it executable.  */
      if (normal_edge_count == 1)
	add_control_edge (normal_edge);
    }
}
477 
478 
/* Initialize local data structures and work lists.  */

static void
ssa_prop_init (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb;

  /* Worklists of SSA edges.  */
  interesting_ssa_edges = VEC_alloc (gimple, gc, 20);
  varying_ssa_edges = VEC_alloc (gimple, gc, 20);

  /* Bitmap of blocks that have been simulated at least once.  */
  executable_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (executable_blocks);

  /* Bitmap of blocks currently queued in CFG_BLOCKS.  */
  bb_in_list = sbitmap_alloc (last_basic_block);
  sbitmap_zero (bb_in_list);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_immediate_uses (dump_file);

  /* The CFG worklist is a circular buffer; pre-grow it so head/tail
     indices have space to work with.  */
  cfg_blocks = VEC_alloc (basic_block, heap, 20);
  VEC_safe_grow (basic_block, heap, cfg_blocks, 20);

  /* Initially assume that every edge in the CFG is not executable.
     (including the edges coming out of ENTRY_BLOCK_PTR).  Also clear
     the worklist-membership flag on every statement and PHI node.  */
  FOR_ALL_BB (bb)
    {
      gimple_stmt_iterator si;

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
    }

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    add_control_edge (e);
}
525 
526 
527 /* Free allocated storage.  */
528 
529 static void
530 ssa_prop_fini (void)
531 {
532   VEC_free (gimple, gc, interesting_ssa_edges);
533   VEC_free (gimple, gc, varying_ssa_edges);
534   VEC_free (basic_block, heap, cfg_blocks);
535   cfg_blocks = NULL;
536   sbitmap_free (bb_in_list);
537   sbitmap_free (executable_blocks);
538 }
539 
540 
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning
   any attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_binary:
    case tcc_comparison:
      /* Both operands of a binary or comparison expression must
	 already be GIMPLE values.  */
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
        {
        case ADDR_EXPR:
          {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    /* Walk down the reference chain checking that every
	       embedded array index is a GIMPLE value.  */
            t = TREE_OPERAND (expr, 0);
            while (handled_component_p (t))
              {
                /* ??? More checks needed, see the GIMPLE verifier.  */
                if ((TREE_CODE (t) == ARRAY_REF
                     || TREE_CODE (t) == ARRAY_RANGE_REF)
                    && !is_gimple_val (TREE_OPERAND (t, 1)))
                  return false;
                t = TREE_OPERAND (t, 0);
              }
            if (!is_gimple_id (t))
              return false;
          }
          break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      /* COND_EXPR and VEC_COND_EXPR allow a condition
		 expression as the first operand; all other ternary
		 codes require plain GIMPLE values.  */
	      if (((code == VEC_COND_EXPR || code == COND_EXPR)
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code != SSA_NAME)
        return false;
      break;

    default:
      return false;
    }

  return true;
}
633 
634 
635 /* Return true if EXPR is a CALL_EXPR suitable for representation
636    as a single GIMPLE_CALL statement.  If the arguments require
637    further gimplification, return false.  */
638 
639 static bool
640 valid_gimple_call_p (tree expr)
641 {
642   unsigned i, nargs;
643 
644   if (TREE_CODE (expr) != CALL_EXPR)
645     return false;
646 
647   nargs = call_expr_nargs (expr);
648   for (i = 0; i < nargs; i++)
649     {
650       tree arg = CALL_EXPR_ARG (expr, i);
651       if (is_gimple_reg_type (arg))
652 	{
653 	  if (!is_gimple_val (arg))
654 	    return false;
655 	}
656       else
657 	if (!is_gimple_lvalue (arg))
658 	  return false;
659     }
660 
661   return true;
662 }
663 
664 
665 /* Make SSA names defined by OLD_STMT point to NEW_STMT
666    as their defining statement.  */
667 
668 void
669 move_ssa_defining_stmt_for_defs (gimple new_stmt, gimple old_stmt)
670 {
671   tree var;
672   ssa_op_iter iter;
673 
674   if (gimple_in_ssa_p (cfun))
675     {
676       /* Make defined SSA_NAMEs point to the new
677          statement as their definition.  */
678       FOR_EACH_SSA_TREE_OPERAND (var, old_stmt, iter, SSA_OP_ALL_DEFS)
679         {
680           if (TREE_CODE (var) == SSA_NAME)
681             SSA_NAME_DEF_STMT (var) = new_stmt;
682         }
683     }
684 }
685 
/* Helper function for update_gimple_call and update_call_from_tree.
   A GIMPLE_CALL STMT is being replaced with GIMPLE_CALL NEW_STMT.
   Copy the LHS, virtual operands, location and (if unset) block from
   STMT to NEW_STMT, then swap NEW_STMT into STMT's place.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple new_stmt,
			   gimple stmt)
{
  gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
  /* Make the SSA names STMT defined point to NEW_STMT instead.  */
  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}
702 
/* Update a GIMPLE_CALL statement at iterator *SI_P to call FN
   with NARGS arguments, where the arguments (in GIMPLE form)
   follow the NARGS argument.  Always returns true.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gimple new_stmt, stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}
720 
/* Update a GIMPLE_CALL statement at iterator *SI_P to reflect the
   value of EXPR, which is expected to be the result of folding the
   call.  This can only be done if EXPR is a CALL_EXPR with valid
   GIMPLE operands as arguments, or if it is a suitable RHS expression
   for a GIMPLE_ASSIGN.  More complex expressions will require
   gimplification, which will introduce additional statements.  In this
   event, no update is performed, and the function returns false.
   Note that we cannot mutate a GIMPLE_CALL in-place, so we always
   replace the statement at *SI_P with an entirely new statement.
   The new statement need not be a call, e.g., if the original call
   folded to a constant.  */

bool
update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  gimple stmt = gsi_stmt (*si_p);

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      VEC(tree, heap) *args = NULL;
      gimple new_stmt;

      /* Collect the folded arguments into a vector for the new
	 GIMPLE_CALL.  */
      if (nargs > 0)
        {
          args = VEC_alloc (tree, heap, nargs);
          VEC_safe_grow (tree, heap, args, nargs);

          for (i = 0; i < nargs; i++)
            VEC_replace (tree, args, i, CALL_EXPR_ARG (expr, i));
        }

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      VEC_free (tree, heap, args);

      return true;
    }
  else if (valid_gimple_rhs_p (expr))
    {
      tree lhs = gimple_call_lhs (stmt);
      gimple new_stmt;

      /* The call has simplified to an expression
         that cannot be represented as a GIMPLE_CALL. */
      if (lhs)
        {
          /* A value is expected.
             Introduce a new GIMPLE_ASSIGN statement.  */
          STRIP_USELESS_TYPE_CONVERSION (expr);
          new_stmt = gimple_build_assign (lhs, expr);
          move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
        }
      else if (!TREE_SIDE_EFFECTS (expr))
        {
          /* No value is expected, and EXPR has no effect.
             Replace it with an empty statement.  */
          new_stmt = gimple_build_nop ();
	  if (gimple_in_ssa_p (cfun))
	    {
	      /* The old call's definitions are gone for good; drop
		 them from the SSA web.  */
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
        }
      else
        {
          /* No value is expected, but EXPR has an effect,
             e.g., it could be a reference to a volatile
             variable.  Create an assignment statement
             with a dummy (unused) lhs variable.  */
          STRIP_USELESS_TYPE_CONVERSION (expr);
          lhs = create_tmp_var (TREE_TYPE (expr), NULL);
          new_stmt = gimple_build_assign (lhs, expr);
          add_referenced_var (lhs);
	  if (gimple_in_ssa_p (cfun))
	    lhs = make_ssa_name (lhs, new_stmt);
          gimple_assign_set_lhs (new_stmt, lhs);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
          move_ssa_defining_stmt_for_defs (new_stmt, stmt);
        }
      gimple_set_location (new_stmt, gimple_location (stmt));
      gsi_replace (si_p, new_stmt, false);
      return true;
    }
  else
    /* The call simplified to an expression that is
       not a valid GIMPLE RHS.  */
    return false;
}
816 
817 
/* Entry point to the propagation engine.

   VISIT_STMT is called for every statement visited.
   VISIT_PHI is called for every PHI node visited.  */

void
ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
	       ssa_prop_visit_phi_fn visit_phi)
{
  ssa_prop_visit_stmt = visit_stmt;
  ssa_prop_visit_phi = visit_phi;

  ssa_prop_init ();

  /* Iterate until the worklists are empty.  */
  while (!cfg_blocks_empty_p ()
	 || VEC_length (gimple, interesting_ssa_edges) > 0
	 || VEC_length (gimple, varying_ssa_edges) > 0)
    {
      if (!cfg_blocks_empty_p ())
	{
	  /* Pull the next block to simulate off the worklist.  */
	  basic_block dest_block = cfg_blocks_get ();
	  simulate_block (dest_block);
	}

      /* In order to move things to varying as quickly as
	 possible, process the VARYING_SSA_EDGES worklist first.  */
      process_ssa_edge_worklist (&varying_ssa_edges);

      /* Now process the INTERESTING_SSA_EDGES worklist.  */
      process_ssa_edge_worklist (&interesting_ssa_edges);
    }

  ssa_prop_fini ();
}
854 
855 
856 /* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
857    is a non-volatile pointer dereference, a structure reference or a
858    reference to a single _DECL.  Ignore volatile memory references
859    because they are not interesting for the optimizers.  */
860 
861 bool
862 stmt_makes_single_store (gimple stmt)
863 {
864   tree lhs;
865 
866   if (gimple_code (stmt) != GIMPLE_ASSIGN
867       && gimple_code (stmt) != GIMPLE_CALL)
868     return false;
869 
870   if (!gimple_vdef (stmt))
871     return false;
872 
873   lhs = gimple_get_lhs (stmt);
874 
875   /* A call statement may have a null LHS.  */
876   if (!lhs)
877     return false;
878 
879   return (!TREE_THIS_VOLATILE (lhs)
880           && (DECL_P (lhs)
881 	      || REFERENCE_CLASS_P (lhs)));
882 }
883 
884 
/* Propagation statistics.  */
struct prop_stats_d
{
  long num_const_prop;		/* Constants propagated into uses.  */
  long num_copy_prop;		/* SSA name copies propagated into uses.  */
  long num_stmts_folded;	/* Statements simplified by folding.  */
  long num_dce;			/* Statements removed as trivially dead.  */
};

/* Counters accumulated during final substitution and folding.  */
static struct prop_stats_d prop_stats;
895 
/* Replace USE references in statement STMT with the values returned
   by the GET_VALUE callback.  Return true if at least one reference
   was replaced.  */

static bool
replace_uses_in (gimple stmt, ssa_prop_get_value_fn get_value)
{
  bool replaced = false;
  use_operand_p use;
  ssa_op_iter iter;

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      tree tuse = USE_FROM_PTR (use);
      tree val = (*get_value) (tuse);

      /* Nothing to do when no value is known or it is the use
	 itself.  */
      if (val == tuse || val == NULL_TREE)
	continue;

      /* Propagating into ASM operands has extra restrictions.  */
      if (gimple_code (stmt) == GIMPLE_ASM
	  && !may_propagate_copy_into_asm (tuse))
	continue;

      if (!may_propagate_copy (tuse, val))
	continue;

      if (TREE_CODE (val) != SSA_NAME)
	prop_stats.num_const_prop++;
      else
	prop_stats.num_copy_prop++;

      propagate_value (use, val);

      replaced = true;
    }

  return replaced;
}
933 
934 
/* Replace propagated values into all the arguments for PHI using the
   values returned by the GET_VALUE callback.  */

static void
replace_phi_args_in (gimple phi, ssa_prop_get_value_fn get_value)
{
  size_t i;
  bool replaced = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Folding PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      /* Constant arguments are left alone; only SSA names can have a
	 lattice value to substitute.  */
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree val = (*get_value) (arg);

	  if (val && val != arg && may_propagate_copy (arg, val))
	    {
	      if (TREE_CODE (val) != SSA_NAME)
		prop_stats.num_const_prop++;
	      else
		prop_stats.num_copy_prop++;

	      propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
	      replaced = true;

	      /* If we propagated a copy and this argument flows
		 through an abnormal edge, update the replacement
		 accordingly.  */
	      if (TREE_CODE (val) == SSA_NAME
		  && gimple_phi_arg_edge (phi, i)->flags & EDGE_ABNORMAL)
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
	    }
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!replaced)
	fprintf (dump_file, "No folding possible\n");
      else
	{
	  fprintf (dump_file, "Folded into: ");
	  print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
}
990 
991 
992 /* Perform final substitution and folding of propagated values.
993 
994    PROP_VALUE[I] contains the single value that should be substituted
995    at every use of SSA name N_I.  If PROP_VALUE is NULL, no values are
996    substituted.
997 
998    If FOLD_FN is non-NULL the function will be invoked on all statements
999    before propagating values for pass specific simplification.
1000 
1001    DO_DCE is true if trivially dead stmts can be removed.
1002 
1003    If DO_DCE is true, the statements within a BB are walked from
1004    last to first element.  Otherwise we scan from first to last element.
1005 
1006    Return TRUE when something changed.  */
1007 
1008 bool
1009 substitute_and_fold (ssa_prop_get_value_fn get_value_fn,
1010 		     ssa_prop_fold_stmt_fn fold_fn,
1011 		     bool do_dce)
1012 {
1013   basic_block bb;
1014   bool something_changed = false;
1015   unsigned i;
1016 
1017   if (!get_value_fn && !fold_fn)
1018     return false;
1019 
1020   if (dump_file && (dump_flags & TDF_DETAILS))
1021     fprintf (dump_file, "\nSubstituting values and folding statements\n\n");
1022 
1023   memset (&prop_stats, 0, sizeof (prop_stats));
1024 
1025   /* Substitute lattice values at definition sites.  */
1026   if (get_value_fn)
1027     for (i = 1; i < num_ssa_names; ++i)
1028       {
1029 	tree name = ssa_name (i);
1030 	tree val;
1031 	gimple def_stmt;
1032 	gimple_stmt_iterator gsi;
1033 
1034 	if (!name
1035 	    || !is_gimple_reg (name))
1036 	  continue;
1037 
1038 	def_stmt = SSA_NAME_DEF_STMT (name);
1039 	if (gimple_nop_p (def_stmt)
1040 	    /* Do not substitute ASSERT_EXPR rhs, this will confuse VRP.  */
1041 	    || (gimple_assign_single_p (def_stmt)
1042 		&& gimple_assign_rhs_code (def_stmt) == ASSERT_EXPR)
1043 	    || !(val = (*get_value_fn) (name))
1044 	    || !may_propagate_copy (name, val))
1045 	  continue;
1046 
1047 	gsi = gsi_for_stmt (def_stmt);
1048 	if (is_gimple_assign (def_stmt))
1049 	  {
1050 	    gimple_assign_set_rhs_with_ops (&gsi, TREE_CODE (val),
1051 					    val, NULL_TREE);
1052 	    gcc_assert (gsi_stmt (gsi) == def_stmt);
1053 	    if (maybe_clean_eh_stmt (def_stmt))
1054 	      gimple_purge_dead_eh_edges (gimple_bb (def_stmt));
1055 	    update_stmt (def_stmt);
1056 	  }
1057 	else if (is_gimple_call (def_stmt))
1058 	  {
1059 	    int flags = gimple_call_flags (def_stmt);
1060 
1061 	    /* Don't optimize away calls that have side-effects.  */
1062 	    if ((flags & (ECF_CONST|ECF_PURE)) == 0
1063 		|| (flags & ECF_LOOPING_CONST_OR_PURE))
1064 	      continue;
1065 	    if (update_call_from_tree (&gsi, val)
1066 		&& maybe_clean_or_replace_eh_stmt (def_stmt, gsi_stmt (gsi)))
1067 	      gimple_purge_dead_eh_edges (gimple_bb (gsi_stmt (gsi)));
1068 	  }
1069 	else if (gimple_code (def_stmt) == GIMPLE_PHI)
1070 	  {
1071 	    gimple new_stmt = gimple_build_assign (name, val);
1072 	    gimple_stmt_iterator gsi2;
1073 	    SSA_NAME_DEF_STMT (name) = new_stmt;
1074 	    gsi2 = gsi_after_labels (gimple_bb (def_stmt));
1075 	    gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
1076 	    remove_phi_node (&gsi, false);
1077 	  }
1078 
1079 	something_changed = true;
1080       }
1081 
1082   /* Propagate into all uses and fold.  */
1083   FOR_EACH_BB (bb)
1084     {
1085       gimple_stmt_iterator i;
1086 
1087       /* Propagate known values into PHI nodes.  */
1088       if (get_value_fn)
1089 	for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
1090 	  replace_phi_args_in (gsi_stmt (i), get_value_fn);
1091 
1092       /* Propagate known values into stmts.  Do a backward walk if
1093          do_dce is true. In some case it exposes
1094 	 more trivially deletable stmts to walk backward.  */
1095       for (i = (do_dce ? gsi_last_bb (bb) : gsi_start_bb (bb)); !gsi_end_p (i);)
1096 	{
1097           bool did_replace;
1098 	  gimple stmt = gsi_stmt (i);
1099 	  gimple old_stmt;
1100 	  enum gimple_code code = gimple_code (stmt);
1101 	  gimple_stmt_iterator oldi;
1102 
1103 	  oldi = i;
1104 	  if (do_dce)
1105 	    gsi_prev (&i);
1106 	  else
1107 	    gsi_next (&i);
1108 
1109 	  /* Ignore ASSERT_EXPRs.  They are used by VRP to generate
1110 	     range information for names and they are discarded
1111 	     afterwards.  */
1112 
1113 	  if (code == GIMPLE_ASSIGN
1114 	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == ASSERT_EXPR)
1115 	    continue;
1116 
1117 	  /* No point propagating into a stmt whose result is not used,
1118 	     but instead we might be able to remove a trivially dead stmt.
1119 	     Don't do this when called from VRP, since the SSA_NAME which
1120 	     is going to be released could be still referenced in VRP
1121 	     ranges.  */
1122 	  if (do_dce
1123 	      && gimple_get_lhs (stmt)
1124 	      && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
1125 	      && has_zero_uses (gimple_get_lhs (stmt))
1126 	      && !stmt_could_throw_p (stmt)
1127 	      && !gimple_has_side_effects (stmt))
1128 	    {
1129 	      gimple_stmt_iterator i2;
1130 
1131 	      if (dump_file && dump_flags & TDF_DETAILS)
1132 		{
1133 		  fprintf (dump_file, "Removing dead stmt ");
1134 		  print_gimple_stmt (dump_file, stmt, 0, 0);
1135 		  fprintf (dump_file, "\n");
1136 		}
1137 	      prop_stats.num_dce++;
1138 	      i2 = gsi_for_stmt (stmt);
1139 	      gsi_remove (&i2, true);
1140 	      release_defs (stmt);
1141 	      continue;
1142 	    }
1143 
1144 	  /* Replace the statement with its folded version and mark it
1145 	     folded.  */
1146 	  did_replace = false;
1147 	  if (dump_file && (dump_flags & TDF_DETAILS))
1148 	    {
1149 	      fprintf (dump_file, "Folding statement: ");
1150 	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1151 	    }
1152 
1153 	  old_stmt = stmt;
1154 
1155 	  /* Some statements may be simplified using propagator
1156 	     specific information.  Do this before propagating
1157 	     into the stmt to not disturb pass specific information.  */
1158 	  if (fold_fn
1159 	      && (*fold_fn)(&oldi))
1160 	    {
1161 	      did_replace = true;
1162 	      prop_stats.num_stmts_folded++;
1163 	      stmt = gsi_stmt (oldi);
1164 	      update_stmt (stmt);
1165 	    }
1166 
1167 	  /* Replace real uses in the statement.  */
1168 	  if (get_value_fn)
1169 	    did_replace |= replace_uses_in (stmt, get_value_fn);
1170 
1171 	  /* If we made a replacement, fold the statement.  */
1172 	  if (did_replace)
1173 	    fold_stmt (&oldi);
1174 
1175 	  /* Now cleanup.  */
1176 	  if (did_replace)
1177 	    {
1178 	      stmt = gsi_stmt (oldi);
1179 
1180               /* If we cleaned up EH information from the statement,
1181                  remove EH edges.  */
1182 	      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
1183 		gimple_purge_dead_eh_edges (bb);
1184 
1185               if (is_gimple_assign (stmt)
1186                   && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
1187                       == GIMPLE_SINGLE_RHS))
1188               {
1189                 tree rhs = gimple_assign_rhs1 (stmt);
1190 
1191                 if (TREE_CODE (rhs) == ADDR_EXPR)
1192                   recompute_tree_invariant_for_addr_expr (rhs);
1193               }
1194 
1195 	      /* Determine what needs to be done to update the SSA form.  */
1196 	      update_stmt (stmt);
1197 	      if (!is_gimple_debug (stmt))
1198 		something_changed = true;
1199 	    }
1200 
1201 	  if (dump_file && (dump_flags & TDF_DETAILS))
1202 	    {
1203 	      if (did_replace)
1204 		{
1205 		  fprintf (dump_file, "Folded into: ");
1206 		  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1207 		  fprintf (dump_file, "\n");
1208 		}
1209 	      else
1210 		fprintf (dump_file, "Not folded\n");
1211 	    }
1212 	}
1213     }
1214 
1215   statistics_counter_event (cfun, "Constants propagated",
1216 			    prop_stats.num_const_prop);
1217   statistics_counter_event (cfun, "Copies propagated",
1218 			    prop_stats.num_copy_prop);
1219   statistics_counter_event (cfun, "Statements folded",
1220 			    prop_stats.num_stmts_folded);
1221   statistics_counter_event (cfun, "Statements deleted",
1222 			    prop_stats.num_dce);
1223   return something_changed;
1224 }
1225 
1226 #include "gt-tree-ssa-propagate.h"
1227