/* Generic SSA value propagation engine.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 3, or (at your option) any
   later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "dumpfile.h"
#include "tree-flow.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "vec.h"
#include "value-prof.h"
#include "gimple.h"

/* This file implements a generic value propagation engine based on
   the same propagation used by the SSA-CCP algorithm [1].

   Propagation is performed by simulating the execution of every
   statement that produces the value being propagated.  Simulation
   proceeds as follows:

   1- Initially, all edges of the CFG are marked not executable and
      the CFG worklist is seeded with all the statements in the entry
      basic block (block 0).

   2- Every statement S is simulated with a call to the call-back
      function SSA_PROP_VISIT_STMT.  This evaluation may produce 3
      results:

        SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
            interest and does not affect any of the work lists.

        SSA_PROP_VARYING: The value produced by S cannot be determined
            at compile time.  Further simulation of S is not required.
            If S is a conditional jump, all the outgoing edges for the
            block are considered executable and added to the work
            list.

        SSA_PROP_INTERESTING: S produces a value that can be computed
            at compile time.  Its result can be propagated into the
            statements that feed from S.  Furthermore, if S is a
            conditional jump, only the edge known to be taken is added
            to the work list.  Edges that are known not to execute are
            never simulated.

   3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI.  The
      return value from SSA_PROP_VISIT_PHI has the same semantics as
      described in #2.

   4- Three work lists are kept.  Statements are only added to these
      lists if they produce one of SSA_PROP_INTERESTING or
      SSA_PROP_VARYING.

        CFG_BLOCKS contains the list of blocks to be simulated.
            Blocks are added to this list if their incoming edges are
            found executable.

        VARYING_SSA_EDGES contains the list of statements that feed
            from statements that produce an SSA_PROP_VARYING result.
            These are simulated first to speed up processing.

        INTERESTING_SSA_EDGES contains the list of statements that
            feed from statements that produce an SSA_PROP_INTERESTING
            result.

   5- Simulation terminates when all three work lists are drained.
   Before calling ssa_propagate, it is important to initialize the
   simulate-again flag of every statement in the program with
   prop_set_simulate_again: statements whose flag is left clear are
   never simulated.  This initialization allows an implementation to
   specify which statements should never be simulated.

   It is also important to compute def-use information before calling
   ssa_propagate.
   References:

     [1] Constant propagation with conditional branches,
         Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     [2] Building an Optimizing Compiler,
         Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     [3] Advanced Compiler Design and Implementation,
         Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
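
/* As an illustrative sketch of how a client parameterizes the engine
   (the callbacks below are hypothetical; real clients such as the CCP
   and VRP passes implement much richer lattices):  */
#if 0
/* Degenerate lattice: every interesting definition immediately
   becomes VARYING.  */
static enum ssa_prop_result
example_visit_stmt (gimple stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
		    tree *output_p)
{
  tree lhs = gimple_get_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    {
      *output_p = lhs;
      return SSA_PROP_VARYING;
    }
  return SSA_PROP_NOT_INTERESTING;
}

static enum ssa_prop_result
example_visit_phi (gimple phi ATTRIBUTE_UNUSED)
{
  return SSA_PROP_VARYING;
}

/* After marking the statements to simulate with
   prop_set_simulate_again, a pass would then run:
     ssa_propagate (example_visit_stmt, example_visit_phi);  */
#endif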

/* Function pointers used to parameterize the propagation engine.  */
static ssa_prop_visit_stmt_fn ssa_prop_visit_stmt;
static ssa_prop_visit_phi_fn ssa_prop_visit_phi;

/* Keep track of statements that have been added to one of the SSA
   edges worklists.  This flag is used to avoid visiting statements
   unnecessarily when draining an SSA edge worklist.  If, while
   simulating a basic block, we find a statement with
   STMT_IN_SSA_EDGE_WORKLIST set, we clear it to prevent SSA edge
   processing from visiting it again.

   NOTE: users of the propagation engine are not allowed to use
   the GF_PLF_1 flag.  */
#define STMT_IN_SSA_EDGE_WORKLIST	GF_PLF_1

/* A bitmap to keep track of executable blocks in the CFG.  */
static sbitmap executable_blocks;

/* Circular array of basic blocks on the CFG worklist.  */
static vec<basic_block> cfg_blocks;

/* Number of blocks currently on the worklist, and the positions of
   its tail and head within the circular CFG_BLOCKS array.  */
static unsigned int cfg_blocks_num = 0;
static int cfg_blocks_tail;
static int cfg_blocks_head;

/* A bitmap of blocks that are currently on the CFG worklist.  */
static sbitmap bb_in_list;

/* Worklist of SSA edges which will need reexamination as their
   definition has changed.  SSA edges are def-use edges in the SSA
   web.  For each D-U edge, we store the target statement or PHI node
   U.  */
static GTY(()) vec<gimple, va_gc> *interesting_ssa_edges;

/* Identical to INTERESTING_SSA_EDGES.  For performance reasons, the
   list of SSA edges is split into two.  One contains all SSA edges
   that need to be reexamined because their lattice value changed to
   varying (this worklist), and the other contains all other SSA edges
   to be reexamined (INTERESTING_SSA_EDGES).

   Since most values in the program are VARYING, the ideal situation
   is to move them to that lattice value as quickly as possible.
   Thus, it doesn't make sense to process any other type of lattice
   value until all VARYING values are propagated fully, which is one
   thing using the VARYING worklist achieves.  In addition, if we
   don't use a separate worklist for VARYING edges, we end up with
   situations where lattice values move from
   UNDEFINED->INTERESTING->VARYING instead of UNDEFINED->VARYING.  */
static GTY(()) vec<gimple, va_gc> *varying_ssa_edges;


/* Return true if the block worklist is empty.  */

static inline bool
cfg_blocks_empty_p (void)
{
  return (cfg_blocks_num == 0);
}


/* Add a basic block to the worklist.  The block must not already be
   in the worklist, and it must not be the ENTRY or EXIT block.  */

static void
cfg_blocks_add (basic_block bb)
{
  bool head = false;

  gcc_assert (bb != ENTRY_BLOCK_PTR && bb != EXIT_BLOCK_PTR);
  gcc_assert (!bitmap_bit_p (bb_in_list, bb->index));

  if (cfg_blocks_empty_p ())
    {
      cfg_blocks_tail = cfg_blocks_head = 0;
      cfg_blocks_num = 1;
    }
  else
    {
      cfg_blocks_num++;
      if (cfg_blocks_num > cfg_blocks.length ())
	{
	  /* We have to grow the array now.  Adjust the queue to occupy
	     the full space of the original array.  We do not need to
	     initialize the newly allocated portion of the array
	     because we keep track of CFG_BLOCKS_HEAD and
	     CFG_BLOCKS_TAIL.  */
	  cfg_blocks_tail = cfg_blocks.length ();
	  cfg_blocks_head = 0;
	  cfg_blocks.safe_grow (2 * cfg_blocks_tail);
	}
      /* Minor optimization: we prefer to see blocks with more
	 predecessors later, because there is more of a chance that
	 the incoming edges will be executable.  */
      else if (EDGE_COUNT (bb->preds)
	       >= EDGE_COUNT (cfg_blocks[cfg_blocks_head]->preds))
	cfg_blocks_tail = ((cfg_blocks_tail + 1) % cfg_blocks.length ());
      else
	{
	  if (cfg_blocks_head == 0)
	    cfg_blocks_head = cfg_blocks.length ();
	  --cfg_blocks_head;
	  head = true;
	}
    }

  cfg_blocks[head ? cfg_blocks_head : cfg_blocks_tail] = bb;
  bitmap_set_bit (bb_in_list, bb->index);
}
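
/* An illustrative trace (hypothetical numbers): with a 4-slot array,
   CFG_BLOCKS_HEAD == 1 and CFG_BLOCKS_TAIL == 3, adding a block with
   fewer predecessors than the current head wraps CFG_BLOCKS_HEAD back
   to 0 and stores the block there, so cfg_blocks_get returns it
   sooner; a block with at least as many predecessors would instead be
   stored after the tail advances to (3 + 1) % 4 == 0.  Both indices
   always wrap modulo the array length.  */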


/* Remove and return the next block from the worklist.  */

static basic_block
cfg_blocks_get (void)
{
  basic_block bb;

  gcc_assert (!cfg_blocks_empty_p ());

  bb = cfg_blocks[cfg_blocks_head];
  gcc_assert (bb);

  cfg_blocks_head = ((cfg_blocks_head + 1) % cfg_blocks.length ());
  --cfg_blocks_num;
  bitmap_clear_bit (bb_in_list, bb->index);

  return bb;
}


/* We have just defined a new value for VAR.  If IS_VARYING is true,
   add all immediate uses of VAR to VARYING_SSA_EDGES, otherwise add
   them to INTERESTING_SSA_EDGES.  */

static void
add_ssa_edge (tree var, bool is_varying)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      gimple use_stmt = USE_STMT (use_p);

      if (prop_simulate_again_p (use_stmt)
	  && !gimple_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST))
	{
	  gimple_set_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST, true);
	  if (is_varying)
	    vec_safe_push (varying_ssa_edges, use_stmt);
	  else
	    vec_safe_push (interesting_ssa_edges, use_stmt);
	}
    }
}
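
/* For instance (a hypothetical fragment, for illustration only): if
   the lattice value of x_1 drops to VARYING and the program contains

     y_2 = x_1 + 1;

   then add_ssa_edge (x_1, true) queues that statement on
   VARYING_SSA_EDGES, provided it is marked for simulation and is not
   already on a worklist.  */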


/* Add edge E to the control flow worklist.  */

static void
add_control_edge (edge e)
{
  basic_block bb = e->dest;
  if (bb == EXIT_BLOCK_PTR)
    return;

  /* If the edge has already been marked executable, skip it.  */
  if (e->flags & EDGE_EXECUTABLE)
    return;

  e->flags |= EDGE_EXECUTABLE;

  /* If the block is already in the list, we're done.  */
  if (bitmap_bit_p (bb_in_list, bb->index))
    return;

  cfg_blocks_add (bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Adding destination of edge (%d -> %d) to worklist\n\n",
	e->src->index, e->dest->index);
}


/* Simulate the execution of STMT and update the work lists accordingly.  */

static void
simulate_stmt (gimple stmt)
{
  enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
  edge taken_edge = NULL;
  tree output_name = NULL_TREE;

  /* Don't bother visiting statements that are already
     considered varying by the propagator.  */
  if (!prop_simulate_again_p (stmt))
    return;

  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      val = ssa_prop_visit_phi (stmt);
      output_name = gimple_phi_result (stmt);
    }
  else
    val = ssa_prop_visit_stmt (stmt, &taken_edge, &output_name);

  if (val == SSA_PROP_VARYING)
    {
      prop_set_simulate_again (stmt, false);

      /* If the statement produced a new varying value, add the SSA
	 edges coming out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name, true);

      /* If STMT transfers control out of its basic block, add
	 all outgoing edges to the work list.  */
      if (stmt_ends_bb_p (stmt))
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    add_control_edge (e);
	}
    }
  else if (val == SSA_PROP_INTERESTING)
    {
      /* If the statement produced a new value, add the SSA edges coming
	 out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name, false);

      /* If we know which edge is going to be taken out of this block,
	 add it to the CFG work list.  */
      if (taken_edge)
	add_control_edge (taken_edge);
    }
}
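
/* As an example (hypothetical lattice values, for illustration): if
   the visit callback evaluates

     if (x_1 > 10)

   with x_1 known to be the constant 5, it can return
   SSA_PROP_INTERESTING and set *TAKEN_EDGE_P to the false edge, so
   only that edge is added to the CFG worklist; if nothing is known
   about x_1, it returns SSA_PROP_VARYING and both outgoing edges are
   added.  */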

/* Process an SSA edge worklist.  WORKLIST is the SSA edge worklist to
   drain.  This pops statements off the given WORKLIST and processes
   them until there are no more statements on WORKLIST.
   We take a pointer to WORKLIST because it may be reallocated when an
   SSA edge is added to it in simulate_stmt.  */

static void
process_ssa_edge_worklist (vec<gimple, va_gc> **worklist)
{
  /* Drain the entire worklist.  */
  while ((*worklist)->length () > 0)
    {
      basic_block bb;

      /* Pull the statement to simulate off the worklist.  */
      gimple stmt = (*worklist)->pop ();

      /* If this statement was already visited by simulate_block, then
	 we don't need to visit it again here.  */
      if (!gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
	continue;

      /* STMT is no longer in a worklist.  */
      gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	}

      bb = gimple_bb (stmt);

      /* PHI nodes are always visited, regardless of whether or not
	 the destination block is executable.  Otherwise, visit the
	 statement only if its block is marked executable.  */
      if (gimple_code (stmt) == GIMPLE_PHI
	  || bitmap_bit_p (executable_blocks, bb->index))
	simulate_stmt (stmt);
    }
}


/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

static void
simulate_block (basic_block block)
{
  gimple_stmt_iterator gsi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
    simulate_stmt (gsi_stmt (gsi));

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (!bitmap_bit_p (executable_blocks, block->index))
    {
      gimple_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;
      edge_iterator ei;

      /* Note that we have simulated this block.  */
      bitmap_set_bit (executable_blocks, block->index);

      for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
	{
	  gimple stmt = gsi_stmt (j);

	  /* If this statement is already in the worklist then
	     "cancel" it.  The reevaluation implied by the worklist
	     entry will produce the same value we generate here and
	     thus reevaluating it again from the worklist is
	     pointless.  */
	  if (gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
	    gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);

	  simulate_stmt (stmt);
	}

      /* We cannot predict when abnormal and EH edges will be executed, so
	 once a block is considered executable, we consider any
	 outgoing abnormal edges as executable.

	 TODO: This is not exactly true.  Simplifying a statement might
	 prove it non-throwing, and a computed goto can be handled once
	 its destination is known.

	 At the same time, if this block has only one successor that is
	 reached by non-abnormal edges, then add that successor to the
	 worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
	    add_control_edge (e);
	  else
	    {
	      normal_edge_count++;
	      normal_edge = e;
	    }
	}

      if (normal_edge_count == 1)
	add_control_edge (normal_edge);
    }
}


/* Initialize local data structures and work lists.  */

static void
ssa_prop_init (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb;

  /* Worklists of SSA edges.  */
  vec_alloc (interesting_ssa_edges, 20);
  vec_alloc (varying_ssa_edges, 20);

  executable_blocks = sbitmap_alloc (last_basic_block);
  bitmap_clear (executable_blocks);

  bb_in_list = sbitmap_alloc (last_basic_block);
  bitmap_clear (bb_in_list);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_immediate_uses (dump_file);

  cfg_blocks.create (20);
  cfg_blocks.safe_grow_cleared (20);

  /* Initially assume that every edge in the CFG is not executable
     (including the edges coming out of ENTRY_BLOCK_PTR).  */
  FOR_ALL_BB (bb)
    {
      gimple_stmt_iterator si;

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
    }

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    add_control_edge (e);
}


/* Free allocated storage.  */

static void
ssa_prop_fini (void)
{
  vec_free (interesting_ssa_edges);
  vec_free (varying_ssa_edges);
  cfg_blocks.release ();
  sbitmap_free (bb_in_list);
  sbitmap_free (executable_blocks);
}


/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

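/* For example (a hypothetical tree, for illustration only),
   fold-const.c may hand back

     a[i_1 + 1]

   i.e. an ARRAY_REF whose index operand is a PLUS_EXPR rather than a
   GIMPLE value; valid_gimple_rhs_p rejects such an expression, so the
   caller keeps the original statement instead.  */
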
bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && TREE_CODE (TREE_TYPE (expr)) != VECTOR_TYPE)
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (((code == VEC_COND_EXPR || code == COND_EXPR)
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}


/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  /* Register-typed arguments must be GIMPLE values.  */
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}

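/* For example (hypothetical trees, for illustration): foo (x_1) is
   acceptable, while foo (x_1 + 1) is not, because the argument
   x_1 + 1 would itself need to be gimplified into a separate
   statement first.  */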

/* Make SSA names defined by OLD_STMT point to NEW_STMT
   as their defining statement.  */

void
move_ssa_defining_stmt_for_defs (gimple new_stmt, gimple old_stmt)
{
  tree var;
  ssa_op_iter iter;

  if (gimple_in_ssa_p (cfun))
    {
      /* Make defined SSA_NAMEs point to the new
	 statement as their definition.  */
      FOR_EACH_SSA_TREE_OPERAND (var, old_stmt, iter, SSA_OP_ALL_DEFS)
	{
	  if (TREE_CODE (var) == SSA_NAME)
	    SSA_NAME_DEF_STMT (var) = new_stmt;
	}
    }
}

/* Helper function for update_gimple_call and update_call_from_tree.
   A GIMPLE_CALL STMT is being replaced with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple new_stmt,
			   gimple stmt)
{
  gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to call FN with
   NARGS arguments.  The actual arguments, in GIMPLE form, follow the
   NARGS parameter in the variable argument list.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gimple new_stmt, stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}
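
/* A sketch of a typical use (hypothetical variables; assumes DEST,
   SRC and LEN are already valid GIMPLE values):

     update_gimple_call (&gsi, builtin_decl_implicit (BUILT_IN_MEMCPY),
			 3, dest, src, len);

   replaces the call at GSI with a call to memcpy, keeping the
   original LHS and virtual operands.  */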

/* Update a GIMPLE_CALL statement at iterator *SI_P to reflect the
   value of EXPR, which is expected to be the result of folding the
   call.  This can only be done if EXPR is a CALL_EXPR with valid
   GIMPLE operands as arguments, or if it is a suitable RHS expression
   for a GIMPLE_ASSIGN.  More complex expressions will require
   gimplification, which will introduce additional statements.  In this
   event, no update is performed, and the function returns false.
   Note that we cannot mutate a GIMPLE_CALL in-place, so we always
   replace the statement at *SI_P with an entirely new statement.
   The new statement need not be a call, e.g., if the original call
   folded to a constant.  */

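/* For example (illustrative folds, not taken from a real dump):
   folding the call in `y_1 = __builtin_sqrt (4.0)' may yield the
   constant 2.0e+0, so the statement becomes the assignment
   `y_1 = 2.0e+0'; by contrast, an EXPR that still needs
   gimplification, such as a sum of three SSA names, makes us return
   false and leave the call untouched.  */
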
bool
update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  gimple stmt = gsi_stmt (*si_p);

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gimple new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();

      return true;
    }
  else if (valid_gimple_rhs_p (expr))
    {
      tree lhs = gimple_call_lhs (stmt);
      gimple new_stmt;

      /* The call has simplified to an expression
	 that cannot be represented as a GIMPLE_CALL.  */
      if (lhs)
	{
	  /* A value is expected.
	     Introduce a new GIMPLE_ASSIGN statement.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  new_stmt = gimple_build_assign (lhs, expr);
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	}
      else if (!TREE_SIDE_EFFECTS (expr))
	{
	  /* No value is expected, and EXPR has no effect.
	     Replace it with an empty statement.  */
	  new_stmt = gimple_build_nop ();
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	}
      else
	{
	  /* No value is expected, but EXPR has an effect,
	     e.g., it could be a reference to a volatile
	     variable.  Create an assignment statement
	     with a dummy (unused) lhs variable.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  if (gimple_in_ssa_p (cfun))
	    lhs = make_ssa_name (TREE_TYPE (expr), NULL);
	  else
	    lhs = create_tmp_var (TREE_TYPE (expr), NULL);
	  new_stmt = gimple_build_assign (lhs, expr);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	}
      gimple_set_location (new_stmt, gimple_location (stmt));
      gsi_replace (si_p, new_stmt, false);
      return true;
    }
  else
    /* The call simplified to an expression that is
       not a valid GIMPLE RHS.  */
    return false;
}


/* Entry point to the propagation engine.

   VISIT_STMT is called for every statement visited.
   VISIT_PHI is called for every PHI node visited.  */

void
ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
	       ssa_prop_visit_phi_fn visit_phi)
{
  ssa_prop_visit_stmt = visit_stmt;
  ssa_prop_visit_phi = visit_phi;

  ssa_prop_init ();

  /* Iterate until the worklists are empty.  */
  while (!cfg_blocks_empty_p ()
	 || interesting_ssa_edges->length () > 0
	 || varying_ssa_edges->length () > 0)
    {
      if (!cfg_blocks_empty_p ())
	{
	  /* Pull the next block to simulate off the worklist.  */
	  basic_block dest_block = cfg_blocks_get ();
	  simulate_block (dest_block);
	}

      /* In order to move things to varying as quickly as
	 possible, process the VARYING_SSA_EDGES worklist first.  */
      process_ssa_edge_worklist (&varying_ssa_edges);

      /* Now process the INTERESTING_SSA_EDGES worklist.  */
      process_ssa_edge_worklist (&interesting_ssa_edges);
    }

  ssa_prop_fini ();
}


/* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
   is a non-volatile pointer dereference, a structure reference or a
   reference to a single _DECL.  Ignore volatile memory references
   because they are not interesting for the optimizers.  */

bool
stmt_makes_single_store (gimple stmt)
{
  tree lhs;

  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_CALL)
    return false;

  if (!gimple_vdef (stmt))
    return false;

  lhs = gimple_get_lhs (stmt);

  /* A call statement may have a null LHS.  */
  if (!lhs)
    return false;

  return (!TREE_THIS_VOLATILE (lhs)
	  && (DECL_P (lhs)
	      || REFERENCE_CLASS_P (lhs)));
}
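
/* Illustrative cases (hypothetical statements): `*p_1 = x_2' and
   `a.f = x_2' both qualify, since they store through a dereference or
   a structure reference and create a virtual definition; `x_3 = *p_1'
   does not (it is a load, so it has no VDEF), and a volatile store is
   rejected explicitly.  */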


/* Propagation statistics.  */
struct prop_stats_d
{
  long num_const_prop;
  long num_copy_prop;
  long num_stmts_folded;
  long num_dce;
};

static struct prop_stats_d prop_stats;

/* Replace USE references in statement STMT with the values returned
   by GET_VALUE.  Return true if at least one reference was
   replaced.  */

static bool
replace_uses_in (gimple stmt, ssa_prop_get_value_fn get_value)
{
  bool replaced = false;
  use_operand_p use;
  ssa_op_iter iter;

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      tree tuse = USE_FROM_PTR (use);
      tree val = (*get_value) (tuse);

      if (val == tuse || val == NULL_TREE)
	continue;

      if (gimple_code (stmt) == GIMPLE_ASM
	  && !may_propagate_copy_into_asm (tuse))
	continue;

      if (!may_propagate_copy (tuse, val))
	continue;

      if (TREE_CODE (val) != SSA_NAME)
	prop_stats.num_const_prop++;
      else
	prop_stats.num_copy_prop++;

      propagate_value (use, val);

      replaced = true;
    }

  return replaced;
}


/* Replace propagated values into all the arguments for PHI using the
   values returned by GET_VALUE.  */

static void
replace_phi_args_in (gimple phi, ssa_prop_get_value_fn get_value)
{
  size_t i;
  bool replaced = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Folding PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree val = (*get_value) (arg);

	  if (val && val != arg && may_propagate_copy (arg, val))
	    {
	      if (TREE_CODE (val) != SSA_NAME)
		prop_stats.num_const_prop++;
	      else
		prop_stats.num_copy_prop++;

	      propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
	      replaced = true;

	      /* If we propagated a copy and this argument flows
		 through an abnormal edge, update the replacement
		 accordingly.  */
	      if (TREE_CODE (val) == SSA_NAME
		  && gimple_phi_arg_edge (phi, i)->flags & EDGE_ABNORMAL)
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
	    }
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!replaced)
	fprintf (dump_file, "No folding possible\n");
      else
	{
	  fprintf (dump_file, "Folded into: ");
	  print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
}


/* Perform final substitution and folding of propagated values.

   GET_VALUE_FN, if non-NULL, returns the single value that should be
   substituted at every use of an SSA name.  If it is NULL, no values
   are substituted.

   If FOLD_FN is non-NULL, it is invoked on every statement before
   values are propagated into it, allowing pass-specific
   simplification.

   DO_DCE is true if trivially dead stmts can be removed.

   If DO_DCE is true, the statements within a BB are walked from
   last to first element.  Otherwise we scan from first to last element.

   Return TRUE when something changed.  */

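/* A sketch of a typical invocation at the end of a propagation pass
   (MY_GET_VALUE is a hypothetical callback that returns the lattice
   value of an SSA name, or NULL_TREE when nothing is known):

     bool changed = substitute_and_fold (my_get_value, NULL, true);

   This substitutes the known values everywhere, folds the affected
   statements and removes statements that became trivially dead.  */
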
bool
substitute_and_fold (ssa_prop_get_value_fn get_value_fn,
		     ssa_prop_fold_stmt_fn fold_fn,
		     bool do_dce)
{
  basic_block bb;
  bool something_changed = false;
  unsigned i;

  if (!get_value_fn && !fold_fn)
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSubstituting values and folding statements\n\n");

  memset (&prop_stats, 0, sizeof (prop_stats));

  /* Substitute lattice values at definition sites.  */
  if (get_value_fn)
    for (i = 1; i < num_ssa_names; ++i)
      {
	tree name = ssa_name (i);
	tree val;
	gimple def_stmt;
	gimple_stmt_iterator gsi;

	if (!name
	    || virtual_operand_p (name))
	  continue;

	def_stmt = SSA_NAME_DEF_STMT (name);
	if (gimple_nop_p (def_stmt)
	    /* Do not substitute ASSERT_EXPR rhs, this will confuse VRP.  */
	    || (gimple_assign_single_p (def_stmt)
		&& gimple_assign_rhs_code (def_stmt) == ASSERT_EXPR)
	    || !(val = (*get_value_fn) (name))
	    || !may_propagate_copy (name, val))
	  continue;

	gsi = gsi_for_stmt (def_stmt);
	if (is_gimple_assign (def_stmt))
	  {
	    gimple_assign_set_rhs_with_ops (&gsi, TREE_CODE (val),
					    val, NULL_TREE);
	    gcc_assert (gsi_stmt (gsi) == def_stmt);
	    if (maybe_clean_eh_stmt (def_stmt))
	      gimple_purge_dead_eh_edges (gimple_bb (def_stmt));
	    update_stmt (def_stmt);
	  }
	else if (is_gimple_call (def_stmt))
	  {
	    int flags = gimple_call_flags (def_stmt);

	    /* Don't optimize away calls that have side-effects.  */
	    if ((flags & (ECF_CONST|ECF_PURE)) == 0
		|| (flags & ECF_LOOPING_CONST_OR_PURE))
	      continue;
	    if (update_call_from_tree (&gsi, val)
		&& maybe_clean_or_replace_eh_stmt (def_stmt, gsi_stmt (gsi)))
	      gimple_purge_dead_eh_edges (gimple_bb (gsi_stmt (gsi)));
	  }
	else if (gimple_code (def_stmt) == GIMPLE_PHI)
	  {
	    gimple new_stmt = gimple_build_assign (name, val);
	    gimple_stmt_iterator gsi2;
	    SSA_NAME_DEF_STMT (name) = new_stmt;
	    gsi2 = gsi_after_labels (gimple_bb (def_stmt));
	    gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
	    remove_phi_node (&gsi, false);
	  }

	something_changed = true;
      }

  /* Propagate into all uses and fold.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      /* Propagate known values into PHI nodes.  */
      if (get_value_fn)
	for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	  replace_phi_args_in (gsi_stmt (i), get_value_fn);

      /* Propagate known values into stmts.  Do a backward walk if
	 DO_DCE is true; in some cases that exposes more trivially
	 deletable stmts as we walk backward.  */
      for (i = (do_dce ? gsi_last_bb (bb) : gsi_start_bb (bb)); !gsi_end_p (i);)
	{
	  bool did_replace;
	  gimple stmt = gsi_stmt (i);
	  gimple old_stmt;
	  enum gimple_code code = gimple_code (stmt);
	  gimple_stmt_iterator oldi;

	  oldi = i;
	  if (do_dce)
	    gsi_prev (&i);
	  else
	    gsi_next (&i);

	  /* Ignore ASSERT_EXPRs.  They are used by VRP to generate
	     range information for names and they are discarded
	     afterwards.  */
	  if (code == GIMPLE_ASSIGN
	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == ASSERT_EXPR)
	    continue;

	  /* No point propagating into a stmt whose result is not used,
	     but instead we might be able to remove a trivially dead stmt.
	     Don't do this when called from VRP, since the SSA_NAME which
	     is going to be released could be still referenced in VRP
	     ranges.  */
	  if (do_dce
	      && gimple_get_lhs (stmt)
	      && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
	      && has_zero_uses (gimple_get_lhs (stmt))
	      && !stmt_could_throw_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      gimple_stmt_iterator i2;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Removing dead stmt ");
		  print_gimple_stmt (dump_file, stmt, 0, 0);
		  fprintf (dump_file, "\n");
		}
	      prop_stats.num_dce++;
	      i2 = gsi_for_stmt (stmt);
	      gsi_remove (&i2, true);
	      release_defs (stmt);
	      continue;
	    }

	  /* Replace the statement with its folded version and mark it
	     folded.  */
	  did_replace = false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Folding statement: ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	    }

	  old_stmt = stmt;

	  /* Some statements may be simplified using propagator
	     specific information.  Do this before propagating
	     into the stmt to not disturb pass specific information.  */
	  if (fold_fn
	      && (*fold_fn) (&oldi))
	    {
	      did_replace = true;
	      prop_stats.num_stmts_folded++;
	      stmt = gsi_stmt (oldi);
	      update_stmt (stmt);
	    }

	  /* Replace real uses in the statement.  */
	  if (get_value_fn)
	    did_replace |= replace_uses_in (stmt, get_value_fn);

	  /* If we made a replacement, fold the statement.  */
	  if (did_replace)
	    fold_stmt (&oldi);

	  /* Now cleanup.  */
	  if (did_replace)
	    {
	      stmt = gsi_stmt (oldi);

	      /* If we cleaned up EH information from the statement,
		 remove EH edges.  */
	      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
		gimple_purge_dead_eh_edges (bb);

	      if (is_gimple_assign (stmt)
		  && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);

		  if (TREE_CODE (rhs) == ADDR_EXPR)
		    recompute_tree_invariant_for_addr_expr (rhs);
		}

	      /* Determine what needs to be done to update the SSA form.  */
	      update_stmt (stmt);
	      if (!is_gimple_debug (stmt))
		something_changed = true;
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      if (did_replace)
		{
		  fprintf (dump_file, "Folded into: ");
		  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
		  fprintf (dump_file, "\n");
		}
	      else
		fprintf (dump_file, "Not folded\n");
	    }
	}
    }

  statistics_counter_event (cfun, "Constants propagated",
			    prop_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
			    prop_stats.num_copy_prop);
  statistics_counter_event (cfun, "Statements folded",
			    prop_stats.num_stmts_folded);
  statistics_counter_event (cfun, "Statements deleted",
			    prop_stats.num_dce);
  return something_changed;
}

#include "gt-tree-ssa-propagate.h"