/* Generic SSA value propagation engine.
   Copyright (C) 2004-2018 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 3, or (at your option) any
   later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "domwalk.h"
#include "cfgloop.h"
#include "tree-cfgcleanup.h"
#include "cfganal.h"

/* This file implements a generic value propagation engine based on
   the same propagation used by the SSA-CCP algorithm [1].

   Propagation is performed by simulating the execution of every
   statement that produces the value being propagated.  Simulation
   proceeds as follows:

   1- Initially, all edges of the CFG are marked not executable and
      the CFG worklist is seeded with all the statements in the entry
      basic block (block 0).

   2- Every statement S is simulated with a call to the virtual
      function visit_stmt.  This evaluation may produce three
      results:

      	SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
	    interest and does not affect any of the work lists.
	    The statement may be simulated again if any of its input
	    operands change in future iterations of the simulator.

	SSA_PROP_VARYING: The value produced by S cannot be determined
	    at compile time.  Further simulation of S is not required.
	    If S is a conditional jump, all the outgoing edges for the
	    block are considered executable and added to the work
	    list.

	SSA_PROP_INTERESTING: S produces a value that can be computed
	    at compile time.  Its result can be propagated into the
	    statements that feed from S.  Furthermore, if S is a
	    conditional jump, only the edge known to be taken is added
	    to the work list.  Edges that are known not to execute are
	    never simulated.

   3- PHI nodes are simulated with a call to the virtual function
      visit_phi.  The return value from visit_phi has the same
      semantics as described in #2.

   4- Two work lists are kept.  Statements are only added to these
      lists if they produce one of SSA_PROP_INTERESTING or
      SSA_PROP_VARYING.

   	CFG_BLOCKS contains the list of blocks to be simulated.
	    Blocks are added to this list if their incoming edges are
	    found executable.

	SSA_EDGE_WORKLIST contains the list of statements that we
	    need to revisit.

   5- Simulation terminates when both work lists are drained.

   Before calling ssa_propagate, it is important to initialize the
   simulate-again flag (see prop_set_simulate_again) for every
   statement in the program: set it for the statements that should be
   simulated and clear it for those that should never be simulated.

   It is also important to compute def-use information before calling
   ssa_propagate.

   References:

     [1] Constant propagation with conditional branches,
         Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     [2] Building an Optimizing Compiler,
	 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     [3] Advanced Compiler Design and Implementation,
	 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
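
/* As an illustration (a hypothetical client, not part of this file),
   a pass drives the engine by deriving from ssa_propagation_engine
   (declared in tree-ssa-propagate.h) and implementing the two
   simulation hooks.  A minimal sketch:

     class example_prop_engine : public ssa_propagation_engine
     {
     public:
       enum ssa_prop_result visit_stmt (gimple *, edge *, tree *)
	 FINAL OVERRIDE;
       enum ssa_prop_result visit_phi (gphi *) FINAL OVERRIDE;
     };

     enum ssa_prop_result
     example_prop_engine::visit_stmt (gimple *stmt, edge *taken_edge_p,
				      tree *output_p)
     {
       // Evaluate STMT in the pass-specific lattice.  Return
       // SSA_PROP_INTERESTING and set *OUTPUT_P (and *TAKEN_EDGE_P for
       // conditionals) when a value was computed, SSA_PROP_VARYING
       // when no value ever will be, SSA_PROP_NOT_INTERESTING otherwise.
       return SSA_PROP_NOT_INTERESTING;
     }

     enum ssa_prop_result
     example_prop_engine::visit_phi (gphi *)
     {
       return SSA_PROP_NOT_INTERESTING;
     }

   after which the pass initializes its lattice and the simulate-again
   flags, then calls example_prop_engine ().ssa_propagate ().  */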

/* Worklist of control flow edge destinations.  This contains
   the CFG order number of the blocks so we can iterate in CFG
   order by visiting in bit-order.  */
static bitmap cfg_blocks;
static int *bb_to_cfg_order;
static int *cfg_order_to_bb;

/* Worklist of SSA edges which will need reexamination as their
   definition has changed.  SSA edges are def-use edges in the SSA
   web.  For each D-U edge, we store the target statement or PHI node
   UID in a bitmap.  UIDs are assigned in execution order, so the
   bitmap keeps the worklist ordered the same way.  */
static bitmap ssa_edge_worklist;
static vec<gimple *> uid_to_stmt;

/* Return true if the block worklist is empty.  */

static inline bool
cfg_blocks_empty_p (void)
{
  return bitmap_empty_p (cfg_blocks);
}


/* Add a basic block to the worklist.  The block must not be the ENTRY
   or EXIT block.  */

static void
cfg_blocks_add (basic_block bb)
{
  gcc_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
  bitmap_set_bit (cfg_blocks, bb_to_cfg_order[bb->index]);
}


/* Remove and return the first block from the worklist.  */

static basic_block
cfg_blocks_get (void)
{
  gcc_assert (!cfg_blocks_empty_p ());
  int order_index = bitmap_first_set_bit (cfg_blocks);
  bitmap_clear_bit (cfg_blocks, order_index);
  return BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb [order_index]);
}


/* We have just defined a new value for VAR.  Add all immediate uses
   of VAR to the SSA edge worklist.  */

static void
add_ssa_edge (tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      gimple *use_stmt = USE_STMT (use_p);

      /* If we have not yet simulated the block, wait for that to
	 happen and do not add the stmt to the SSA edge worklist.  */
      if (! (gimple_bb (use_stmt)->flags & BB_VISITED))
	continue;

      if (prop_simulate_again_p (use_stmt)
	  && bitmap_set_bit (ssa_edge_worklist, gimple_uid (use_stmt)))
	{
	  uid_to_stmt[gimple_uid (use_stmt)] = use_stmt;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "ssa_edge_worklist: adding SSA use in ");
	      print_gimple_stmt (dump_file, use_stmt, 0, TDF_SLIM);
	    }
	}
    }
}


/* Add edge E to the control flow worklist.  */

static void
add_control_edge (edge e)
{
  basic_block bb = e->dest;
  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  /* If the edge was already marked executable, skip it.  */
  if (e->flags & EDGE_EXECUTABLE)
    return;

  e->flags |= EDGE_EXECUTABLE;

  cfg_blocks_add (bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Adding destination of edge (%d -> %d) to worklist\n",
	e->src->index, e->dest->index);
}


/* Simulate the execution of STMT and update the work lists accordingly.  */

void
ssa_propagation_engine::simulate_stmt (gimple *stmt)
{
  enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
  edge taken_edge = NULL;
  tree output_name = NULL_TREE;

  /* Pull the stmt off the SSA edge worklist.  */
  bitmap_clear_bit (ssa_edge_worklist, gimple_uid (stmt));

  /* Don't bother visiting statements that are already
     considered varying by the propagator.  */
  if (!prop_simulate_again_p (stmt))
    return;

  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      val = visit_phi (as_a <gphi *> (stmt));
      output_name = gimple_phi_result (stmt);
    }
  else
    val = visit_stmt (stmt, &taken_edge, &output_name);

  if (val == SSA_PROP_VARYING)
    {
      prop_set_simulate_again (stmt, false);

      /* If the statement produced a new varying value, add the SSA
	 edges coming out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name);

      /* If STMT transfers control out of its basic block, add
	 all outgoing edges to the work list.  */
      if (stmt_ends_bb_p (stmt))
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    add_control_edge (e);
	}
      return;
    }
  else if (val == SSA_PROP_INTERESTING)
    {
      /* If the statement produced a new value, add the SSA edges coming
	 out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name);

      /* If we know which edge is going to be taken out of this block,
	 add it to the CFG work list.  */
      if (taken_edge)
	add_control_edge (taken_edge);
    }

  /* If none of the SSA uses on STMT have defs that may be simulated
     again, then this stmt will never be visited again.  */
  bool has_simulate_again_uses = false;
  use_operand_p use_p;
  ssa_op_iter iter;
  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      edge_iterator ei;
      edge e;
      tree arg;
      FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->preds)
	if (!(e->flags & EDGE_EXECUTABLE)
	    || ((arg = PHI_ARG_DEF_FROM_EDGE (stmt, e))
		&& TREE_CODE (arg) == SSA_NAME
		&& !SSA_NAME_IS_DEFAULT_DEF (arg)
		&& prop_simulate_again_p (SSA_NAME_DEF_STMT (arg))))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
    }
  else
    FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
      {
	gimple *def_stmt = SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p));
	if (!gimple_nop_p (def_stmt)
	    && prop_simulate_again_p (def_stmt))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
      }
  if (!has_simulate_again_uses)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "marking stmt to be not simulated again\n");
      prop_set_simulate_again (stmt, false);
    }
}

/* Process one entry from the SSA edge worklist: pop the statement
   with the lowest UID off SSA_EDGE_WORKLIST and simulate it.  */

void
ssa_propagation_engine::process_ssa_edge_worklist (void)
{
  /* Process the next entry from the worklist.  */
  unsigned stmt_uid = bitmap_first_set_bit (ssa_edge_worklist);
  bitmap_clear_bit (ssa_edge_worklist, stmt_uid);
  gimple *stmt = uid_to_stmt[stmt_uid];

  /* We should not have stmts in not yet simulated BBs on the worklist.  */
  gcc_assert (gimple_bb (stmt)->flags & BB_VISITED);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nSimulating statement: ");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  simulate_stmt (stmt);
}


/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

void
ssa_propagation_engine::simulate_block (basic_block block)
{
  gimple_stmt_iterator gsi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
    simulate_stmt (gsi_stmt (gsi));

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (! (block->flags & BB_VISITED))
    {
      gimple_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;
      edge_iterator ei;

      for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
	simulate_stmt (gsi_stmt (j));

      /* Note that we have simulated this block.  */
      block->flags |= BB_VISITED;

      /* We cannot predict when abnormal and EH edges will be executed, so
	 once a block is considered executable, we consider any
	 outgoing abnormal edges as executable.

	 TODO: This is not exactly true.  Simplifying a statement might
	 prove it non-throwing, and a computed goto can be handled
	 when its destination is known.

	 At the same time, if this block has only one successor that is
	 reached by non-abnormal edges, then add that successor to the
	 worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
	    add_control_edge (e);
	  else
	    {
	      normal_edge_count++;
	      normal_edge = e;
	    }
	}

      if (normal_edge_count == 1)
	add_control_edge (normal_edge);
    }
}


/* Initialize local data structures and work lists.  */

static void
ssa_prop_init (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb;

  /* Worklists of SSA edges.  */
  ssa_edge_worklist = BITMAP_ALLOC (NULL);

  /* Worklist of basic-blocks.  */
  bb_to_cfg_order = XNEWVEC (int, last_basic_block_for_fn (cfun) + 1);
  cfg_order_to_bb = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int n = pre_and_rev_post_order_compute_fn (cfun, NULL,
					     cfg_order_to_bb, false);
  for (int i = 0; i < n; ++i)
    bb_to_cfg_order[cfg_order_to_bb[i]] = i;
  cfg_blocks = BITMAP_ALLOC (NULL);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_immediate_uses (dump_file);

  /* Initially assume that every edge in the CFG is not executable
     (including the edges coming out of the entry block).  Mark blocks
     as not visited; blocks not yet visited will have all their statements
     simulated once an incoming edge becomes executable.  */
  set_gimple_stmt_max_uid (cfun, 0);
  for (int i = 0; i < n; ++i)
    {
      gimple_stmt_iterator si;
      bb = BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb[i]);

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}

      bb->flags &= ~BB_VISITED;
      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
    }
  uid_to_stmt.safe_grow (gimple_stmt_max_uid (cfun));

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    {
      e->flags &= ~EDGE_EXECUTABLE;
      add_control_edge (e);
    }
}


/* Free allocated storage.  */

static void
ssa_prop_fini (void)
{
  BITMAP_FREE (cfg_blocks);
  free (bb_to_cfg_order);
  free (cfg_order_to_bb);
  BITMAP_FREE (ssa_edge_worklist);
  uid_to_stmt.release ();
}


/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
        {
        case ADDR_EXPR:
          {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
            t = TREE_OPERAND (expr, 0);
            while (handled_component_p (t))
              {
                /* ??? More checks needed, see the GIMPLE verifier.  */
                if ((TREE_CODE (t) == ARRAY_REF
                     || TREE_CODE (t) == ARRAY_RANGE_REF)
                    && !is_gimple_val (TREE_OPERAND (t, 1)))
                  return false;
                t = TREE_OPERAND (t, 0);
              }
            if (!is_gimple_id (t))
              return false;
          }
          break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (((code == VEC_COND_EXPR || code == COND_EXPR)
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
        return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
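
/* For instance (an illustrative sketch, not calls made by this file):
   folding may produce a binary RHS whose operands are themselves
   expressions rather than gimple values, such as

     (a_1 + b_2) * c_3

   Here the first operand of the MULT_EXPR is a PLUS_EXPR, not an SSA
   name or constant, so is_gimple_val fails on it and
   valid_gimple_rhs_p correctly returns false; using such a tree as a
   GIMPLE_ASSIGN RHS would first require gimplifying the addition into
   a temporary.  */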


/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}


/* Make SSA names defined by OLD_STMT point to NEW_STMT
   as their defining statement.  */

void
move_ssa_defining_stmt_for_defs (gimple *new_stmt, gimple *old_stmt)
{
  tree var;
  ssa_op_iter iter;

  if (gimple_in_ssa_p (cfun))
    {
      /* Make defined SSA_NAMEs point to the new
         statement as their definition.  */
      FOR_EACH_SSA_TREE_OPERAND (var, old_stmt, iter, SSA_OP_ALL_DEFS)
        {
          if (TREE_CODE (var) == SSA_NAME)
            SSA_NAME_DEF_STMT (var) = new_stmt;
        }
    }
}

/* Helper function for update_gimple_call and update_call_from_tree.
   A GIMPLE_CALL STMT is being replaced with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Update the GIMPLE_CALL statement at iterator *SI_P to call FN with
   NARGS arguments; the arguments, already in GIMPLE form, follow the
   NARGS argument in the varargs list.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}
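
/* A typical use (a hypothetical caller, shown only for illustration):
   rewriting the call at GSI into a call to a known builtin with three
   GIMPLE-valid arguments, e.g.

     update_gimple_call (&gsi, builtin_decl_implicit (BUILT_IN_MEMCPY),
			 3, dest, src, len);

   where DEST, SRC and LEN are assumed to already be gimple values.  */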

/* Update a GIMPLE_CALL statement at iterator *SI_P to reflect the
   value of EXPR, which is expected to be the result of folding the
   call.  This can only be done if EXPR is a CALL_EXPR with valid
   GIMPLE operands as arguments, or if it is a suitable RHS expression
   for a GIMPLE_ASSIGN.  More complex expressions will require
   gimplification, which will introduce additional statements.  In this
   event, no update is performed, and the function returns false.
   Note that we cannot mutate a GIMPLE_CALL in-place, so we always
   replace the statement at *SI_P with an entirely new statement.
   The new statement need not be a call, e.g., if the original call
   folded to a constant.  */

bool
update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
        {
          args.create (nargs);
          args.safe_grow_cleared (nargs);

          for (i = 0; i < nargs; i++)
            args[i] = CALL_EXPR_ARG (expr, i);
        }

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();

      return true;
    }
  else if (valid_gimple_rhs_p (expr))
    {
      tree lhs = gimple_call_lhs (stmt);
      gimple *new_stmt;

      /* The call has simplified to an expression
         that cannot be represented as a GIMPLE_CALL.  */
      if (lhs)
        {
          /* A value is expected.
             Introduce a new GIMPLE_ASSIGN statement.  */
          STRIP_USELESS_TYPE_CONVERSION (expr);
          new_stmt = gimple_build_assign (lhs, expr);
          move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
        }
      else if (!TREE_SIDE_EFFECTS (expr))
        {
          /* No value is expected, and EXPR has no effect.
             Replace it with an empty statement.  */
          new_stmt = gimple_build_nop ();
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
        }
      else
        {
          /* No value is expected, but EXPR has an effect,
             e.g., it could be a reference to a volatile
             variable.  Create an assignment statement
             with a dummy (unused) lhs variable.  */
          STRIP_USELESS_TYPE_CONVERSION (expr);
	  if (gimple_in_ssa_p (cfun))
	    lhs = make_ssa_name (TREE_TYPE (expr));
	  else
	    lhs = create_tmp_var (TREE_TYPE (expr));
          new_stmt = gimple_build_assign (lhs, expr);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
          move_ssa_defining_stmt_for_defs (new_stmt, stmt);
        }
      gimple_set_location (new_stmt, gimple_location (stmt));
      gsi_replace (si_p, new_stmt, false);
      return true;
    }
  else
    /* The call simplified to an expression that is
       not a valid GIMPLE RHS.  */
    return false;
}

/* Entry point to the propagation engine.

   The VISIT_STMT virtual function is called for every statement
   visited and the VISIT_PHI virtual function is called for every PHI
   node visited.  */

void
ssa_propagation_engine::ssa_propagate (void)
{
  ssa_prop_init ();

  /* Iterate until the worklists are empty.  */
  while (! cfg_blocks_empty_p ()
	 || ! bitmap_empty_p (ssa_edge_worklist))
    {
      /* First simulate whole blocks.  */
      if (! cfg_blocks_empty_p ())
	{
	  /* Pull the next block to simulate off the worklist.  */
	  basic_block dest_block = cfg_blocks_get ();
	  simulate_block (dest_block);
	  continue;
	}

      /* Then simulate from the SSA edge worklist.  */
      process_ssa_edge_worklist ();
    }

  ssa_prop_fini ();
}


/* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
   is a non-volatile pointer dereference, a structure reference or a
   reference to a single _DECL.  Ignore volatile memory references
   because they are not interesting for the optimizers.  */

bool
stmt_makes_single_store (gimple *stmt)
{
  tree lhs;

  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_CALL)
    return false;

  if (!gimple_vdef (stmt))
    return false;

  lhs = gimple_get_lhs (stmt);

  /* A call statement may have a null LHS.  */
  if (!lhs)
    return false;

  return (!TREE_THIS_VOLATILE (lhs)
          && (DECL_P (lhs)
	      || REFERENCE_CLASS_P (lhs)));
}
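
/* For example (illustrative only): '*p_1 = x_2;' and 'a.f = y_3;'
   both make a single store, while 'x_4 = y_5;' does not (it has no
   virtual definition), and a store through a volatile-qualified
   lvalue is deliberately ignored.  */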


/* Propagation statistics.  */
struct prop_stats_d
{
  long num_const_prop;
  long num_copy_prop;
  long num_stmts_folded;
  long num_dce;
};

static struct prop_stats_d prop_stats;

/* Replace USE references in statement STMT with the values returned
   by the get_value virtual function.  Return true if at least one
   reference was replaced.  */

bool
substitute_and_fold_engine::replace_uses_in (gimple *stmt)
{
  bool replaced = false;
  use_operand_p use;
  ssa_op_iter iter;

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      tree tuse = USE_FROM_PTR (use);
      tree val = get_value (tuse);

      if (val == tuse || val == NULL_TREE)
	continue;

      if (gimple_code (stmt) == GIMPLE_ASM
	  && !may_propagate_copy_into_asm (tuse))
	continue;

      if (!may_propagate_copy (tuse, val))
	continue;

      if (TREE_CODE (val) != SSA_NAME)
	prop_stats.num_const_prop++;
      else
	prop_stats.num_copy_prop++;

      propagate_value (use, val);

      replaced = true;
    }

  return replaced;
}


/* Replace the arguments of PHI with any propagated values, using the
   values returned by the get_value virtual function.  Return true if
   at least one argument was replaced.  */

bool
substitute_and_fold_engine::replace_phi_args_in (gphi *phi)
{
  size_t i;
  bool replaced = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Folding PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree val = get_value (arg);

	  if (val && val != arg && may_propagate_copy (arg, val))
	    {
	      edge e = gimple_phi_arg_edge (phi, i);

	      if (TREE_CODE (val) != SSA_NAME)
		prop_stats.num_const_prop++;
	      else
		prop_stats.num_copy_prop++;

	      propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
	      replaced = true;

	      /* If we propagated a copy and this argument flows
		 through an abnormal edge, update the replacement
		 accordingly.  */
	      if (TREE_CODE (val) == SSA_NAME
		  && e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (val));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!replaced)
	fprintf (dump_file, "No folding possible\n");
      else
	{
	  fprintf (dump_file, "Folded into: ");
	  print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }

  return replaced;
}


class substitute_and_fold_dom_walker : public dom_walker
{
public:
    substitute_and_fold_dom_walker (cdi_direction direction,
				    class substitute_and_fold_engine *engine)
	: dom_walker (direction),
	  something_changed (false),
	  substitute_and_fold_engine (engine)
    {
      stmts_to_remove.create (0);
      stmts_to_fixup.create (0);
      need_eh_cleanup = BITMAP_ALLOC (NULL);
    }
    ~substitute_and_fold_dom_walker ()
    {
      stmts_to_remove.release ();
      stmts_to_fixup.release ();
      BITMAP_FREE (need_eh_cleanup);
    }

    virtual edge before_dom_children (basic_block);
    virtual void after_dom_children (basic_block) {}

    bool something_changed;
    vec<gimple *> stmts_to_remove;
    vec<gimple *> stmts_to_fixup;
    bitmap need_eh_cleanup;

    class substitute_and_fold_engine *substitute_and_fold_engine;
};

edge
substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
{
  /* Propagate known values into PHI nodes.  */
  for (gphi_iterator i = gsi_start_phis (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      gphi *phi = i.phi ();
      tree res = gimple_phi_result (phi);
      if (virtual_operand_p (res))
	continue;
      if (res && TREE_CODE (res) == SSA_NAME)
	{
	  tree sprime = substitute_and_fold_engine->get_value (res);
	  if (sprime
	      && sprime != res
	      && may_propagate_copy (res, sprime))
	    {
	      stmts_to_remove.safe_push (phi);
	      continue;
	    }
	}
      something_changed |= substitute_and_fold_engine->replace_phi_args_in (phi);
    }

  /* Propagate known values into stmts.  In some cases this exposes
     more trivially deletable stmts, which are removed later by
     walking backward over the collected statements.  */
  for (gimple_stmt_iterator i = gsi_start_bb (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      bool did_replace;
      gimple *stmt = gsi_stmt (i);

      /* There is no point in propagating into a stmt whose LHS we
	 have a value for, since we can propagate that value into all
	 of its uses.  Mark it for removal instead.  */
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  tree sprime = substitute_and_fold_engine->get_value (lhs);
	  if (sprime
	      && sprime != lhs
	      && may_propagate_copy (lhs, sprime)
	      && !stmt_could_throw_p (stmt)
	      && !gimple_has_side_effects (stmt)
	      /* We have to leave ASSERT_EXPRs around for jump-threading.  */
	      && (!is_gimple_assign (stmt)
		  || gimple_assign_rhs_code (stmt) != ASSERT_EXPR))
	    {
	      stmts_to_remove.safe_push (stmt);
	      continue;
	    }
	}

      /* Replace the statement with its folded version and mark it
	 folded.  */
      did_replace = false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Folding statement: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	}

      gimple *old_stmt = stmt;
      bool was_noreturn = (is_gimple_call (stmt)
			   && gimple_call_noreturn_p (stmt));

      /* Replace real uses in the statement.  */
      did_replace |= substitute_and_fold_engine->replace_uses_in (stmt);

      /* If we made a replacement, fold the statement.  */
      if (did_replace)
	{
	  fold_stmt (&i, follow_single_use_edges);
	  stmt = gsi_stmt (i);
	  gimple_set_modified (stmt, true);
	}

      /* Some statements may be simplified using propagator
	 specific information.  Do this before propagating
	 into the stmt to not disturb pass specific information.  */
      update_stmt_if_modified (stmt);
      if (substitute_and_fold_engine->fold_stmt (&i))
	{
	  did_replace = true;
	  prop_stats.num_stmts_folded++;
	  stmt = gsi_stmt (i);
	  gimple_set_modified (stmt, true);
	}

      /* If this is a control statement on which the propagator left
	 some edges not executed, force the condition in a way
	 consistent with that.  See PR66945 for cases where the
	 propagator can end up with a different idea of a taken edge
	 than folding (once undefined behavior is involved).  */
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  if ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE)
	      ^ (EDGE_SUCC (bb, 1)->flags & EDGE_EXECUTABLE))
	    {
	      if (((EDGE_SUCC (bb, 0)->flags & EDGE_TRUE_VALUE) != 0)
		  == ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE) != 0))
		gimple_cond_make_true (as_a <gcond *> (stmt));
	      else
		gimple_cond_make_false (as_a <gcond *> (stmt));
	      gimple_set_modified (stmt, true);
	      did_replace = true;
	    }
	}

      /* Now cleanup.  */
      if (did_replace)
	{
	  /* If we cleaned up EH information from the statement,
	     remove EH edges.  */
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	    bitmap_set_bit (need_eh_cleanup, bb->index);

	  /* If we turned a call that was not noreturn into a noreturn
	     one, schedule it for fixup.  */
	  if (!was_noreturn
	      && is_gimple_call (stmt)
	      && gimple_call_noreturn_p (stmt))
	    stmts_to_fixup.safe_push (stmt);

	  if (gimple_assign_single_p (stmt))
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);

	      if (TREE_CODE (rhs) == ADDR_EXPR)
		recompute_tree_invariant_for_addr_expr (rhs);
	    }

	  /* Determine what needs to be done to update the SSA form.  */
	  update_stmt_if_modified (stmt);
	  if (!is_gimple_debug (stmt))
	    something_changed = true;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (did_replace)
	    {
	      fprintf (dump_file, "Folded into: ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }
	  else
	    fprintf (dump_file, "Not folded\n");
	}
    }
  return NULL;
}



/* Perform final substitution and folding of propagated values.

   Uses of SSA names are replaced with the values returned by the
   get_value virtual function, and statements are further simplified
   with the fold_stmt virtual function for pass specific
   simplification.  Statements that become trivially dead this way
   are removed.

   Return TRUE when something changed.  */

bool
substitute_and_fold_engine::substitute_and_fold (void)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSubstituting values and folding statements\n\n");

  memset (&prop_stats, 0, sizeof (prop_stats));

  calculate_dominance_info (CDI_DOMINATORS);
  substitute_and_fold_dom_walker walker (CDI_DOMINATORS, this);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* We cannot remove stmts during the BB walk, especially not release
     SSA names there as that destroys the lattice of our callers.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!walker.stmts_to_remove.is_empty ())
    {
      gimple *stmt = walker.stmts_to_remove.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}
      prop_stats.num_dce++;
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, true);
      else
	{
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	  release_defs (stmt);
	}
    }

  if (!bitmap_empty_p (walker.need_eh_cleanup))
    gimple_purge_all_dead_eh_edges (walker.need_eh_cleanup);

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!walker.stmts_to_fixup.is_empty ())
    {
      gimple *stmt = walker.stmts_to_fixup.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}
      fixup_noreturn_call (stmt);
    }

  statistics_counter_event (cfun, "Constants propagated",
			    prop_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
			    prop_stats.num_copy_prop);
  statistics_counter_event (cfun, "Statements folded",
			    prop_stats.num_stmts_folded);
  statistics_counter_event (cfun, "Statements deleted",
			    prop_stats.num_dce);

  return walker.something_changed;
}
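
/* As an illustration (a hypothetical client, not part of this file),
   a pass with a lattice of known SSA name values would use the engine
   roughly as follows:

     class example_folder : public substitute_and_fold_engine
     {
     public:
       tree get_value (tree name) FINAL OVERRIDE;
     };

     tree
     example_folder::get_value (tree name)
     {
       // Return the constant or copy-of SSA name recorded for NAME,
       // or NULL_TREE if nothing is known about it.
       return NULL_TREE;
     }

   and then call example_folder ().substitute_and_fold () once the
   lattice is final.  */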


/* Return true if we may propagate ORIG into DEST, false otherwise.  */

bool
may_propagate_copy (tree dest, tree orig)
{
  tree type_d = TREE_TYPE (dest);
  tree type_o = TREE_TYPE (orig);

  /* If ORIG is a default definition which flows in from an abnormal edge
     then the copy can be propagated.  It is important that we do so to avoid
     uninitialized copies.  */
  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
      && SSA_NAME_IS_DEFAULT_DEF (orig)
      && (SSA_NAME_VAR (orig) == NULL_TREE
	  || TREE_CODE (SSA_NAME_VAR (orig)) == VAR_DECL))
    ;
  /* Otherwise if ORIG just flows in from an abnormal edge then the copy cannot
     be propagated.  */
  else if (TREE_CODE (orig) == SSA_NAME
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    return false;
  /* Similarly if DEST flows in from an abnormal edge then the copy cannot be
     propagated.  */
  else if (TREE_CODE (dest) == SSA_NAME
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
    return false;

  /* Do not copy between types for which we *do* need a conversion.  */
  if (!useless_type_conversion_p (type_d, type_o))
    return false;

  /* Generally propagating virtual operands is not ok as that may
     create overlapping life-ranges.  */
  if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest))
    return false;

  /* Anything else is OK.  */
  return true;
}
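
/* For example (illustrative only): propagating x_3 into a use of y_4
   is rejected when y_4 appears in an abnormal PHI (say, across a
   setjmp call), and likewise when the two types would need an actual
   conversion; an unsigned integer value is never substituted for a
   pointer use even if the two happen to have the same width.  */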

/* Like may_propagate_copy, but use as the destination expression
   the principal expression (typically, the RHS) contained in
   statement DEST.  This is more efficient when working with the
   gimple tuples representation.  */

bool
may_propagate_copy_into_stmt (gimple *dest, tree orig)
{
  tree type_d;
  tree type_o;

  /* If the statement is a switch or a single-rhs assignment,
     then the expression to be replaced by the propagation may
     be an SSA_NAME.  Fortunately, there is an explicit tree
     for the expression, so we delegate to may_propagate_copy.  */

  if (gimple_assign_single_p (dest))
    return may_propagate_copy (gimple_assign_rhs1 (dest), orig);
  else if (gswitch *dest_swtch = dyn_cast <gswitch *> (dest))
    return may_propagate_copy (gimple_switch_index (dest_swtch), orig);

  /* In other cases, the expression is not materialized, so there
     is no destination to pass to may_propagate_copy.  On the other
     hand, the expression cannot be an SSA_NAME, so the analysis
     is much simpler.  */

  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    return false;

  if (is_gimple_assign (dest))
    type_d = TREE_TYPE (gimple_assign_lhs (dest));
  else if (gimple_code (dest) == GIMPLE_COND)
    type_d = boolean_type_node;
  else if (is_gimple_call (dest)
           && gimple_call_lhs (dest) != NULL_TREE)
    type_d = TREE_TYPE (gimple_call_lhs (dest));
  else
    gcc_unreachable ();

  type_o = TREE_TYPE (orig);

  if (!useless_type_conversion_p (type_d, type_o))
    return false;

  return true;
}

/* Similarly, but we know that we're propagating into an ASM_EXPR.  */

bool
may_propagate_copy_into_asm (tree dest ATTRIBUTE_UNUSED)
{
  return true;
}


/* Common code for propagate_value and replace_exp.

   Replace use operand OP_P with VAL.  FOR_PROPAGATION indicates if the
   replacement is done to propagate a value or not.  */

static void
replace_exp_1 (use_operand_p op_p, tree val,
	       bool for_propagation ATTRIBUTE_UNUSED)
{
  if (flag_checking)
    {
      tree op = USE_FROM_PTR (op_p);
      gcc_assert (!(for_propagation
		  && TREE_CODE (op) == SSA_NAME
		  && TREE_CODE (val) == SSA_NAME
		  && !may_propagate_copy (op, val)));
    }

  if (TREE_CODE (val) == SSA_NAME)
    SET_USE (op_p, val);
  else
    SET_USE (op_p, unshare_expr (val));
}


/* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
   into the operand pointed to by OP_P.

   Use this version for const/copy propagation as it will perform additional
   checks to ensure validity of the const/copy propagation.  */

void
propagate_value (use_operand_p op_p, tree val)
{
  replace_exp_1 (op_p, val, true);
}

/* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).

   Use this version when not const/copy propagating values.  For example,
   PRE uses this version when building expressions as they would appear
   in specific blocks taking into account actions of PHI nodes.

   The statement in which an expression has been replaced should be
   folded using fold_stmt_inplace.  */

void
replace_exp (use_operand_p op_p, tree val)
{
  replace_exp_1 (op_p, val, false);
}


/* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
   into the tree pointed to by OP_P.

   Use this version for const/copy propagation when SSA operands are not
   available.  It will perform the additional checks to ensure validity of
   the const/copy propagation, but will not update any operand information.
   Be sure to mark the stmt as modified.  */

void
propagate_tree_value (tree *op_p, tree val)
{
  if (TREE_CODE (val) == SSA_NAME)
    *op_p = val;
  else
    *op_p = unshare_expr (val);
}


/* Like propagate_tree_value, but use as the operand to replace
   the principal expression (typically, the RHS) contained in the
   statement referenced by iterator GSI.  Note that it is not
   always possible to update the statement in-place, so a new
   statement may be created to replace the original.  */

void
propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      tree expr = NULL_TREE;
      if (gimple_assign_single_p (stmt))
        expr = gimple_assign_rhs1 (stmt);
      propagate_tree_value (&expr, val);
      gimple_assign_set_rhs_from_tree (gsi, expr);
    }
  else if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      tree lhs = NULL_TREE;
      tree rhs = build_zero_cst (TREE_TYPE (val));
      propagate_tree_value (&lhs, val);
      gimple_cond_set_code (cond_stmt, NE_EXPR);
      gimple_cond_set_lhs (cond_stmt, lhs);
      gimple_cond_set_rhs (cond_stmt, rhs);
    }
  else if (is_gimple_call (stmt)
           && gimple_call_lhs (stmt) != NULL_TREE)
    {
      tree expr = NULL_TREE;
      bool res;
      propagate_tree_value (&expr, val);
      res = update_call_from_tree (gsi, expr);
      gcc_assert (res);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    propagate_tree_value (gimple_switch_index_ptr (swtch_stmt), val);
  else
    gcc_unreachable ();
}