1 /* Convert a program in SSA form into Normal form.
2    Copyright (C) 2004-2018 Free Software Foundation, Inc.
3    Contributed by Andrew Macleod <amacleod@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "ssa.h"
30 #include "memmodel.h"
31 #include "emit-rtl.h"
32 #include "gimple-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "stor-layout.h"
35 #include "cfgrtl.h"
36 #include "cfganal.h"
37 #include "tree-eh.h"
38 #include "gimple-iterator.h"
39 #include "tree-cfg.h"
40 #include "dumpfile.h"
41 #include "tree-ssa-live.h"
42 #include "tree-ssa-ter.h"
43 #include "tree-ssa-coalesce.h"
44 #include "tree-outof-ssa.h"
45 #include "dojump.h"
46 
47 /* FIXME: A lot of code here deals with expanding to RTL.  All that code
48    should be in cfgexpand.c.  */
49 #include "explow.h"
50 #include "expr.h"
51 
52 /* Return TRUE if expression STMT is suitable for replacement.  */
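/* An illustrative, hypothetical case (names made up):

     a_2 = b_1 + 4;
     c_3 = a_2 * d_5;

   Here the definition of a_2 is a non-throwing assignment with exactly one
   definition, a single immediate use, and no virtual or volatile operands,
   so it passes the tests below and its RHS may be substituted into the use
   when the statements are expanded.  A call such as  a_2 = foo ();  or a
   statement with a VDEF would be rejected.  */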
53 
54 bool
55 ssa_is_replaceable_p (gimple *stmt)
56 {
57   use_operand_p use_p;
58   tree def;
59   gimple *use_stmt;
60 
61   /* Only consider modify stmts.  */
62   if (!is_gimple_assign (stmt))
63     return false;
64 
65   /* If the statement may throw an exception, it cannot be replaced.  */
66   if (stmt_could_throw_p (stmt))
67     return false;
68 
  /* Punt unless there is exactly one definition.  */
70   def = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_DEF);
71   if (!def)
72     return false;
73 
74   /* Only consider definitions which have a single use.  */
75   if (!single_imm_use (def, &use_p, &use_stmt))
76     return false;
77 
  /* If the single use is a PHI, the value is needed at the TOP of the
     use's block, not at the end of this one, so do not replace it.  */
79   if (gimple_code (use_stmt) == GIMPLE_PHI)
80     return false;
81 
82   /* There must be no VDEFs.  */
83   if (gimple_vdef (stmt))
84     return false;
85 
86   /* Float expressions must go through memory if float-store is on.  */
87   if (flag_float_store
88       && FLOAT_TYPE_P (gimple_expr_type (stmt)))
89     return false;
90 
91   /* An assignment with a register variable on the RHS is not
92      replaceable.  */
93   if (gimple_assign_rhs_code (stmt) == VAR_DECL
94       && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt)))
95     return false;
96 
97   /* No function calls can be replaced.  */
98   if (is_gimple_call (stmt))
99     return false;
100 
101   /* Leave any stmt with volatile operands alone as well.  */
102   if (gimple_has_volatile_ops (stmt))
103     return false;
104 
105   return true;
106 }
107 
108 
/* Used to hold all the components required to do SSA PHI elimination.
   The node and pred/succ lists are simple linear lists of nodes and
   edges represented as pairs of nodes.

   The predecessor and successor list:  Nodes are entered in pairs, where
   [0]->PRED, [1]->SUCC.  All the even indexes in the array represent
   predecessors, all the odd elements are successors.

   Rationale:
   When these were implemented as bitmaps, the SSA->normal translation time
   for very large programs was dominated by clearing the interference graph.

   Typically this list of edges is extremely small since it only includes
   PHI results and uses from a single edge which have not coalesced with
   each other.  This means that no virtual PHI nodes are included, and
   empirical evidence suggests that the number of edges rarely exceeds
   3, and in a bootstrap of GCC, the maximum size encountered was 7.
   This also limits the number of possible nodes that are involved to
   rarely more than 6, and in the bootstrap of GCC, the maximum number
   of nodes encountered was 12.  */
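/* A hypothetical illustration of the layout described above: suppose the
   edge being eliminated requires the partition copies  P3 = P7  and
   P7 = P9.  Then

     nodes      = { 3, 7, 9 }
     edge_list  = { 3, 7,  7, 9 }   (pairs of [PRED, SUCC]; PRED is the
                                     destination and SUCC the source of
                                     each copy)
     edge_locus = { l1, l2 }        (one location per pair)

   The copy  P3 = P7  must be emitted before  P7  is overwritten by
   P7 = P9; the depth-first walk over this graph below establishes such an
   ordering.  The partition numbers are made up purely for illustration.  */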
129 
130 struct elim_graph
131 {
132   elim_graph (var_map map);
133 
134   /* Size of the elimination vectors.  */
135   int size;
136 
137   /* List of nodes in the elimination graph.  */
138   auto_vec<int> nodes;
139 
  /* The predecessor and successor edge list.  */
  auto_vec<int> edge_list;

  /* Source locus on each edge.  */
  auto_vec<source_location> edge_locus;
145 
146   /* Visited vector.  */
147   auto_sbitmap visited;
148 
149   /* Stack for visited nodes.  */
150   auto_vec<int> stack;
151 
152   /* The variable partition map.  */
153   var_map map;
154 
155   /* Edge being eliminated by this graph.  */
156   edge e;
157 
158   /* List of constant copies to emit.  These are pushed on in pairs.  */
159   auto_vec<int> const_dests;
160   auto_vec<tree> const_copies;
161 
162   /* Source locations for any constant copies.  */
163   auto_vec<source_location> copy_locus;
164 };
165 
166 
167 /* For an edge E find out a good source location to associate with
168    instructions inserted on edge E.  If E has an implicit goto set,
169    use its location.  Otherwise search instructions in predecessors
   of E for a location, and use that one.  That makes sense because
   we insert on edges for PHI nodes, and the effects of PHIs conceptually
   happen at the end of the predecessor block.  */
173 
174 static void
175 set_location_for_edge (edge e)
176 {
177   if (e->goto_locus)
178     {
179       set_curr_insn_location (e->goto_locus);
180     }
181   else
182     {
183       basic_block bb = e->src;
184       gimple_stmt_iterator gsi;
185 
186       do
187 	{
188 	  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
189 	    {
190 	      gimple *stmt = gsi_stmt (gsi);
191 	      if (is_gimple_debug (stmt))
192 		continue;
193 	      if (gimple_has_location (stmt) || gimple_block (stmt))
194 		{
195 		  set_curr_insn_location (gimple_location (stmt));
196 		  return;
197 		}
198 	    }
199 	  /* Nothing found in this basic block.  Make a half-assed attempt
200 	     to continue with another block.  */
201 	  if (single_pred_p (bb))
202 	    bb = single_pred (bb);
203 	  else
204 	    bb = e->src;
205 	}
206       while (bb != e->src);
207     }
208 }
209 
/* Emit insns to copy SRC into DEST, converting SRC if necessary.  As
   SRC/DEST might be BLKmode memory locations, SIZEEXP is a tree from
   which we deduce the size to copy in that case.  */
213 
214 static inline rtx_insn *
215 emit_partition_copy (rtx dest, rtx src, int unsignedsrcp, tree sizeexp)
216 {
217   start_sequence ();
218 
219   if (GET_MODE (src) != VOIDmode && GET_MODE (src) != GET_MODE (dest))
220     src = convert_to_mode (GET_MODE (dest), src, unsignedsrcp);
221   if (GET_MODE (src) == BLKmode)
222     {
223       gcc_assert (GET_MODE (dest) == BLKmode);
224       emit_block_move (dest, src, expr_size (sizeexp), BLOCK_OP_NORMAL);
225     }
226   else
227     emit_move_insn (dest, src);
228   do_pending_stack_adjust ();
229 
230   rtx_insn *seq = get_insns ();
231   end_sequence ();
232 
233   return seq;
234 }
235 
236 /* Insert a copy instruction from partition SRC to DEST onto edge E.  */
237 
238 static void
239 insert_partition_copy_on_edge (edge e, int dest, int src, source_location locus)
240 {
241   tree var;
242   if (dump_file && (dump_flags & TDF_DETAILS))
243     {
244       fprintf (dump_file,
245 	       "Inserting a partition copy on edge BB%d->BB%d : "
246 	       "PART.%d = PART.%d",
247 	       e->src->index,
248 	       e->dest->index, dest, src);
249       fprintf (dump_file, "\n");
250     }
251 
252   gcc_assert (SA.partition_to_pseudo[dest]);
253   gcc_assert (SA.partition_to_pseudo[src]);
254 
255   set_location_for_edge (e);
256   /* If a locus is provided, override the default.  */
257   if (locus)
258     set_curr_insn_location (locus);
259 
260   var = partition_to_var (SA.map, src);
261   rtx_insn *seq = emit_partition_copy (copy_rtx (SA.partition_to_pseudo[dest]),
262 				       copy_rtx (SA.partition_to_pseudo[src]),
263 				       TYPE_UNSIGNED (TREE_TYPE (var)),
264 				       var);
265 
266   insert_insn_on_edge (seq, e);
267 }
268 
269 /* Insert a copy instruction from expression SRC to partition DEST
270    onto edge E.  */
271 
272 static void
273 insert_value_copy_on_edge (edge e, int dest, tree src, source_location locus)
274 {
275   rtx dest_rtx, seq, x;
276   machine_mode dest_mode, src_mode;
277   int unsignedp;
278 
279   if (dump_file && (dump_flags & TDF_DETAILS))
280     {
281       fprintf (dump_file,
282 	       "Inserting a value copy on edge BB%d->BB%d : PART.%d = ",
283 	       e->src->index,
284 	       e->dest->index, dest);
285       print_generic_expr (dump_file, src, TDF_SLIM);
286       fprintf (dump_file, "\n");
287     }
288 
289   dest_rtx = copy_rtx (SA.partition_to_pseudo[dest]);
290   gcc_assert (dest_rtx);
291 
292   set_location_for_edge (e);
293   /* If a locus is provided, override the default.  */
294   if (locus)
295     set_curr_insn_location (locus);
296 
297   start_sequence ();
298 
299   tree name = partition_to_var (SA.map, dest);
300   src_mode = TYPE_MODE (TREE_TYPE (src));
301   dest_mode = GET_MODE (dest_rtx);
302   gcc_assert (src_mode == TYPE_MODE (TREE_TYPE (name)));
303   gcc_assert (!REG_P (dest_rtx)
304 	      || dest_mode == promote_ssa_mode (name, &unsignedp));
305 
306   if (src_mode != dest_mode)
307     {
308       x = expand_expr (src, NULL, src_mode, EXPAND_NORMAL);
309       x = convert_modes (dest_mode, src_mode, x, unsignedp);
310     }
311   else if (src_mode == BLKmode)
312     {
313       x = dest_rtx;
314       store_expr (src, x, 0, false, false);
315     }
316   else
317     x = expand_expr (src, dest_rtx, dest_mode, EXPAND_NORMAL);
318 
319   if (x != dest_rtx)
320     emit_move_insn (dest_rtx, x);
321   do_pending_stack_adjust ();
322 
323   seq = get_insns ();
324   end_sequence ();
325 
326   insert_insn_on_edge (seq, e);
327 }
328 
329 /* Insert a copy instruction from RTL expression SRC to partition DEST
330    onto edge E.  */
331 
332 static void
333 insert_rtx_to_part_on_edge (edge e, int dest, rtx src, int unsignedsrcp,
334 			    source_location locus)
335 {
336   if (dump_file && (dump_flags & TDF_DETAILS))
337     {
338       fprintf (dump_file,
339 	       "Inserting a temp copy on edge BB%d->BB%d : PART.%d = ",
340 	       e->src->index,
341 	       e->dest->index, dest);
342       print_simple_rtl (dump_file, src);
343       fprintf (dump_file, "\n");
344     }
345 
346   gcc_assert (SA.partition_to_pseudo[dest]);
347 
348   set_location_for_edge (e);
349   /* If a locus is provided, override the default.  */
350   if (locus)
351     set_curr_insn_location (locus);
352 
  /* We give the destination as sizeexp in case src/dest are BLKmode
     mems.  Usually we give the source.  As both result from SSA names,
     the left and right sizes should be the same (and no WITH_SIZE_EXPR
     is involved), so it doesn't matter.  */
357   rtx_insn *seq = emit_partition_copy (copy_rtx (SA.partition_to_pseudo[dest]),
358 				       src, unsignedsrcp,
359 				       partition_to_var (SA.map, dest));
360 
361   insert_insn_on_edge (seq, e);
362 }
363 
364 /* Insert a copy instruction from partition SRC to RTL lvalue DEST
365    onto edge E.  */
366 
367 static void
368 insert_part_to_rtx_on_edge (edge e, rtx dest, int src, source_location locus)
369 {
370   tree var;
371   if (dump_file && (dump_flags & TDF_DETAILS))
372     {
373       fprintf (dump_file,
374 	       "Inserting a temp copy on edge BB%d->BB%d : ",
375 	       e->src->index,
376 	       e->dest->index);
377       print_simple_rtl (dump_file, dest);
378       fprintf (dump_file, "= PART.%d\n", src);
379     }
380 
381   gcc_assert (SA.partition_to_pseudo[src]);
382 
383   set_location_for_edge (e);
384   /* If a locus is provided, override the default.  */
385   if (locus)
386     set_curr_insn_location (locus);
387 
388   var = partition_to_var (SA.map, src);
389   rtx_insn *seq = emit_partition_copy (dest,
390 				       copy_rtx (SA.partition_to_pseudo[src]),
391 				       TYPE_UNSIGNED (TREE_TYPE (var)),
392 				       var);
393 
394   insert_insn_on_edge (seq, e);
395 }
396 
397 
/* Create an elimination graph for MAP.  */
399 
400 elim_graph::elim_graph (var_map map) :
401   nodes (30), edge_list (20), edge_locus (10), visited (map->num_partitions),
402   stack (30), map (map), const_dests (20), const_copies (20), copy_locus (10)
403 {
404 }
405 
406 
/* Remove all nodes and edges from elimination graph G.  */
408 
409 static inline void
410 clear_elim_graph (elim_graph *g)
411 {
412   g->nodes.truncate (0);
413   g->edge_list.truncate (0);
414   g->edge_locus.truncate (0);
415 }
416 
417 
418 /* Return the number of nodes in graph G.  */
419 
420 static inline int
421 elim_graph_size (elim_graph *g)
422 {
423   return g->nodes.length ();
424 }
425 
426 
427 /* Add NODE to graph G, if it doesn't exist already.  */
428 
429 static inline void
430 elim_graph_add_node (elim_graph *g, int node)
431 {
432   int x;
433   int t;
434 
435   FOR_EACH_VEC_ELT (g->nodes, x, t)
436     if (t == node)
437       return;
438   g->nodes.safe_push (node);
439 }
440 
441 
442 /* Add the edge PRED->SUCC to graph G.  */
443 
444 static inline void
445 elim_graph_add_edge (elim_graph *g, int pred, int succ, source_location locus)
446 {
447   g->edge_list.safe_push (pred);
448   g->edge_list.safe_push (succ);
449   g->edge_locus.safe_push (locus);
450 }
451 
452 
453 /* Remove an edge from graph G for which NODE is the predecessor, and
454    return the successor node.  -1 is returned if there is no such edge.  */
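/* For illustration (hypothetical contents): with edge_list = { 3, 7,  7, 9 },
   removing the successor edge for node 3 returns 7 and leaves the list as
   { -1, -1,  7, 9 }, with the corresponding edge_locus slot cleared.  */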
455 
456 static inline int
457 elim_graph_remove_succ_edge (elim_graph *g, int node, source_location *locus)
458 {
459   int y;
460   unsigned x;
461   for (x = 0; x < g->edge_list.length (); x += 2)
462     if (g->edge_list[x] == node)
463       {
464         g->edge_list[x] = -1;
465 	y = g->edge_list[x + 1];
466 	g->edge_list[x + 1] = -1;
467 	*locus = g->edge_locus[x / 2];
468 	g->edge_locus[x / 2] = UNKNOWN_LOCATION;
469 	return y;
470       }
471   *locus = UNKNOWN_LOCATION;
472   return -1;
473 }
474 
475 
/* Find all the nodes in GRAPH which are successors to NODE in the
   edge list.  VAR will hold the partition number found and LOCUS the
   source location of the edge.  CODE is the code fragment executed
   for every node found.  */
479 
480 #define FOR_EACH_ELIM_GRAPH_SUCC(GRAPH, NODE, VAR, LOCUS, CODE)		\
481 do {									\
482   unsigned x_;								\
483   int y_;								\
484   for (x_ = 0; x_ < (GRAPH)->edge_list.length (); x_ += 2)	\
485     {									\
486       y_ = (GRAPH)->edge_list[x_];					\
487       if (y_ != (NODE))							\
488         continue;							\
489       (void) ((VAR) = (GRAPH)->edge_list[x_ + 1]);			\
490       (void) ((LOCUS) = (GRAPH)->edge_locus[x_ / 2]);			\
491       CODE;								\
492     }									\
493 } while (0)
494 
495 
/* Find all the nodes which are predecessors of NODE in the edge list for
   GRAPH.  VAR will hold the partition number found and LOCUS the source
   location of the edge.  CODE is the code fragment executed for every
   node found.  */
499 
500 #define FOR_EACH_ELIM_GRAPH_PRED(GRAPH, NODE, VAR, LOCUS, CODE)		\
501 do {									\
502   unsigned x_;								\
503   int y_;								\
504   for (x_ = 0; x_ < (GRAPH)->edge_list.length (); x_ += 2)	\
505     {									\
506       y_ = (GRAPH)->edge_list[x_ + 1];					\
507       if (y_ != (NODE))							\
508         continue;							\
509       (void) ((VAR) = (GRAPH)->edge_list[x_]);				\
510       (void) ((LOCUS) = (GRAPH)->edge_locus[x_ / 2]);			\
511       CODE;								\
512     }									\
513 } while (0)
514 
515 
516 /* Add T to elimination graph G.  */
517 
518 static inline void
519 eliminate_name (elim_graph *g, int T)
520 {
521   elim_graph_add_node (g, T);
522 }
523 
/* Return true if the PHI argument T should have a copy queued when using
   var_map MAP.  PHI arguments should contain only SSA_NAMEs and invariants.
   Testing only for SSA_NAME would be simpler, but don't let invalid
   contents slip through in the meantime.  */
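/* For example (hypothetical names), given the PHI  x_4 = PHI <0(E1), y_2(E2)>:
   the constant argument 0 always yields true (a copy is queued), whereas the
   SSA_NAME y_2 yields true only if it was not assigned a partition in MAP.  */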
528 
529 static inline bool
530 queue_phi_copy_p (var_map map, tree t)
531 {
532   if (TREE_CODE (t) == SSA_NAME)
533     {
534       if (var_to_partition (map, t) == NO_PARTITION)
535         return true;
536       return false;
537     }
538   gcc_checking_assert (is_gimple_min_invariant (t));
539   return true;
540 }
541 
/* Build the elimination graph G for the PHI nodes in the destination
   block of the incoming PHI edge G->e.  */
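/* A hypothetical example: for the PHI

     a_5 = PHI <b_3(E1), 7(E2)>

   with G->e == E1, if b_3 has a partition different from that of a_5, the
   pair (partition of a_5, partition of b_3) is added as an edge of G.  With
   G->e == E2 the constant 7 would instead be queued on const_dests and
   const_copies.  */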
544 
545 static void
546 eliminate_build (elim_graph *g)
547 {
548   tree Ti;
549   int p0, pi;
550   gphi_iterator gsi;
551 
552   clear_elim_graph (g);
553 
554   for (gsi = gsi_start_phis (g->e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
555     {
556       gphi *phi = gsi.phi ();
557       source_location locus;
558 
559       p0 = var_to_partition (g->map, gimple_phi_result (phi));
560       /* Ignore results which are not in partitions.  */
561       if (p0 == NO_PARTITION)
562 	continue;
563 
564       Ti = PHI_ARG_DEF (phi, g->e->dest_idx);
565       locus = gimple_phi_arg_location_from_edge (phi, g->e);
566 
567       /* If this argument is a constant, or a SSA_NAME which is being
568 	 left in SSA form, just queue a copy to be emitted on this
569 	 edge.  */
570       if (queue_phi_copy_p (g->map, Ti))
571         {
572 	  /* Save constant copies until all other copies have been emitted
573 	     on this edge.  */
574 	  g->const_dests.safe_push (p0);
575 	  g->const_copies.safe_push (Ti);
576 	  g->copy_locus.safe_push (locus);
577 	}
578       else
579         {
580 	  pi = var_to_partition (g->map, Ti);
581 	  if (p0 != pi)
582 	    {
583 	      eliminate_name (g, p0);
584 	      eliminate_name (g, pi);
585 	      elim_graph_add_edge (g, p0, pi, locus);
586 	    }
587 	}
588     }
589 }
590 
591 
/* Depth-first visit the successors of T in graph G, then push T onto the
   elimination stack for G.  */
593 
594 static void
595 elim_forward (elim_graph *g, int T)
596 {
597   int S;
598   source_location locus;
599 
600   bitmap_set_bit (g->visited, T);
601   FOR_EACH_ELIM_GRAPH_SUCC (g, T, S, locus,
602     {
603       if (!bitmap_bit_p (g->visited, S))
604         elim_forward (g, S);
605     });
606   g->stack.safe_push (T);
607 }
608 
609 
/* Return 1 if there are unvisited predecessors of T in graph G.  */
611 
612 static int
613 elim_unvisited_predecessor (elim_graph *g, int T)
614 {
615   int P;
616   source_location locus;
617 
618   FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
619     {
620       if (!bitmap_bit_p (g->visited, P))
621         return 1;
622     });
623   return 0;
624 }
625 
/* Recursively process the unvisited predecessors of T in graph G, then
   insert a copy from T into each of them on edge G->e.  */
627 
628 static void
629 elim_backward (elim_graph *g, int T)
630 {
631   int P;
632   source_location locus;
633 
634   bitmap_set_bit (g->visited, T);
635   FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
636     {
637       if (!bitmap_bit_p (g->visited, P))
638         {
639 	  elim_backward (g, P);
640 	  insert_partition_copy_on_edge (g->e, P, T, locus);
641 	}
642     });
643 }
644 
645 /* Allocate a new pseudo register usable for storing values sitting
646    in NAME (a decl or SSA name), i.e. with matching mode and attributes.  */
647 
648 static rtx
649 get_temp_reg (tree name)
650 {
651   tree type = TREE_TYPE (name);
652   int unsignedp;
653   machine_mode reg_mode = promote_ssa_mode (name, &unsignedp);
654   rtx x = gen_reg_rtx (reg_mode);
655   if (POINTER_TYPE_P (type))
656     mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
657   return x;
658 }
659 
660 /* Insert required copies for T in graph G.  Check for a strongly connected
661    region, and create a temporary to break the cycle if one is found.  */
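/* A hypothetical example: if the edge requires the swap  P3 = P7  and
   P7 = P3, the graph contains the cycle 3 <-> 7.  The code below breaks it
   with a temporary register U, producing on the edge the sequence

     U  = P3
     P3 = P7
     P7 = U

   The partition numbers are made up for illustration only.  */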
662 
663 static void
664 elim_create (elim_graph *g, int T)
665 {
666   int P, S;
667   source_location locus;
668 
669   if (elim_unvisited_predecessor (g, T))
670     {
671       tree var = partition_to_var (g->map, T);
672       rtx U = get_temp_reg (var);
673       int unsignedsrcp = TYPE_UNSIGNED (TREE_TYPE (var));
674 
675       insert_part_to_rtx_on_edge (g->e, U, T, UNKNOWN_LOCATION);
676       FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
677 	{
678 	  if (!bitmap_bit_p (g->visited, P))
679 	    {
680 	      elim_backward (g, P);
681 	      insert_rtx_to_part_on_edge (g->e, P, U, unsignedsrcp, locus);
682 	    }
683 	});
684     }
685   else
686     {
687       S = elim_graph_remove_succ_edge (g, T, &locus);
688       if (S != -1)
689 	{
690 	  bitmap_set_bit (g->visited, T);
691 	  insert_partition_copy_on_edge (g->e, T, S, locus);
692 	}
693     }
694 }
695 
696 
697 /* Eliminate all the phi nodes on edge E in graph G.  */
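/* A descriptive sketch of the code below: build the graph of required
   partition copies for edge E, perform a depth-first walk to order the
   nodes on a stack, then pop the stack emitting copies so that each source
   partition is read before it is overwritten; cycles are broken in
   elim_create by going through a temporary register.  Queued constant
   copies are emitted last.  */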
698 
699 static void
700 eliminate_phi (edge e, elim_graph *g)
701 {
702   int x;
703 
704   gcc_assert (g->const_copies.length () == 0);
705   gcc_assert (g->copy_locus.length () == 0);
706 
707   /* Abnormal edges already have everything coalesced.  */
708   if (e->flags & EDGE_ABNORMAL)
709     return;
710 
711   g->e = e;
712 
713   eliminate_build (g);
714 
715   if (elim_graph_size (g) != 0)
716     {
717       int part;
718 
719       bitmap_clear (g->visited);
720       g->stack.truncate (0);
721 
722       FOR_EACH_VEC_ELT (g->nodes, x, part)
723         {
724 	  if (!bitmap_bit_p (g->visited, part))
725 	    elim_forward (g, part);
726 	}
727 
728       bitmap_clear (g->visited);
729       while (g->stack.length () > 0)
730 	{
731 	  x = g->stack.pop ();
732 	  if (!bitmap_bit_p (g->visited, x))
733 	    elim_create (g, x);
734 	}
735     }
736 
737   /* If there are any pending constant copies, issue them now.  */
738   while (g->const_copies.length () > 0)
739     {
740       int dest;
741       tree src;
742       source_location locus;
743 
744       src = g->const_copies.pop ();
745       dest = g->const_dests.pop ();
746       locus = g->copy_locus.pop ();
747       insert_value_copy_on_edge (e, dest, src, locus);
748     }
749 }
750 
751 
752 /* Remove each argument from PHI.  If an arg was the last use of an SSA_NAME,
753    check to see if this allows another PHI node to be removed.  */
754 
755 static void
756 remove_gimple_phi_args (gphi *phi)
757 {
758   use_operand_p arg_p;
759   ssa_op_iter iter;
760 
761   if (dump_file && (dump_flags & TDF_DETAILS))
762     {
763       fprintf (dump_file, "Removing Dead PHI definition: ");
764       print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
765     }
766 
767   FOR_EACH_PHI_ARG (arg_p, phi, iter, SSA_OP_USE)
768     {
769       tree arg = USE_FROM_PTR (arg_p);
770       if (TREE_CODE (arg) == SSA_NAME)
771         {
772 	  /* Remove the reference to the existing argument.  */
773 	  SET_USE (arg_p, NULL_TREE);
774 	  if (has_zero_uses (arg))
775 	    {
776 	      gimple *stmt;
777 	      gimple_stmt_iterator gsi;
778 
779 	      stmt = SSA_NAME_DEF_STMT (arg);
780 
781 	      /* Also remove the def if it is a PHI node.  */
782 	      if (gimple_code (stmt) == GIMPLE_PHI)
783 		{
784 		  remove_gimple_phi_args (as_a <gphi *> (stmt));
785 		  gsi = gsi_for_stmt (stmt);
786 		  remove_phi_node (&gsi, true);
787 		}
788 
789 	    }
790 	}
791     }
792 }
793 
794 /* Remove any PHI node which is a virtual PHI, or a PHI with no uses.  */
795 
796 static void
797 eliminate_useless_phis (void)
798 {
799   basic_block bb;
800   gphi_iterator gsi;
801   tree result;
802 
803   FOR_EACH_BB_FN (bb, cfun)
804     {
805       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
806         {
807 	  gphi *phi = gsi.phi ();
808 	  result = gimple_phi_result (phi);
809 	  if (virtual_operand_p (result))
810 	    {
811 	      /* There should be no arguments which are not virtual, or the
812 	         results will be incorrect.  */
813 	      if (flag_checking)
814 		for (size_t i = 0; i < gimple_phi_num_args (phi); i++)
815 		  {
816 		    tree arg = PHI_ARG_DEF (phi, i);
817 		    if (TREE_CODE (arg) == SSA_NAME
818 			&& !virtual_operand_p (arg))
819 		      {
820 			fprintf (stderr, "Argument of PHI is not virtual (");
821 			print_generic_expr (stderr, arg, TDF_SLIM);
822 			fprintf (stderr, "), but the result is :");
823 			print_gimple_stmt (stderr, phi, 0, TDF_SLIM);
824 			internal_error ("SSA corruption");
825 		      }
826 		  }
827 
828 	      remove_phi_node (&gsi, true);
829 	    }
830           else
831 	    {
832 	      /* Also remove real PHIs with no uses.  */
833 	      if (has_zero_uses (result))
834 	        {
835 		  remove_gimple_phi_args (phi);
836 		  remove_phi_node (&gsi, true);
837 		}
838 	      else
839 		gsi_next (&gsi);
840 	    }
841 	}
842     }
843 }
844 
845 
/* Perform a sanity check (when checking is enabled) on the variable
   partitioning in MAP: look for PHIs whose result has no partition while
   one of the SSA_NAME arguments does.  That should not happen and could
   lead to incorrect code, so it is reported as SSA corruption.  */
851 
852 static void
853 rewrite_trees (var_map map)
854 {
855   if (!flag_checking)
856     return;
857 
858   basic_block bb;
  /* Search for PHIs where the destination has no partition, but one
     or more arguments have a partition.  This should not happen and can
     create incorrect code.  */
862   FOR_EACH_BB_FN (bb, cfun)
863     {
864       gphi_iterator gsi;
865       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
866 	{
867 	  gphi *phi = gsi.phi ();
868 	  tree T0 = var_to_partition_to_var (map, gimple_phi_result (phi));
869 	  if (T0 == NULL_TREE)
870 	    {
871 	      size_t i;
872 	      for (i = 0; i < gimple_phi_num_args (phi); i++)
873 		{
874 		  tree arg = PHI_ARG_DEF (phi, i);
875 
876 		  if (TREE_CODE (arg) == SSA_NAME
877 		      && var_to_partition (map, arg) != NO_PARTITION)
878 		    {
879 		      fprintf (stderr, "Argument of PHI is in a partition :(");
880 		      print_generic_expr (stderr, arg, TDF_SLIM);
881 		      fprintf (stderr, "), but the result is not :");
882 		      print_gimple_stmt (stderr, phi, 0, TDF_SLIM);
883 		      internal_error ("SSA corruption");
884 		    }
885 		}
886 	    }
887 	}
888     }
889 }
890 
/* Given the out-of-SSA info object SA (with prepared partitions),
   eliminate all PHI nodes in all basic blocks.  Afterwards no
   basic block will have PHI nodes any more, and there may be some
   RTL instructions inserted on edges.  */
895 
896 void
897 expand_phi_nodes (struct ssaexpand *sa)
898 {
899   basic_block bb;
900   elim_graph g (sa->map);
901 
902   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb,
903 		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
904     if (!gimple_seq_empty_p (phi_nodes (bb)))
905       {
906 	edge e;
907 	edge_iterator ei;
908 	FOR_EACH_EDGE (e, ei, bb->preds)
909 	  eliminate_phi (e, &g);
910 	set_phi_nodes (bb, NULL);
911 	/* We can't redirect EH edges in RTL land, so we need to do this
912 	   here.  Redirection happens only when splitting is necessary,
913 	   which it is only for critical edges, normally.  For EH edges
914 	   it might also be necessary when the successor has more than
915 	   one predecessor.  In that case the edge is either required to
916 	   be fallthru (which EH edges aren't), or the predecessor needs
917 	   to end with a jump (which again, isn't the case with EH edges).
918 	   Hence, split all EH edges on which we inserted instructions
919 	   and whose successor has multiple predecessors.  */
920 	for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
921 	  {
922 	    if (e->insns.r && (e->flags & EDGE_EH)
923 		&& !single_pred_p (e->dest))
924 	      {
925 		rtx_insn *insns = e->insns.r;
926 		basic_block bb;
927 		e->insns.r = NULL;
928 		bb = split_edge (e);
929 		single_pred_edge (bb)->insns.r = insns;
930 	      }
931 	    else
932 	      ei_next (&ei);
933 	  }
934       }
935 }
936 
937 
938 /* Remove the ssa-names in the current function and translate them into normal
939    compiler variables.  PERFORM_TER is true if Temporary Expression Replacement
940    should also be used.  */
941 
942 static void
943 remove_ssa_form (bool perform_ter, struct ssaexpand *sa)
944 {
945   bitmap values = NULL;
946   var_map map;
947 
948   map = coalesce_ssa_name ();
949 
950   /* Return to viewing the variable list as just all reference variables after
951      coalescing has been performed.  */
952   partition_view_normal (map);
953 
954   if (dump_file && (dump_flags & TDF_DETAILS))
955     {
956       fprintf (dump_file, "After Coalescing:\n");
957       dump_var_map (dump_file, map);
958     }
959 
960   if (perform_ter)
961     {
962       values = find_replaceable_exprs (map);
963       if (values && dump_file && (dump_flags & TDF_DETAILS))
964 	dump_replaceable_exprs (dump_file, values);
965     }
966 
967   rewrite_trees (map);
968 
969   sa->map = map;
970   sa->values = values;
971   sa->partitions_for_parm_default_defs = get_parm_default_def_partitions (map);
972   sa->partitions_for_undefined_values = get_undefined_value_partitions (map);
973 }
974 
975 
/* If not already done for basic block BB, assign increasing uids
   to each of its instructions.  */
978 
979 static void
980 maybe_renumber_stmts_bb (basic_block bb)
981 {
982   unsigned i = 0;
983   gimple_stmt_iterator gsi;
984 
985   if (!bb->aux)
986     return;
987   bb->aux = NULL;
988   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
989     {
990       gimple *stmt = gsi_stmt (gsi);
991       gimple_set_uid (stmt, i);
992       i++;
993     }
994 }
995 
996 
997 /* Return true if we can determine that the SSA_NAMEs RESULT (a result
998    of a PHI node) and ARG (one of its arguments) conflict.  Return false
999    otherwise, also when we simply aren't sure.  */
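/* A hypothetical example of such a conflict, with RESULT == res_1 and
   ARG == arg_2 both belonging to BB:

     res_1 = PHI <..., arg_2(backedge)>
     arg_2 = res_1 + 1;
     foo (res_1);

   The use of res_1 after the definition of arg_2 (or any use of res_1
   outside BB) means the two values are live at the same time, so the
   function returns true.  */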
1000 
1001 static bool
1002 trivially_conflicts_p (basic_block bb, tree result, tree arg)
1003 {
1004   use_operand_p use;
1005   imm_use_iterator imm_iter;
1006   gimple *defa = SSA_NAME_DEF_STMT (arg);
1007 
1008   /* If ARG isn't defined in the same block it's too complicated for
1009      our little mind.  */
1010   if (gimple_bb (defa) != bb)
1011     return false;
1012 
1013   FOR_EACH_IMM_USE_FAST (use, imm_iter, result)
1014     {
1015       gimple *use_stmt = USE_STMT (use);
1016       if (is_gimple_debug (use_stmt))
1017 	continue;
1018       /* Now, if there's a use of RESULT that lies outside this basic block,
1019 	 then there surely is a conflict with ARG.  */
1020       if (gimple_bb (use_stmt) != bb)
1021 	return true;
1022       if (gimple_code (use_stmt) == GIMPLE_PHI)
1023 	continue;
1024       /* The use now is in a real stmt of BB, so if ARG was defined
1025          in a PHI node (like RESULT) both conflict.  */
1026       if (gimple_code (defa) == GIMPLE_PHI)
1027 	return true;
1028       maybe_renumber_stmts_bb (bb);
1029       /* If the use of RESULT occurs after the definition of ARG,
1030          the two conflict too.  */
1031       if (gimple_uid (defa) < gimple_uid (use_stmt))
1032 	return true;
1033     }
1034 
1035   return false;
1036 }
1037 
1038 
1039 /* Search every PHI node for arguments associated with backedges which
1040    we can trivially determine will need a copy (the argument is either
1041    not an SSA_NAME or the argument has a different underlying variable
1042    than the PHI result).
1043 
1044    Insert a copy from the PHI argument to a new destination at the
1045    end of the block with the backedge to the top of the loop.  Update
1046    the PHI argument to reference this new destination.  */
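/* A hypothetical example: for the loop header PHI

     i_1 = PHI <0(preheader), i_2(latch)>

   where i_2 has a different underlying variable than i_1 (or trivially
   conflicts with it), a new SSA name based on i_1, say i_3, is created, the
   copy  i_3 = i_2;  is inserted near the end of the latch block, and the
   PHI argument is changed to i_3.  The intent is that i_3 can coalesce with
   i_1, so PHI elimination does not need to insert a copy on the backedge
   (which would require splitting it).  */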
1047 
1048 static void
1049 insert_backedge_copies (void)
1050 {
1051   basic_block bb;
1052   gphi_iterator gsi;
1053 
1054   mark_dfs_back_edges ();
1055 
1056   FOR_EACH_BB_FN (bb, cfun)
1057     {
1058       /* Mark block as possibly needing calculation of UIDs.  */
1059       bb->aux = &bb->aux;
1060 
1061       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1062 	{
1063 	  gphi *phi = gsi.phi ();
1064 	  tree result = gimple_phi_result (phi);
1065 	  size_t i;
1066 
1067 	  if (virtual_operand_p (result))
1068 	    continue;
1069 
1070 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
1071 	    {
1072 	      tree arg = gimple_phi_arg_def (phi, i);
1073 	      edge e = gimple_phi_arg_edge (phi, i);
1074 
1075 	      /* If the argument is not an SSA_NAME, then we will need a
1076 		 constant initialization.  If the argument is an SSA_NAME with
1077 		 a different underlying variable then a copy statement will be
1078 		 needed.  */
1079 	      if ((e->flags & EDGE_DFS_BACK)
1080 		  && (TREE_CODE (arg) != SSA_NAME
1081 		      || SSA_NAME_VAR (arg) != SSA_NAME_VAR (result)
1082 		      || trivially_conflicts_p (bb, result, arg)))
1083 		{
1084 		  tree name;
1085 		  gassign *stmt;
1086 		  gimple *last = NULL;
1087 		  gimple_stmt_iterator gsi2;
1088 
1089 		  gsi2 = gsi_last_bb (gimple_phi_arg_edge (phi, i)->src);
1090 		  if (!gsi_end_p (gsi2))
1091 		    last = gsi_stmt (gsi2);
1092 
1093 		  /* In theory the only way we ought to get back to the
1094 		     start of a loop should be with a COND_EXPR or GOTO_EXPR.
1095 		     However, better safe than sorry.
1096 		     If the block ends with a control statement or
1097 		     something that might throw, then we have to
1098 		     insert this assignment before the last
1099 		     statement.  Else insert it after the last statement.  */
1100 		  if (last && stmt_ends_bb_p (last))
1101 		    {
1102 		      /* If the last statement in the block is the definition
1103 			 site of the PHI argument, then we can't insert
1104 			 anything after it.  */
1105 		      if (TREE_CODE (arg) == SSA_NAME
1106 			  && SSA_NAME_DEF_STMT (arg) == last)
1107 			continue;
1108 		    }
1109 
1110 		  /* Create a new instance of the underlying variable of the
1111 		     PHI result.  */
1112 		  name = copy_ssa_name (result);
1113 		  stmt = gimple_build_assign (name,
1114 					      gimple_phi_arg_def (phi, i));
1115 
		  /* Copy location if present.  */
1117 		  if (gimple_phi_arg_has_location (phi, i))
1118 		    gimple_set_location (stmt,
1119 					 gimple_phi_arg_location (phi, i));
1120 
1121 		  /* Insert the new statement into the block and update
1122 		     the PHI node.  */
1123 		  if (last && stmt_ends_bb_p (last))
1124 		    gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
1125 		  else
1126 		    gsi_insert_after (&gsi2, stmt, GSI_NEW_STMT);
1127 		  SET_PHI_ARG_DEF (phi, i, name);
1128 		}
1129 	    }
1130 	}
1131 
1132       /* Unmark this block again.  */
1133       bb->aux = NULL;
1134     }
1135 }
1136 
1137 /* Free all memory associated with going out of SSA form.  SA is
1138    the outof-SSA info object.  */
1139 
1140 void
1141 finish_out_of_ssa (struct ssaexpand *sa)
1142 {
1143   free (sa->partition_to_pseudo);
1144   if (sa->values)
1145     BITMAP_FREE (sa->values);
1146   delete_var_map (sa->map);
1147   BITMAP_FREE (sa->partitions_for_parm_default_defs);
1148   BITMAP_FREE (sa->partitions_for_undefined_values);
1149   memset (sa, 0, sizeof *sa);
1150 }
1151 
1152 /* Take the current function out of SSA form, translating PHIs as described in
1153    R. Morgan, ``Building an Optimizing Compiler'',
1154    Butterworth-Heinemann, Boston, MA, 1998. pp 176-186.  */
1155 
1156 unsigned int
1157 rewrite_out_of_ssa (struct ssaexpand *sa)
1158 {
1159   /* If elimination of a PHI requires inserting a copy on a backedge,
1160      then we will have to split the backedge which has numerous
1161      undesirable performance effects.
1162 
1163      A significant number of such cases can be handled here by inserting
1164      copies into the loop itself.  */
1165   insert_backedge_copies ();
1166 
1167 
1168   /* Eliminate PHIs which are of no use, such as virtual or dead phis.  */
1169   eliminate_useless_phis ();
1170 
1171   if (dump_file && (dump_flags & TDF_DETAILS))
1172     gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS);
1173 
1174   remove_ssa_form (flag_tree_ter, sa);
1175 
1176   if (dump_file && (dump_flags & TDF_DETAILS))
1177     gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS);
1178 
1179   return 0;
1180 }
1181