xref: /dragonfly/contrib/gcc-4.7/gcc/tree-ssa.c (revision e4b17023)
1 /* Miscellaneous SSA utility functions.
2    Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
3    Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "tm_p.h"
28 #include "target.h"
29 #include "ggc.h"
30 #include "langhooks.h"
31 #include "basic-block.h"
32 #include "output.h"
33 #include "function.h"
34 #include "tree-pretty-print.h"
35 #include "gimple-pretty-print.h"
36 #include "bitmap.h"
37 #include "pointer-set.h"
38 #include "tree-flow.h"
39 #include "gimple.h"
40 #include "tree-inline.h"
41 #include "timevar.h"
42 #include "hashtab.h"
43 #include "tree-dump.h"
44 #include "tree-pass.h"
45 #include "diagnostic-core.h"
46 #include "cfgloop.h"
47 
48 /* Pointer map of variable mappings, keyed by edge.  */
49 static struct pointer_map_t *edge_var_maps;
50 
51 
52 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */
53 
54 void
55 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
56 {
57   void **slot;
58   edge_var_map_vector old_head, head;
59   edge_var_map new_node;
60 
61   if (edge_var_maps == NULL)
62     edge_var_maps = pointer_map_create ();
63 
64   slot = pointer_map_insert (edge_var_maps, e);
65   old_head = head = (edge_var_map_vector) *slot;
66   if (!head)
67     {
68       head = VEC_alloc (edge_var_map, heap, 5);
69       *slot = head;
70     }
71   new_node.def = def;
72   new_node.result = result;
73   new_node.locus = locus;
74 
75   VEC_safe_push (edge_var_map, heap, head, &new_node);
76   if (old_head != head)
77     {
78       /* The push did some reallocation.  Update the pointer map.  */
79       *slot = head;
80     }
81 }
82 
83 
84 /* Clear the var mappings in edge E.  */
85 
86 void
87 redirect_edge_var_map_clear (edge e)
88 {
89   void **slot;
90   edge_var_map_vector head;
91 
92   if (!edge_var_maps)
93     return;
94 
95   slot = pointer_map_contains (edge_var_maps, e);
96 
97   if (slot)
98     {
99       head = (edge_var_map_vector) *slot;
100       VEC_free (edge_var_map, heap, head);
101       *slot = NULL;
102     }
103 }
104 
105 
106 /* Duplicate the redirected var mappings of OLDE into NEWE.
107 
108    Since we can't remove a mapping, let's just duplicate it.  This assumes a
109    pointer_map can have multiple edges mapping to the same var_map (many to
110    one mapping), since we don't remove the previous mappings.  */
111 
112 void
113 redirect_edge_var_map_dup (edge newe, edge olde)
114 {
115   void **new_slot, **old_slot;
116   edge_var_map_vector head;
117 
118   if (!edge_var_maps)
119     return;
120 
121   new_slot = pointer_map_insert (edge_var_maps, newe);
122   old_slot = pointer_map_contains (edge_var_maps, olde);
123   if (!old_slot)
124     return;
125   head = (edge_var_map_vector) *old_slot;
126 
127   if (head)
128     *new_slot = VEC_copy (edge_var_map, heap, head);
129   else
130     *new_slot = VEC_alloc (edge_var_map, heap, 5);
131 }
132 
133 
134 /* Return the variable mappings for a given edge.  If there is none, return
135    NULL.  */
136 
137 edge_var_map_vector
138 redirect_edge_var_map_vector (edge e)
139 {
140   void **slot;
141 
142   /* Callers may legitimately query before any mapping has been added.  */
143   if (!edge_var_maps)
144     return NULL;
145 
146   slot = pointer_map_contains (edge_var_maps, e);
147   if (!slot)
148     return NULL;
149 
150   return (edge_var_map_vector) *slot;
151 }
152 
153 /* Used by redirect_edge_var_map_destroy to free all memory.  */
154 
155 static bool
156 free_var_map_entry (const void *key ATTRIBUTE_UNUSED,
157 		    void **value,
158 		    void *data ATTRIBUTE_UNUSED)
159 {
160   edge_var_map_vector head = (edge_var_map_vector) *value;
161   VEC_free (edge_var_map, heap, head);
162   return true;
163 }
164 
165 /* Clear the edge variable mappings.  */
166 
167 void
168 redirect_edge_var_map_destroy (void)
169 {
170   if (edge_var_maps)
171     {
172       pointer_map_traverse (edge_var_maps, free_var_map_entry, NULL);
173       pointer_map_destroy (edge_var_maps);
174       edge_var_maps = NULL;
175     }
176 }
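
/* A typical lifecycle of the maps above (an illustrative sketch, not a
   verbatim caller):

     redirect_edge_var_map_add (e, result, def, locus);   save a PHI arg
     v = redirect_edge_var_map_vector (e);                 inspect the args
     redirect_edge_var_map_clear (e);                      forget one edge
     redirect_edge_var_map_destroy ();                     forget everything

   In practice ssa_redirect_edge (below) fills the map and
   flush_pending_stmts consumes it.  */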
177 
178 
179 /* Remove the corresponding arguments from the PHI nodes in E's
180    destination block and redirect it to DEST.  Return redirected edge.
181    The list of removed arguments is stored in a vector accessed
182    through edge_var_maps.  */
183 
184 edge
185 ssa_redirect_edge (edge e, basic_block dest)
186 {
187   gimple_stmt_iterator gsi;
188   gimple phi;
189 
190   redirect_edge_var_map_clear (e);
191 
192   /* Remove the appropriate PHI arguments in E's destination block.  */
193   for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
194     {
195       tree def;
196       source_location locus;
197 
198       phi = gsi_stmt (gsi);
199       def = gimple_phi_arg_def (phi, e->dest_idx);
200       locus = gimple_phi_arg_location (phi, e->dest_idx);
201 
202       if (def == NULL_TREE)
203 	continue;
204 
205       redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
206     }
207 
208   e = redirect_edge_succ_nodup (e, dest);
209 
210   return e;
211 }
212 
213 
214 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
215    E->dest.  */
216 
217 void
218 flush_pending_stmts (edge e)
219 {
220   gimple phi;
221   edge_var_map_vector v;
222   edge_var_map *vm;
223   int i;
224   gimple_stmt_iterator gsi;
225 
226   v = redirect_edge_var_map_vector (e);
227   if (!v)
228     return;
229 
230   for (gsi = gsi_start_phis (e->dest), i = 0;
231        !gsi_end_p (gsi) && VEC_iterate (edge_var_map, v, i, vm);
232        gsi_next (&gsi), i++)
233     {
234       tree def;
235 
236       phi = gsi_stmt (gsi);
237       def = redirect_edge_var_map_def (vm);
238       add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
239     }
240 
241   redirect_edge_var_map_clear (e);
242 }
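
/* Sketch of the usual redirect-then-flush idiom (the variable names are
   illustrative): ssa_redirect_edge saves the PHI arguments it removes,
   and flush_pending_stmts recreates them once PHI nodes exist at the
   new destination:

     edge e2 = ssa_redirect_edge (e, new_dest);
     ...create or adjust PHI nodes in NEW_DEST...
     flush_pending_stmts (e2);  */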
243 
244 /* Given a tree for an expression for which we might want to emit
245    locations or values in debug information (generally a variable, but
246    we might deal with other kinds of trees in the future), return the
247    tree that should be used as the variable of a DEBUG_BIND STMT or
248    VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */
249 
250 tree
251 target_for_debug_bind (tree var)
252 {
253   if (!MAY_HAVE_DEBUG_STMTS)
254     return NULL_TREE;
255 
256   if (TREE_CODE (var) != VAR_DECL
257       && TREE_CODE (var) != PARM_DECL)
258     return NULL_TREE;
259 
260   if (DECL_HAS_VALUE_EXPR_P (var))
261     return target_for_debug_bind (DECL_VALUE_EXPR (var));
262 
263   if (DECL_IGNORED_P (var))
264     return NULL_TREE;
265 
266   if (!is_gimple_reg (var))
267     {
268       if (is_gimple_reg_type (TREE_TYPE (var))
269 	  && referenced_var_lookup (cfun, DECL_UID (var)) == NULL_TREE)
270 	return var;
271       return NULL_TREE;
272     }
273 
274   return var;
275 }
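
/* For example, a VAR_DECL with DECL_HAS_VALUE_EXPR_P set (as created by
   nested-function lowering, among others) is resolved through its
   DECL_VALUE_EXPR above, so debug binds end up tracking the replacement
   expression's variable rather than the original decl (illustrative).  */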
276 
277 /* Called via walk_tree, look for SSA_NAMEs that have already been
278    released.  */
279 
280 static tree
281 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
282 {
283   struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
284 
285   if (wi && wi->is_lhs)
286     return NULL_TREE;
287 
288   if (TREE_CODE (*tp) == SSA_NAME)
289     {
290       if (SSA_NAME_IN_FREE_LIST (*tp))
291 	return *tp;
292 
293       *walk_subtrees = 0;
294     }
295   else if (IS_TYPE_OR_DECL_P (*tp))
296     *walk_subtrees = 0;
297 
298   return NULL_TREE;
299 }
300 
301 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
302    by other DEBUG stmts, and replace uses of the DEF with the
303    newly-created debug temp.  */
304 
305 void
306 insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
307 {
308   imm_use_iterator imm_iter;
309   use_operand_p use_p;
310   gimple stmt;
311   gimple def_stmt = NULL;
312   int usecount = 0;
313   tree value = NULL;
314 
315   if (!MAY_HAVE_DEBUG_STMTS)
316     return;
317 
318   /* If this name has already been registered for replacement, do nothing
319      as anything that uses this name isn't in SSA form.  */
320   if (name_registered_for_update_p (var))
321     return;
322 
323   /* Check whether there are debug stmts that reference this variable and,
324      if there are, decide whether we should use a debug temp.  */
325   FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
326     {
327       stmt = USE_STMT (use_p);
328 
329       if (!gimple_debug_bind_p (stmt))
330 	continue;
331 
332       if (usecount++)
333 	break;
334 
335       if (gimple_debug_bind_get_value (stmt) != var)
336 	{
337 	  /* Count this as an additional use, so as to make sure we
338 	     use a temp unless VAR's definition has a SINGLE_RHS that
339 	     can be shared.  */
340 	  usecount++;
341 	  break;
342 	}
343     }
344 
345   if (!usecount)
346     return;
347 
348   if (gsi)
349     def_stmt = gsi_stmt (*gsi);
350   else
351     def_stmt = SSA_NAME_DEF_STMT (var);
352 
353   /* If we didn't get an insertion point, and the stmt has already
354      been removed, we won't be able to insert the debug bind stmt, so
355      we'll have to drop debug information.  */
356   if (gimple_code (def_stmt) == GIMPLE_PHI)
357     {
358       value = degenerate_phi_result (def_stmt);
359       if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
360 	value = NULL;
361       /* error_mark_node is what fixup_noreturn_call changes PHI arguments
362 	 to.  */
363       else if (value == error_mark_node)
364 	value = NULL;
365     }
366   else if (is_gimple_assign (def_stmt))
367     {
368       bool no_value = false;
369 
370       if (!dom_info_available_p (CDI_DOMINATORS))
371 	{
372 	  struct walk_stmt_info wi;
373 
374 	  memset (&wi, 0, sizeof (wi));
375 
376 	  /* When removing blocks without following reverse dominance
377 	     order, we may sometimes encounter SSA_NAMEs that have
378 	     already been released, referenced in other SSA_DEFs that
379 	     we're about to release.  Consider:
380 
381 	     <bb X>:
382 	     v_1 = foo;
383 
384 	     <bb Y>:
385 	     w_2 = v_1 + bar;
386 	     # DEBUG w => w_2
387 
388 	     If we deleted BB X first, propagating the value of w_2
389 	     won't do us any good.  It's too late to recover the
390 	     original definition of v_1: when it was deleted, it was
391 	     only referenced in other DEFs, it couldn't possibly know
392 	     it should have been retained, and propagating every
393 	     single DEF just in case it might have to be propagated
394 	     into a DEBUG STMT would probably be too wasteful.
395 
396 	     When dominator information is not readily available, we
397 	     check for and accept some loss of debug information.  But
398 	     if it is available, there's no excuse for us to remove
399 	     blocks in the wrong order, so we don't even check for
400 	     dead SSA NAMEs.  SSA verification shall catch any
401 	     errors.  */
402 	  if ((!gsi && !gimple_bb (def_stmt))
403 	      || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
404 	    no_value = true;
405 	}
406 
407       if (!no_value)
408 	value = gimple_assign_rhs_to_tree (def_stmt);
409     }
410 
411   if (value)
412     {
413       /* If there's a single use of VAR, and VAR is the entire debug
414 	 expression (usecount would have been incremented again
415 	 otherwise), and the definition involves only constants and
416 	 SSA names, then we can propagate VALUE into this single use,
417 	 avoiding the temp.
418 
419 	 We can also avoid using a temp if VALUE can be shared and
420 	 propagated into all uses, without generating expressions that
421 	 wouldn't be valid gimple RHSs.
422 
423 	 Other cases that would require unsharing or non-gimple RHSs
424 	 are deferred to a debug temp, although we could avoid temps
425 	 at the expense of duplication of expressions.  */
426 
427       if (CONSTANT_CLASS_P (value)
428 	  || gimple_code (def_stmt) == GIMPLE_PHI
429 	  || (usecount == 1
430 	      && (!gimple_assign_single_p (def_stmt)
431 		  || is_gimple_min_invariant (value)))
432 	  || is_gimple_reg (value))
433 	value = unshare_expr (value);
434       else
435 	{
436 	  gimple def_temp;
437 	  tree vexpr = make_node (DEBUG_EXPR_DECL);
438 
439 	  def_temp = gimple_build_debug_bind (vexpr,
440 					      unshare_expr (value),
441 					      def_stmt);
442 
443 	  DECL_ARTIFICIAL (vexpr) = 1;
444 	  TREE_TYPE (vexpr) = TREE_TYPE (value);
445 	  if (DECL_P (value))
446 	    DECL_MODE (vexpr) = DECL_MODE (value);
447 	  else
448 	    DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));
449 
450 	  if (gsi)
451 	    gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
452 	  else
453 	    {
454 	      gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
455 	      gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
456 	    }
457 
458 	  value = vexpr;
459 	}
460     }
461 
462   FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
463     {
464       if (!gimple_debug_bind_p (stmt))
465 	continue;
466 
467       if (value)
468 	{
469 	  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
470 	    /* unshare_expr is not needed here.  vexpr is either a
471 	       SINGLE_RHS, that can be safely shared, some other RHS
472 	       that was unshared when we found it had a single debug
473 	       use, or a DEBUG_EXPR_DECL, that can be safely
474 	       shared.  */
475 	    SET_USE (use_p, value);
476 	  /* If we didn't replace uses with a debug decl fold the
477 	     resulting expression.  Otherwise we end up with invalid IL.  */
478 	  if (TREE_CODE (value) != DEBUG_EXPR_DECL)
479 	    {
480 	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
481 	      fold_stmt_inplace (&gsi);
482 	    }
483 	}
484       else
485 	gimple_debug_bind_reset_value (stmt);
486 
487       update_stmt (stmt);
488     }
489 }
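
/* Illustration of the transformation above, with made-up SSA names.
   When the definition of x_2 in

       x_2 = y_1 + 1;
       # DEBUG x => x_2

   is about to be removed and the RHS cannot simply be propagated, we get

       # DEBUG D#1 => y_1 + 1
       # DEBUG x => D#1

   so the debug information survives without the dead statement.  */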
490 
491 
492 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
493    other DEBUG stmts, and replace uses of the DEF with the
494    newly-created debug temp.  */
495 
496 void
497 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
498 {
499   gimple stmt;
500   ssa_op_iter op_iter;
501   def_operand_p def_p;
502 
503   if (!MAY_HAVE_DEBUG_STMTS)
504     return;
505 
506   stmt = gsi_stmt (*gsi);
507 
508   FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
509     {
510       tree var = DEF_FROM_PTR (def_p);
511 
512       if (TREE_CODE (var) != SSA_NAME)
513 	continue;
514 
515       insert_debug_temp_for_var_def (gsi, var);
516     }
517 }
518 
519 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT.  */
520 
521 void
522 reset_debug_uses (gimple stmt)
523 {
524   ssa_op_iter op_iter;
525   def_operand_p def_p;
526   imm_use_iterator imm_iter;
527   gimple use_stmt;
528 
529   if (!MAY_HAVE_DEBUG_STMTS)
530     return;
531 
532   FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
533     {
534       tree var = DEF_FROM_PTR (def_p);
535 
536       if (TREE_CODE (var) != SSA_NAME)
537 	continue;
538 
539       FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
540 	{
541 	  if (!gimple_debug_bind_p (use_stmt))
542 	    continue;
543 
544 	  gimple_debug_bind_reset_value (use_stmt);
545 	  update_stmt (use_stmt);
546 	}
547     }
548 }
549 
550 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
551    dominated stmts before their dominators, so that release_ssa_defs
552    stands a chance of propagating DEFs into debug bind stmts.  */
553 
554 void
555 release_defs_bitset (bitmap toremove)
556 {
557   unsigned j;
558   bitmap_iterator bi;
559 
560   /* Performing a topological sort is probably overkill, this will
561      most likely run in slightly superlinear time, rather than the
562      pathological quadratic worst case.  */
563   while (!bitmap_empty_p (toremove))
564     EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
565       {
566 	bool remove_now = true;
567 	tree var = ssa_name (j);
568 	gimple stmt;
569 	imm_use_iterator uit;
570 
571 	FOR_EACH_IMM_USE_STMT (stmt, uit, var)
572 	  {
573 	    ssa_op_iter dit;
574 	    def_operand_p def_p;
575 
576 	    /* We can't propagate PHI nodes into debug stmts.  */
577 	    if (gimple_code (stmt) == GIMPLE_PHI
578 		|| is_gimple_debug (stmt))
579 	      continue;
580 
581 	    /* If we find another definition to remove that uses
582 	       the one we're looking at, defer the removal of this
583 	       one, so that it can be propagated into debug stmts
584 	       after the other is.  */
585 	    FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
586 	      {
587 		tree odef = DEF_FROM_PTR (def_p);
588 
589 		if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
590 		  {
591 		    remove_now = false;
592 		    break;
593 		  }
594 	      }
595 
596 	    if (!remove_now)
597 	      BREAK_FROM_IMM_USE_STMT (uit);
598 	  }
599 
600 	if (remove_now)
601 	  {
602 	    gimple def = SSA_NAME_DEF_STMT (var);
603 	    gimple_stmt_iterator gsi = gsi_for_stmt (def);
604 
605 	    if (gimple_code (def) == GIMPLE_PHI)
606 	      remove_phi_node (&gsi, true);
607 	    else
608 	      {
609 		gsi_remove (&gsi, true);
610 		release_defs (def);
611 	      }
612 
613 	    bitmap_clear_bit (toremove, j);
614 	  }
615       }
616 }
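
/* Worked example of the deferral above, with made-up SSA names.  Given
   TOREMOVE = { a_1, b_2 } and

       a_1 = ...;
       b_2 = a_1 + 1;
       # DEBUG b => b_2

   removing a_1 first would make its value unrecoverable for debug
   purposes, so it is deferred until b_2, which uses it, has been
   removed and its RHS propagated into the debug bind.  */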
617 
618 /* Return true if SSA_NAME is malformed and mark it visited.
619 
620    IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
621       operand.  */
622 
623 static bool
624 verify_ssa_name (tree ssa_name, bool is_virtual)
625 {
626   if (TREE_CODE (ssa_name) != SSA_NAME)
627     {
628       error ("expected an SSA_NAME object");
629       return true;
630     }
631 
632   if (TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
633     {
634       error ("type mismatch between an SSA_NAME and its symbol");
635       return true;
636     }
637 
638   if (SSA_NAME_IN_FREE_LIST (ssa_name))
639     {
640       error ("found an SSA_NAME that had been released into the free pool");
641       return true;
642     }
643 
644   if (is_virtual && is_gimple_reg (ssa_name))
645     {
646       error ("found a virtual definition for a GIMPLE register");
647       return true;
648     }
649 
650   if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
651     {
652       error ("virtual SSA name for non-VOP decl");
653       return true;
654     }
655 
656   if (!is_virtual && !is_gimple_reg (ssa_name))
657     {
658       error ("found a real definition for a non-register");
659       return true;
660     }
661 
662   if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
663       && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
664     {
665       error ("found a default name with a non-empty defining statement");
666       return true;
667     }
668 
669   return false;
670 }
671 
672 
673 /* Return true if the definition of SSA_NAME at block BB is malformed.
674 
675    STMT is the statement where SSA_NAME is created.
676 
677    DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
678       version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
679       it means that the block in that array slot contains the
680       definition of SSA_NAME.
681 
682    IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */
683 
684 static bool
685 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
686 	    gimple stmt, bool is_virtual)
687 {
688   if (verify_ssa_name (ssa_name, is_virtual))
689     goto err;
690 
691   if (TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
692       && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
693     {
694       error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
695       goto err;
696     }
697 
698   if (definition_block[SSA_NAME_VERSION (ssa_name)])
699     {
700       error ("SSA_NAME created in two different blocks %i and %i",
701 	     definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
702       goto err;
703     }
704 
705   definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
706 
707   if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
708     {
709       error ("SSA_NAME_DEF_STMT is wrong");
710       fprintf (stderr, "Expected definition statement:\n");
711       print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
712       fprintf (stderr, "\nActual definition statement:\n");
713       print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
714       goto err;
715     }
716 
717   return false;
718 
719 err:
720   fprintf (stderr, "while verifying SSA_NAME ");
721   print_generic_expr (stderr, ssa_name, 0);
722   fprintf (stderr, " in statement\n");
723   print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
724 
725   return true;
726 }
727 
728 
729 /* Return true if the use of SSA_NAME at statement STMT in block BB is
730    malformed.
731 
732    DEF_BB is the block where SSA_NAME was found to be created.
733 
734    IDOM contains immediate dominator information for the flowgraph.
735 
736    CHECK_ABNORMAL is true if the caller wants to check whether this use
737       is flowing through an abnormal edge (only used when checking PHI
738       arguments).
739 
740    If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
741      that are defined before STMT in basic block BB.  */
742 
743 static bool
744 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
745 	    gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
746 {
747   bool err = false;
748   tree ssa_name = USE_FROM_PTR (use_p);
749 
750   if (!TREE_VISITED (ssa_name))
751     if (verify_imm_links (stderr, ssa_name))
752       err = true;
753 
754   TREE_VISITED (ssa_name) = 1;
755 
756   if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
757       && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
758     ; /* Default definitions have empty statements.  Nothing to do.  */
759   else if (!def_bb)
760     {
761       error ("missing definition");
762       err = true;
763     }
764   else if (bb != def_bb
765 	   && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
766     {
767       error ("definition in block %i does not dominate use in block %i",
768 	     def_bb->index, bb->index);
769       err = true;
770     }
771   else if (bb == def_bb
772 	   && names_defined_in_bb != NULL
773 	   && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
774     {
775       error ("definition in block %i follows the use", def_bb->index);
776       err = true;
777     }
778 
779   if (check_abnormal
780       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
781     {
782       error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
783       err = true;
784     }
785 
786   /* Make sure the use is in an appropriate list by checking the previous
787      element to make sure it's the same.  */
788   if (use_p->prev == NULL)
789     {
790       error ("no immediate_use list");
791       err = true;
792     }
793   else
794     {
795       tree listvar;
796       if (use_p->prev->use == NULL)
797 	listvar = use_p->prev->loc.ssa_name;
798       else
799 	listvar = USE_FROM_PTR (use_p->prev);
800       if (listvar != ssa_name)
801         {
802 	  error ("wrong immediate use list");
803 	  err = true;
804 	}
805     }
806 
807   if (err)
808     {
809       fprintf (stderr, "for SSA_NAME: ");
810       print_generic_expr (stderr, ssa_name, TDF_VOPS);
811       fprintf (stderr, " in statement:\n");
812       print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
813     }
814 
815   return err;
816 }
817 
818 
819 /* Return true if any of the arguments for PHI node PHI at block BB is
820    malformed.
821 
822    DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
823       version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
824       it means that the block in that array slot contains the
825       definition of SSA_NAME.  */
826 
827 static bool
828 verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block)
829 {
830   edge e;
831   bool err = false;
832   size_t i, phi_num_args = gimple_phi_num_args (phi);
833 
834   if (EDGE_COUNT (bb->preds) != phi_num_args)
835     {
836       error ("incoming edge count does not match number of PHI arguments");
837       err = true;
838       goto error;
839     }
840 
841   for (i = 0; i < phi_num_args; i++)
842     {
843       use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
844       tree op = USE_FROM_PTR (op_p);
845 
846       e = EDGE_PRED (bb, i);
847 
848       if (op == NULL_TREE)
849 	{
850 	  error ("PHI argument is missing for edge %d->%d",
851 	         e->src->index,
852 		 e->dest->index);
853 	  err = true;
854 	  goto error;
855 	}
856 
857       if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
858 	{
859 	  error ("PHI argument is not SSA_NAME, or invariant");
860 	  err = true;
861 	}
862 
863       if (TREE_CODE (op) == SSA_NAME)
864 	{
865 	  err = verify_ssa_name (op, !is_gimple_reg (gimple_phi_result (phi)));
866 	  err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
867 			     op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
868 	}
869 
870       if (TREE_CODE (op) == ADDR_EXPR)
871 	{
872 	  tree base = TREE_OPERAND (op, 0);
873 	  while (handled_component_p (base))
874 	    base = TREE_OPERAND (base, 0);
875 	  if ((TREE_CODE (base) == VAR_DECL
876 	       || TREE_CODE (base) == PARM_DECL
877 	       || TREE_CODE (base) == RESULT_DECL)
878 	      && !TREE_ADDRESSABLE (base))
879 	    {
880 	      error ("address taken, but ADDRESSABLE bit not set");
881 	      err = true;
882 	    }
883 	}
884 
885       if (e->dest != bb)
886 	{
887 	  error ("wrong edge %d->%d for PHI argument",
888 	         e->src->index, e->dest->index);
889 	  err = true;
890 	}
891 
892       if (err)
893 	{
894 	  fprintf (stderr, "PHI argument\n");
895 	  print_generic_stmt (stderr, op, TDF_VOPS);
896 	  goto error;
897 	}
898     }
899 
900 error:
901   if (err)
902     {
903       fprintf (stderr, "for PHI node\n");
904       print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
905     }
906 
907 
908   return err;
909 }
910 
911 
912 /* Verify common invariants in the SSA web.
913    TODO: verify the variable annotations.  */
914 
915 DEBUG_FUNCTION void
916 verify_ssa (bool check_modified_stmt)
917 {
918   size_t i;
919   basic_block bb;
920   basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
921   ssa_op_iter iter;
922   tree op;
923   enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
924   bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
925 
926   gcc_assert (!need_ssa_update_p (cfun));
927 
928   timevar_push (TV_TREE_SSA_VERIFY);
929 
930   /* Keep track of SSA names present in the IL.  */
931   for (i = 1; i < num_ssa_names; i++)
932     {
933       tree name = ssa_name (i);
934       if (name)
935 	{
936 	  gimple stmt;
937 	  TREE_VISITED (name) = 0;
938 
939 	  verify_ssa_name (name, !is_gimple_reg (name));
940 
941 	  stmt = SSA_NAME_DEF_STMT (name);
942 	  if (!gimple_nop_p (stmt))
943 	    {
944 	      basic_block bb = gimple_bb (stmt);
945 	      verify_def (bb, definition_block,
946 			  name, stmt, !is_gimple_reg (name));
947 
948 	    }
949 	}
950     }
951 
952   calculate_dominance_info (CDI_DOMINATORS);
953 
954   /* Now verify all the uses and make sure they agree with the definitions
955      found in the previous pass.  */
956   FOR_EACH_BB (bb)
957     {
958       edge e;
959       gimple phi;
960       edge_iterator ei;
961       gimple_stmt_iterator gsi;
962 
963       /* Make sure that all edges have a clear 'aux' field.  */
964       FOR_EACH_EDGE (e, ei, bb->preds)
965 	{
966 	  if (e->aux)
967 	    {
968 	      error ("AUX pointer initialized for edge %d->%d", e->src->index,
969 		      e->dest->index);
970 	      goto err;
971 	    }
972 	}
973 
974       /* Verify the arguments for every PHI node in the block.  */
975       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
976 	{
977 	  phi = gsi_stmt (gsi);
978 	  if (verify_phi_args (phi, bb, definition_block))
979 	    goto err;
980 
981 	  bitmap_set_bit (names_defined_in_bb,
982 			  SSA_NAME_VERSION (gimple_phi_result (phi)));
983 	}
984 
985       /* Now verify all the uses and vuses in every statement of the block.  */
986       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
987 	{
988 	  gimple stmt = gsi_stmt (gsi);
989 	  use_operand_p use_p;
990 
991 	  if (check_modified_stmt && gimple_modified_p (stmt))
992 	    {
993 	      error ("stmt (%p) marked modified after optimization pass: ",
994 		     (void *)stmt);
995 	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
996 	      goto err;
997 	    }
998 
999 	  if (verify_ssa_operands (stmt))
1000 	    {
1001 	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1002 	      goto err;
1003 	    }
1004 
1005 	  if (gimple_debug_bind_p (stmt)
1006 	      && !gimple_debug_bind_has_value_p (stmt))
1007 	    continue;
1008 
1009 	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
1010 	    {
1011 	      op = USE_FROM_PTR (use_p);
1012 	      if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
1013 			      use_p, stmt, false, names_defined_in_bb))
1014 		goto err;
1015 	    }
1016 
1017 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
1018 	    {
1019 	      if (SSA_NAME_DEF_STMT (op) != stmt)
1020 		{
1021 		  error ("SSA_NAME_DEF_STMT is wrong");
1022 		  fprintf (stderr, "Expected definition statement:\n");
1023 		  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1024 		  fprintf (stderr, "\nActual definition statement:\n");
1025 		  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1026 				     4, TDF_VOPS);
1027 		  goto err;
1028 		}
1029 	      bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1030 	    }
1031 	}
1032 
1033       bitmap_clear (names_defined_in_bb);
1034     }
1035 
1036   free (definition_block);
1037 
1038   /* Restore the dominance information to its prior known state, so
1039      that we do not perturb the compiler's subsequent behavior.  */
1040   if (orig_dom_state == DOM_NONE)
1041     free_dominance_info (CDI_DOMINATORS);
1042   else
1043     set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
1044 
1045   BITMAP_FREE (names_defined_in_bb);
1046   timevar_pop (TV_TREE_SSA_VERIFY);
1047   return;
1048 
1049 err:
1050   internal_error ("verify_ssa failed");
1051 }
1052 
1053 /* Return true if the UIDs in both int_tree_maps are equal.  */
1054 
1055 int
1056 int_tree_map_eq (const void *va, const void *vb)
1057 {
1058   const struct int_tree_map *a = (const struct int_tree_map *) va;
1059   const struct int_tree_map *b = (const struct int_tree_map *) vb;
1060   return (a->uid == b->uid);
1061 }
1062 
1063 /* Hash a UID in an int_tree_map.  */
1064 
1065 unsigned int
1066 int_tree_map_hash (const void *item)
1067 {
1068   return ((const struct int_tree_map *)item)->uid;
1069 }
1070 
1071 /* Return true if the DECL_UIDs of both trees are equal.  */
1072 
1073 int
1074 uid_decl_map_eq (const void *va, const void *vb)
1075 {
1076   const_tree a = (const_tree) va;
1077   const_tree b = (const_tree) vb;
1078   return (a->decl_minimal.uid == b->decl_minimal.uid);
1079 }
1080 
1081 /* Hash a tree in a uid_decl_map.  */
1082 
1083 unsigned int
1084 uid_decl_map_hash (const void *item)
1085 {
1086   return ((const_tree)item)->decl_minimal.uid;
1087 }
1088 
1089 /* Return true if the DECL_UIDs of both trees are equal.  */
1090 
1091 static int
1092 uid_ssaname_map_eq (const void *va, const void *vb)
1093 {
1094   const_tree a = (const_tree) va;
1095   const_tree b = (const_tree) vb;
1096   return (a->ssa_name.var->decl_minimal.uid == b->ssa_name.var->decl_minimal.uid);
1097 }
1098 
1099 /* Hash a tree in a uid_ssaname_map.  */
1100 
1101 static unsigned int
1102 uid_ssaname_map_hash (const void *item)
1103 {
1104   return ((const_tree)item)->ssa_name.var->decl_minimal.uid;
1105 }
1106 
1107 
1108 /* Initialize global DFA and SSA structures.  */
1109 
1110 void
1111 init_tree_ssa (struct function *fn)
1112 {
1113   fn->gimple_df = ggc_alloc_cleared_gimple_df ();
1114   fn->gimple_df->referenced_vars = htab_create_ggc (20, uid_decl_map_hash,
1115 				     		    uid_decl_map_eq, NULL);
1116   fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
1117 				                 uid_ssaname_map_eq, NULL);
1118   pt_solution_reset (&fn->gimple_df->escaped);
1119   init_ssanames (fn, 0);
1120   init_phinodes ();
1121 }
1122 
1123 
1124 /* Deallocate memory associated with SSA data structures for CFUN.  */
1125 
1126 void
1127 delete_tree_ssa (void)
1128 {
1129   referenced_var_iterator rvi;
1130   tree var;
1131 
1132   /* Remove annotations from every referenced local variable.  */
1133   FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
1134     {
1135       if (is_global_var (var))
1136 	continue;
1137       if (var_ann (var))
1138 	{
1139 	  ggc_free (var_ann (var));
1140 	  *DECL_VAR_ANN_PTR (var) = NULL;
1141 	}
1142     }
1143   htab_delete (gimple_referenced_vars (cfun));
1144   cfun->gimple_df->referenced_vars = NULL;
1145 
1146   fini_ssanames ();
1147   fini_phinodes ();
1148 
1149   /* We no longer maintain the SSA operand cache at this point.  */
1150   if (ssa_operands_active ())
1151     fini_ssa_operands ();
1152 
1153   htab_delete (cfun->gimple_df->default_defs);
1154   cfun->gimple_df->default_defs = NULL;
1155   pt_solution_reset (&cfun->gimple_df->escaped);
1156   if (cfun->gimple_df->decls_to_pointers != NULL)
1157     pointer_map_destroy (cfun->gimple_df->decls_to_pointers);
1158   cfun->gimple_df->decls_to_pointers = NULL;
1159   cfun->gimple_df->modified_noreturn_calls = NULL;
1160   cfun->gimple_df = NULL;
1161 
1162   /* We no longer need the edge variable maps.  */
1163   redirect_edge_var_map_destroy ();
1164 }
1165 
1166 /* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
1167    useless type conversion, otherwise return false.
1168 
1169    This function implicitly defines the middle-end type system.  With
1170    the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
1171    holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
1172    the following invariants shall be fulfilled:
1173 
1174      1) useless_type_conversion_p is transitive.
1175 	If a < b and b < c then a < c.
1176 
1177      2) useless_type_conversion_p is not symmetric.
1178 	From a < b it does not follow that a > b.
1179 
1180      3) Types define the available set of operations applicable to values.
1181 	A type conversion is useless if the operations for the target type
1182 	are a subset of the operations for the source type.  For example
1183 	casts to void* are useless, casts from void* are not (void* can't
1184 	be dereferenced or offsetted, but copied, hence its set of operations
1185 	is a strict subset of that of all other data pointer types).  Casts
1186 	to const T* are useless (can't be written to), casts from const T*
1187 	to T* are not.  */
1188 
1189 bool
1190 useless_type_conversion_p (tree outer_type, tree inner_type)
1191 {
1192   /* Do the following before stripping toplevel qualifiers.  */
1193   if (POINTER_TYPE_P (inner_type)
1194       && POINTER_TYPE_P (outer_type))
1195     {
1196       /* Do not lose casts between pointers to different address spaces.  */
1197       if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
1198 	  != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
1199 	return false;
1200     }
1201 
1202   /* From now on qualifiers on value types do not matter.  */
1203   inner_type = TYPE_MAIN_VARIANT (inner_type);
1204   outer_type = TYPE_MAIN_VARIANT (outer_type);
1205 
1206   if (inner_type == outer_type)
1207     return true;
1208 
1209   /* If we know the canonical types, compare them.  */
1210   if (TYPE_CANONICAL (inner_type)
1211       && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
1212     return true;
1213 
1214   /* Changes in machine mode are never useless conversions unless we
1215      deal with aggregate types in which case we defer to later checks.  */
1216   if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type)
1217       && !AGGREGATE_TYPE_P (inner_type))
1218     return false;
1219 
1220   /* If both the inner and outer types are integral types, then the
1221      conversion is not necessary if they have the same mode and
1222      signedness and precision, and both or neither are boolean.  */
1223   if (INTEGRAL_TYPE_P (inner_type)
1224       && INTEGRAL_TYPE_P (outer_type))
1225     {
1226       /* Preserve changes in signedness or precision.  */
1227       if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
1228 	  || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
1229 	return false;
1230 
1231       /* Preserve conversions to/from BOOLEAN_TYPE if types are not
1232 	 of precision one.  */
1233       if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
1234 	   != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
1235 	  && TYPE_PRECISION (outer_type) != 1)
1236 	return false;
1237 
1238       /* We don't need to preserve changes in the types' minimum or
1239 	 maximum value in general, as these do not generate code
1240 	 unless the types' precisions are different.  */
1241       return true;
1242     }
1243 
1244   /* Scalar floating point types with the same mode are compatible.  */
1245   else if (SCALAR_FLOAT_TYPE_P (inner_type)
1246 	   && SCALAR_FLOAT_TYPE_P (outer_type))
1247     return true;
1248 
1249   /* Fixed point types with the same mode are compatible.  */
1250   else if (FIXED_POINT_TYPE_P (inner_type)
1251 	   && FIXED_POINT_TYPE_P (outer_type))
1252     return true;
1253 
1254   /* We need to take special care recursing to pointed-to types.  */
1255   else if (POINTER_TYPE_P (inner_type)
1256 	   && POINTER_TYPE_P (outer_type))
1257     {
1258       /* Do not lose casts to function pointer types.  */
1259       if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
1260 	   || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
1261 	  && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
1262 	       || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
1263 	return false;
1264 
1265       /* We do not care for const qualification of the pointed-to types
1266 	 as const qualification has no semantic value to the middle-end.  */
1267 
1268       /* Otherwise pointers/references are equivalent.  */
1269       return true;
1270     }
1271 
1272   /* Recurse for complex types.  */
1273   else if (TREE_CODE (inner_type) == COMPLEX_TYPE
1274 	   && TREE_CODE (outer_type) == COMPLEX_TYPE)
1275     return useless_type_conversion_p (TREE_TYPE (outer_type),
1276 				      TREE_TYPE (inner_type));
1277 
1278   /* Recurse for vector types with the same number of subparts.  */
1279   else if (TREE_CODE (inner_type) == VECTOR_TYPE
1280 	   && TREE_CODE (outer_type) == VECTOR_TYPE
1281 	   && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
1282     return useless_type_conversion_p (TREE_TYPE (outer_type),
1283 				      TREE_TYPE (inner_type));
1284 
1285   else if (TREE_CODE (inner_type) == ARRAY_TYPE
1286 	   && TREE_CODE (outer_type) == ARRAY_TYPE)
1287     {
1288       /* Preserve string attributes.  */
1289       if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
1290 	return false;
1291 
1292       /* Conversions from array types with unknown extent to
1293 	 array types with known extent are not useless.  */
1294       if (!TYPE_DOMAIN (inner_type)
1295 	  && TYPE_DOMAIN (outer_type))
1296 	return false;
1297 
1298       /* Nor are conversions from array types with non-constant size to
1299          array types with constant size or to a different size.  */
1300       if (TYPE_SIZE (outer_type)
1301 	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
1302 	  && (!TYPE_SIZE (inner_type)
1303 	      || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
1304 	      || !tree_int_cst_equal (TYPE_SIZE (outer_type),
1305 				      TYPE_SIZE (inner_type))))
1306 	return false;
1307 
1308       /* Check conversions between arrays with partially known extents.
1309 	 If the array min/max values are constant they have to match.
1310 	 Otherwise allow conversions to unknown and variable extents.
1311 	 In particular this declares conversions that may change the
1312 	 mode to BLKmode as useless.  */
1313       if (TYPE_DOMAIN (inner_type)
1314 	  && TYPE_DOMAIN (outer_type)
1315 	  && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
1316 	{
1317 	  tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
1318 	  tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
1319 	  tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
1320 	  tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));
1321 
1322 	  /* After gimplification a variable min/max value carries no
1323 	     additional information compared to a NULL value.  All that
1324 	     matters has been lowered to be part of the IL.  */
1325 	  if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
1326 	    inner_min = NULL_TREE;
1327 	  if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
1328 	    outer_min = NULL_TREE;
1329 	  if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
1330 	    inner_max = NULL_TREE;
1331 	  if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
1332 	    outer_max = NULL_TREE;
1333 
1334 	  /* Conversions NULL / variable <- cst are useless, but not
1335 	     the other way around.  */
1336 	  if (outer_min
1337 	      && (!inner_min
1338 		  || !tree_int_cst_equal (inner_min, outer_min)))
1339 	    return false;
1340 	  if (outer_max
1341 	      && (!inner_max
1342 		  || !tree_int_cst_equal (inner_max, outer_max)))
1343 	    return false;
1344 	}
1345 
1346       /* Recurse on the element check.  */
1347       return useless_type_conversion_p (TREE_TYPE (outer_type),
1348 					TREE_TYPE (inner_type));
1349     }
1350 
1351   else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
1352 	    || TREE_CODE (inner_type) == METHOD_TYPE)
1353 	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
1354     {
1355       tree outer_parm, inner_parm;
1356 
1357       /* If the return types are not compatible bail out.  */
1358       if (!useless_type_conversion_p (TREE_TYPE (outer_type),
1359 				      TREE_TYPE (inner_type)))
1360 	return false;
1361 
1362       /* Method types should belong to a compatible base class.  */
1363       if (TREE_CODE (inner_type) == METHOD_TYPE
1364 	  && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
1365 					 TYPE_METHOD_BASETYPE (inner_type)))
1366 	return false;
1367 
1368       /* A conversion to an unprototyped argument list is ok.  */
1369       if (!prototype_p (outer_type))
1370 	return true;
1371 
1372       /* If the unqualified argument types are compatible the conversion
1373 	 is useless.  */
1374       if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
1375 	return true;
1376 
1377       for (outer_parm = TYPE_ARG_TYPES (outer_type),
1378 	   inner_parm = TYPE_ARG_TYPES (inner_type);
1379 	   outer_parm && inner_parm;
1380 	   outer_parm = TREE_CHAIN (outer_parm),
1381 	   inner_parm = TREE_CHAIN (inner_parm))
1382 	if (!useless_type_conversion_p
1383 	       (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
1384 		TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
1385 	  return false;
1386 
1387       /* If there is a mismatch in the number of arguments the functions
1388 	 are not compatible.  */
1389       if (outer_parm || inner_parm)
1390 	return false;
1391 
1392       /* Defer to the target if necessary.  */
1393       if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
1394 	return comp_type_attributes (outer_type, inner_type) != 0;
1395 
1396       return true;
1397     }
1398 
1399   /* For aggregates we rely on TYPE_CANONICAL exclusively and require
1400      explicit conversions for types that would have to be compared
1401      structurally.  */
1402   else if (AGGREGATE_TYPE_P (inner_type)
1403 	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
1404     return false;
1405 
1406   return false;
1407 }
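
/* A few concrete consequences of the checks above (illustrative):

     int <-> unsigned int        not useless: signedness differs.
     int * <-> char *            useless: data pointers within one
                                 address space are interchangeable;
                                 GIMPLE encodes the access type in the
                                 MEM_REF, not in the pointer type.
     void * -> void (*) (void)   not useless: cast to a function
                                 pointer type.
     int[4] -> int[]             useless; the reverse direction, from
                                 unknown to known extent, is not.  */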
1408 
1409 /* Return true if a conversion from either type of TYPE1 and TYPE2
1410    to the other is not required.  Otherwise return false.  */
1411 
1412 bool
1413 types_compatible_p (tree type1, tree type2)
1414 {
1415   return (type1 == type2
1416 	  || (useless_type_conversion_p (type1, type2)
1417 	      && useless_type_conversion_p (type2, type1)));
1418 }
1419 
1420 /* Return true if EXPR is a useless type conversion, otherwise return
1421    false.  */
1422 
1423 bool
1424 tree_ssa_useless_type_conversion (tree expr)
1425 {
1426   /* If we have an assignment that merely uses a NOP_EXPR to change
1427      the top of the RHS to the type of the LHS and the type conversion
1428      is "safe", then strip away the type conversion so that we can
1429      enter LHS = RHS into the const_and_copies table.  */
1430   if (CONVERT_EXPR_P (expr)
1431       || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1432       || TREE_CODE (expr) == NON_LVALUE_EXPR)
1433     return useless_type_conversion_p
1434       (TREE_TYPE (expr),
1435        TREE_TYPE (TREE_OPERAND (expr, 0)));
1436 
1437   return false;
1438 }
1439 
1440 /* Strip conversions from EXP according to
1441    tree_ssa_useless_type_conversion and return the resulting
1442    expression.  */
1443 
1444 tree
1445 tree_ssa_strip_useless_type_conversions (tree exp)
1446 {
1447   while (tree_ssa_useless_type_conversion (exp))
1448     exp = TREE_OPERAND (exp, 0);
1449   return exp;
1450 }
1451 
1452 
1453 /* Internal helper for walk_use_def_chains.  VAR, FN and DATA are as
1454    described in walk_use_def_chains.
1455 
1456    VISITED is a pointer set used to mark visited SSA_NAMEs to avoid
1457       infinite loops.  We used to have a bitmap for this to just mark
1458       SSA versions we had visited.  But non-sparse bitmaps are way too
1459       expensive, while sparse bitmaps may cause quadratic behavior.
1460 
1461    IS_DFS is true if the caller wants to perform a depth-first search
1462       when visiting PHI nodes.  A DFS will visit each PHI argument and
1463       call FN after each one.  Otherwise, all the arguments are
1464       visited first and then FN is called with each of the visited
1465       arguments in a separate pass.  */
1466 
1467 static bool
1468 walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
1469 		       struct pointer_set_t *visited, bool is_dfs)
1470 {
1471   gimple def_stmt;
1472 
1473   if (pointer_set_insert (visited, var))
1474     return false;
1475 
1476   def_stmt = SSA_NAME_DEF_STMT (var);
1477 
1478   if (gimple_code (def_stmt) != GIMPLE_PHI)
1479     {
1480       /* If we reached the end of the use-def chain, call FN.  */
1481       return fn (var, def_stmt, data);
1482     }
1483   else
1484     {
1485       size_t i;
1486 
1487       /* When doing a breadth-first search, call FN before following the
1488 	 use-def links for each argument.  */
1489       if (!is_dfs)
1490 	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1491 	  if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
1492 	    return true;
1493 
1494       /* Follow use-def links out of each PHI argument.  */
1495       for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1496 	{
1497 	  tree arg = gimple_phi_arg_def (def_stmt, i);
1498 
1499 	  /* ARG may be NULL for newly introduced PHI nodes.  */
1500 	  if (arg
1501 	      && TREE_CODE (arg) == SSA_NAME
1502 	      && walk_use_def_chains_1 (arg, fn, data, visited, is_dfs))
1503 	    return true;
1504 	}
1505 
1506       /* When doing a depth-first search, call FN after following the
1507 	 use-def links for each argument.  */
1508       if (is_dfs)
1509 	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1510 	  if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
1511 	    return true;
1512     }
1513 
1514   return false;
1515 }
1516 
1517 
1518 
1519 /* Walk use-def chains starting at the SSA variable VAR.  Call
1520    function FN at each reaching definition found.  FN takes three
1521    arguments: VAR, its defining statement (DEF_STMT) and a generic
1522    pointer to whatever state information that FN may want to maintain
1523    (DATA).  FN is able to stop the walk by returning true, otherwise
1524    in order to continue the walk, FN should return false.
1525 
1526    Note that if DEF_STMT is a PHI node, the semantics are slightly
1527    different.  The first argument to FN is no longer the original
1528    variable VAR, but the PHI argument currently being examined.  If FN
1529    wants to get at VAR, it should call PHI_RESULT (PHI).
1530 
1531    If IS_DFS is true, this function will:
1532 
1533 	1- walk the use-def chains for all the PHI arguments, and,
1534 	2- call (*FN) (ARG, PHI, DATA) on all the PHI arguments.
1535 
1536    If IS_DFS is false, the two steps above are done in reverse order
1537    (i.e., a breadth-first search).  */
1538 
1539 void
1540 walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
1541                      bool is_dfs)
1542 {
1543   gimple def_stmt;
1544 
1545   gcc_assert (TREE_CODE (var) == SSA_NAME);
1546 
1547   def_stmt = SSA_NAME_DEF_STMT (var);
1548 
1549   /* We only need to recurse if the reaching definition comes from a PHI
1550      node.  */
1551   if (gimple_code (def_stmt) != GIMPLE_PHI)
1552     (*fn) (var, def_stmt, data);
1553   else
1554     {
1555       struct pointer_set_t *visited = pointer_set_create ();
1556       walk_use_def_chains_1 (var, fn, data, visited, is_dfs);
1557       pointer_set_destroy (visited);
1558     }
1559 }
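
/* Example walker (hypothetical; only the callback signature is imposed
   by the interface above):

     static bool
     dump_reaching_def (tree var, gimple def_stmt, void *data)
     {
       print_gimple_stmt ((FILE *) data, def_stmt, 0, 0);
       return false;        (continue the walk)
     }

     walk_use_def_chains (name, dump_reaching_def, stderr, false);  */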
1560 
1561 
1562 /* Emit warnings for uninitialized variables.  This is done in two passes.
1563 
1564    The first pass notices real uses of SSA names with undefined values.
1565    Such uses are unconditionally uninitialized, and we can be certain that
1566    such a use is a mistake.  This pass is run before most optimizations,
1567    so that we catch as many as we can.
1568 
1569    The second pass follows PHI nodes to find uses that are potentially
1570    uninitialized.  In this case we can't necessarily prove that the use
1571    is really uninitialized.  This pass is run after most optimizations,
1572    so that we thread as many jumps as possible, and delete as much dead
1573    code as possible, in order to reduce false positives.  We also look
1574    again for plain uninitialized variables, since optimization may have
1575    changed conditionally uninitialized to unconditionally uninitialized.  */
1576 
1577 /* Emit a warning for EXPR based on variable VAR at the point in the
1578    program where T, an SSA_NAME, is used uninitialized.  The exact
1579    warning text is in GMSGID; DATA may contain the GIMPLE statement
1580    providing context, or be null.  WC is the warning code.  */
1581 
1582 void
1583 warn_uninit (enum opt_code wc, tree t,
1584 	     tree expr, tree var, const char *gmsgid, void *data)
1585 {
1586   gimple context = (gimple) data;
1587   location_t location;
1588   expanded_location xloc, floc;
1589 
1590   if (!ssa_undefined_value_p (t))
1591     return;
1592 
1593   /* TREE_NO_WARNING either means we already warned, or the front end
1594      wishes to suppress the warning.  */
1595   if ((context
1596        && (gimple_no_warning_p (context)
1597 	   || (gimple_assign_single_p (context)
1598 	       && TREE_NO_WARNING (gimple_assign_rhs1 (context)))))
1599       || TREE_NO_WARNING (expr))
1600     return;
1601 
1602   location = (context != NULL && gimple_has_location (context))
1603 	     ? gimple_location (context)
1604 	     : DECL_SOURCE_LOCATION (var);
1605   xloc = expand_location (location);
1606   floc = expand_location (DECL_SOURCE_LOCATION (cfun->decl));
1607   if (warning_at (location, wc, gmsgid, expr))
1608     {
1609       TREE_NO_WARNING (expr) = 1;
1610 
1611       if (location == DECL_SOURCE_LOCATION (var))
1612 	return;
1613       if (xloc.file != floc.file
1614 	  || xloc.line < floc.line
1615 	  || xloc.line > LOCATION_LINE (cfun->function_end_locus))
1616 	inform (DECL_SOURCE_LOCATION (var), "%qD was declared here", var);
1617     }
1618 }
1619 
1620 unsigned int
1621 warn_uninitialized_vars (bool warn_possibly_uninitialized)
1622 {
1623   gimple_stmt_iterator gsi;
1624   basic_block bb;
1625 
1626   FOR_EACH_BB (bb)
1627     {
1628       bool always_executed = dominated_by_p (CDI_POST_DOMINATORS,
1629 					     single_succ (ENTRY_BLOCK_PTR), bb);
1630       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1631 	{
1632 	  gimple stmt = gsi_stmt (gsi);
1633 	  use_operand_p use_p;
1634 	  ssa_op_iter op_iter;
1635 	  tree use;
1636 
1637 	  if (is_gimple_debug (stmt))
1638 	    continue;
1639 
1640 	  /* We only do data flow with SSA_NAMEs, so that's all we
1641 	     can warn about.  */
1642 	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, op_iter, SSA_OP_USE)
1643 	    {
1644 	      use = USE_FROM_PTR (use_p);
1645 	      if (always_executed)
1646 		warn_uninit (OPT_Wuninitialized, use,
1647 			     SSA_NAME_VAR (use), SSA_NAME_VAR (use),
1648 			     "%qD is used uninitialized in this function",
1649 			     stmt);
1650 	      else if (warn_possibly_uninitialized)
1651 		warn_uninit (OPT_Wuninitialized, use,
1652 			     SSA_NAME_VAR (use), SSA_NAME_VAR (use),
1653 			     "%qD may be used uninitialized in this function",
1654 			     stmt);
1655 	    }
1656 
1657 	  /* For memory the only cheap thing we can do is see if we
1658 	     have a use of the default def of the virtual operand.
1659 	     ???  Note that at -O0 we do not have virtual operands.
1660 	     ???  Not so cheap would be to use the alias oracle via
1661 	     walk_aliased_vdefs, if we don't find any aliasing vdef
1662 	     warn as is-used-uninitialized, if we don't find an aliasing
1663 	     vdef that kills our use (stmt_kills_ref_p), warn as
1664 	     may-be-used-uninitialized.  But this walk is quadratic and
1665 	     so must be limited which means we would miss warning
1666 	     opportunities.  */
1667 	  use = gimple_vuse (stmt);
1668 	  if (use
1669 	      && gimple_assign_single_p (stmt)
1670 	      && !gimple_vdef (stmt)
1671 	      && SSA_NAME_IS_DEFAULT_DEF (use))
1672 	    {
1673 	      tree rhs = gimple_assign_rhs1 (stmt);
1674 	      tree base = get_base_address (rhs);
1675 
1676 	      /* Do not warn if it can be initialized outside this function.  */
1677 	      if (TREE_CODE (base) != VAR_DECL
1678 		  || DECL_HARD_REGISTER (base)
1679 		  || is_global_var (base))
1680 		continue;
1681 
1682 	      if (always_executed)
1683 		warn_uninit (OPT_Wuninitialized, use, gimple_assign_rhs1 (stmt),
1684 			     base,
1685 			     "%qE is used uninitialized in this function",
1686 			     stmt);
1687 	      else if (warn_possibly_uninitialized)
1688 		warn_uninit (OPT_Wuninitialized, use, gimple_assign_rhs1 (stmt),
1689 			     base,
1690 			     "%qE may be used uninitialized in this function",
1691 			     stmt);
1692 	    }
1693 	}
1694     }
1695 
1696   return 0;
1697 }
1698 
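/* Editor's illustration (a sketch, not from the original sources): the
   memory case above fires on a load through the default virtual
   operand, e.g., when virtual operands exist (so not at -O0):

     int
     g (void)
     {
       int a[2];
       return a[0];   <-- warning: 'a[0]' is used uninitialized
     }

   The load has a VUSE of the default def, no VDEF, and its base A is a
   non-global VAR_DECL, so warn_uninit is called with the %qE form and
   the RHS expression.  */
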
1699 static unsigned int
1700 execute_early_warn_uninitialized (void)
1701 {
1702   /* Currently this pass always runs, but
1703      execute_late_warn_uninitialized only runs with optimization.  With
1704      optimization we want to warn about possibly uninitialized uses as
1705      late as possible, so we do not do it here.  Without optimization,
1706      however, we must warn here about "may be uninitialized" uses.
1707   */
1708   calculate_dominance_info (CDI_POST_DOMINATORS);
1709 
1710   warn_uninitialized_vars (/*warn_possibly_uninitialized=*/!optimize);
1711 
1712   /* Post-dominator information cannot be reliably updated.  Free it
1713      after use.  */
1714 
1715   free_dominance_info (CDI_POST_DOMINATORS);
1716   return 0;
1717 }
1718 
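/* Editor's illustration (a sketch, not from the original sources): at
   -O0 this early pass also emits the "may be" form for uses that do
   not post-dominate the entry block:

     int
     h (int c)
     {
       int x;
       if (c)
         return x;   <-- at -O0: 'x' may be used uninitialized
       return 0;
     }

   With optimization enabled, warn_possibly_uninitialized is false here
   and the late uninit pass handles the conditional cases instead.  */
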
1719 static bool
1720 gate_warn_uninitialized (void)
1721 {
1722   return warn_uninitialized != 0;
1723 }
1724 
1725 struct gimple_opt_pass pass_early_warn_uninitialized =
1726 {
1727  {
1728   GIMPLE_PASS,
1729   "*early_warn_uninitialized",		/* name */
1730   gate_warn_uninitialized,		/* gate */
1731   execute_early_warn_uninitialized,	/* execute */
1732   NULL,					/* sub */
1733   NULL,					/* next */
1734   0,					/* static_pass_number */
1735   TV_TREE_UNINIT,			/* tv_id */
1736   PROP_ssa,				/* properties_required */
1737   0,					/* properties_provided */
1738   0,					/* properties_destroyed */
1739   0,					/* todo_flags_start */
1740   0                                     /* todo_flags_finish */
1741  }
1742 };
1743 
1744 
1745 /* If necessary, rewrite the base of the reference tree *TP from
1746    a MEM_REF to a plain or converted symbol.  */
1747 
1748 static void
1749 maybe_rewrite_mem_ref_base (tree *tp)
1750 {
1751   tree sym;
1752 
1753   while (handled_component_p (*tp))
1754     tp = &TREE_OPERAND (*tp, 0);
1755   if (TREE_CODE (*tp) == MEM_REF
1756       && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
1757       && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
1758       && DECL_P (sym)
1759       && !TREE_ADDRESSABLE (sym)
1760       && symbol_marked_for_renaming (sym))
1761     {
1762       if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
1763 	  && useless_type_conversion_p (TREE_TYPE (*tp),
1764 					TREE_TYPE (TREE_TYPE (sym)))
1765 	  && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
1766 			    TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
1767 	{
1768 	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1769 			TYPE_SIZE (TREE_TYPE (*tp)),
1770 			int_const_binop (MULT_EXPR,
1771 					 bitsize_int (BITS_PER_UNIT),
1772 					 TREE_OPERAND (*tp, 1)));
1773 	}
1774       else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
1775 	       && useless_type_conversion_p (TREE_TYPE (*tp),
1776 					     TREE_TYPE (TREE_TYPE (sym))))
1777 	{
1778 	  *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
1779 			? REALPART_EXPR : IMAGPART_EXPR,
1780 			TREE_TYPE (*tp), sym);
1781 	}
1782       else if (integer_zerop (TREE_OPERAND (*tp, 1)))
1783 	{
1784 	  if (!useless_type_conversion_p (TREE_TYPE (*tp),
1785 					  TREE_TYPE (sym)))
1786 	    *tp = build1 (VIEW_CONVERT_EXPR,
1787 			  TREE_TYPE (*tp), sym);
1788 	  else
1789 	    *tp = sym;
1790 	}
1791     }
1792 }
1793 
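/* Editor's sketch of the rewrites above (illustrative GIMPLE, not from
   the original sources), for a non-addressable symbol marked for
   renaming:

     MEM[(float *)&v4sf_sym + 4B]   ->  BIT_FIELD_REF <v4sf_sym, 32, 32>
     MEM[(double *)&cplx_sym + 0B]  ->  REALPART_EXPR <cplx_sym>
     MEM[(T *)&sym + 0B]            ->  VIEW_CONVERT_EXPR<T>(sym),
                                        or plain SYM if the types agree.  */
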
1794 /* For a tree REF return its base if it is the base of a MEM_REF
1795    that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */
1796 
1797 static tree
1798 non_rewritable_mem_ref_base (tree ref)
1799 {
1800   tree base = ref;
1801 
1802   /* A plain decl is always rewritable; nothing to report.  */
1803   if (DECL_P (ref))
1804     return NULL_TREE;
1805 
1806   while (handled_component_p (base))
1807     base = TREE_OPERAND (base, 0);
1808 
1809   /* But watch out for MEM_REFs we cannot lower to a
1810      VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
1811   if (TREE_CODE (base) == MEM_REF
1812       && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1813     {
1814       tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1815       if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
1816 	   || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
1817 	  && useless_type_conversion_p (TREE_TYPE (base),
1818 					TREE_TYPE (TREE_TYPE (decl)))
1819 	  && double_int_fits_in_uhwi_p (mem_ref_offset (base))
1820 	  && double_int_ucmp
1821 	       (tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1822 		mem_ref_offset (base)) == 1
1823 	  && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
1824 			    TYPE_SIZE_UNIT (TREE_TYPE (base))))
1825 	return NULL_TREE;
1826       if (DECL_P (decl)
1827 	  && (!integer_zerop (TREE_OPERAND (base, 1))
1828 	      || (DECL_SIZE (decl)
1829 		  != TYPE_SIZE (TREE_TYPE (base)))
1830 	      || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
1831 	return decl;
1832     }
1833 
1834   return NULL_TREE;
1835 }
1836 
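/* Editor's illustration (a sketch, not from the original sources): a
   base reported as non-rewritable by the function above:

     int i;
     ... MEM[(char *)&i + 0B] ...

   Here TYPE_SIZE of the access differs from DECL_SIZE (i), so I is
   returned and the caller keeps it out of SSA form.  */
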
1837 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1838    Otherwise return false.  */
1839 
1840 static bool
1841 non_rewritable_lvalue_p (tree lhs)
1842 {
1843   /* A plain decl is always rewritable.  */
1844   if (DECL_P (lhs))
1845     return false;
1846 
1847   /* A decl that is wrapped inside a MEM_REF that covers
1848      it in full is also rewritable.
1849      ???  The following could be relaxed to allow component
1850      references that do not change the access size.  */
1851   if (TREE_CODE (lhs) == MEM_REF
1852       && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1853       && integer_zerop (TREE_OPERAND (lhs, 1)))
1854     {
1855       tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
1856       if (DECL_P (decl)
1857 	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
1858 	  && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
1859 	return false;
1860     }
1861 
1862   return true;
1863 }
1864 
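/* Editor's illustration (a sketch, not from the original sources):

     float f;
     MEM[(int *)&f + 0B] = 1;    <-- rewritable: same size, zero offset
     MEM[(char *)&f + 1B] = 0;   <-- non-rewritable: partial store into F

   The first store can later be expressed through a VIEW_CONVERT_EXPR
   of an SSA name; the second requires F to stay in memory.  */
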
1865 /* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
1866    mark the variable VAR for conversion into SSA.  Return true when updating
1867    stmts is required.  */
1868 
1869 static bool
1870 maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs)
1871 {
1872   bool update_vops = false;
1873 
1874   /* Global variables, result decls and address-taken vars cannot be changed.  */
1875   if (is_global_var (var)
1876       || TREE_CODE (var) == RESULT_DECL
1877       || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1878     return false;
1879 
1880   /* If the variable is not in the list of referenced vars then we
1881      do not need to touch it nor can we rename it.  */
1882   if (!referenced_var_lookup (cfun, DECL_UID (var)))
1883     return false;
1884 
1885   if (TREE_ADDRESSABLE (var)
1886       /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1887 	 a non-register.  Otherwise we would get confused and forget to
1888 	 add virtual operands for it.  */
1889       && (!is_gimple_reg_type (TREE_TYPE (var))
1890 	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1891 	  || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1892 	  || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1893     {
1894       TREE_ADDRESSABLE (var) = 0;
1895       if (is_gimple_reg (var))
1896 	mark_sym_for_renaming (var);
1897       update_vops = true;
1898       if (dump_file)
1899 	{
1900 	  fprintf (dump_file, "No longer having address taken: ");
1901 	  print_generic_expr (dump_file, var, 0);
1902 	  fprintf (dump_file, "\n");
1903 	}
1904     }
1905 
1906   if (!DECL_GIMPLE_REG_P (var)
1907       && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1908       && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1909 	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1910       && !TREE_THIS_VOLATILE (var)
1911       && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
1912     {
1913       DECL_GIMPLE_REG_P (var) = 1;
1914       mark_sym_for_renaming (var);
1915       update_vops = true;
1916       if (dump_file)
1917 	{
1918 	  fprintf (dump_file, "Now a gimple register: ");
1919 	  print_generic_expr (dump_file, var, 0);
1920 	  fprintf (dump_file, "\n");
1921 	}
1922     }
1923 
1924   return update_vops;
1925 }
1926 
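/* Editor's illustration (a sketch, not from the original sources): a
   typical beneficiary is a local whose only address-taking has died:

     int i = 0;
     int *p = &i;   <-- removed as dead by an earlier pass

   Once no ADDR_EXPR of I survives, it is absent from ADDRESSES_TAKEN,
   so TREE_ADDRESSABLE (i) is cleared above and I is marked for SSA
   renaming; the pass dump then reports
   "No longer having address taken: i".  */
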
1927 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */
1928 
1929 void
1930 execute_update_addresses_taken (void)
1931 {
1932   gimple_stmt_iterator gsi;
1933   basic_block bb;
1934   bitmap addresses_taken = BITMAP_ALLOC (NULL);
1935   bitmap not_reg_needs = BITMAP_ALLOC (NULL);
1936   bool update_vops = false;
1937   tree var;
1938   unsigned i;
1939 
1940   timevar_push (TV_ADDRESS_TAKEN);
1941 
1942   /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1943      the function body.  */
1944   FOR_EACH_BB (bb)
1945     {
1946       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1947 	{
1948 	  gimple stmt = gsi_stmt (gsi);
1949 	  enum gimple_code code = gimple_code (stmt);
1950 	  tree decl;
1951 
1952 	  /* Note all addresses taken by the stmt.  */
1953 	  gimple_ior_addresses_taken (addresses_taken, stmt);
1954 
1955 	  /* If we have a call or an assignment, see if the lhs contains
1956 	     a local decl that must not become a gimple register.  */
1957 	  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1958 	    {
1959               tree lhs = gimple_get_lhs (stmt);
1960               if (lhs
1961 		  && TREE_CODE (lhs) != SSA_NAME
1962 		  && non_rewritable_lvalue_p (lhs))
1963 		{
1964 		  decl = get_base_address (lhs);
1965 		  if (DECL_P (decl))
1966 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1967                 }
1968 	    }
1969 
1970 	  if (gimple_assign_single_p (stmt))
1971 	    {
1972 	      tree rhs = gimple_assign_rhs1 (stmt);
1973 	      if ((decl = non_rewritable_mem_ref_base (rhs)))
1974 		bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1975 	    }
1976 
1977 	  else if (code == GIMPLE_CALL)
1978 	    {
1979 	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
1980 		{
1981 		  tree arg = gimple_call_arg (stmt, i);
1982 		  if ((decl = non_rewritable_mem_ref_base (arg)))
1983 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1984 		}
1985 	    }
1986 
1987 	  else if (code == GIMPLE_ASM)
1988 	    {
1989 	      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1990 		{
1991 		  tree link = gimple_asm_output_op (stmt, i);
1992 		  tree lhs = TREE_VALUE (link);
1993 		  if (TREE_CODE (lhs) != SSA_NAME)
1994 		    {
1995 		      decl = get_base_address (lhs);
1996 		      if (DECL_P (decl)
1997 			  && (non_rewritable_lvalue_p (lhs)
1998 			      /* We cannot move required conversions from
1999 				 the lhs to the rhs in asm statements, so
2000 				 require we do not need any.  */
2001 			      || !useless_type_conversion_p
2002 			            (TREE_TYPE (lhs), TREE_TYPE (decl))))
2003 			bitmap_set_bit (not_reg_needs, DECL_UID (decl));
2004 		    }
2005 		}
2006 	      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
2007 		{
2008 		  tree link = gimple_asm_input_op (stmt, i);
2009 		  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
2010 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
2011 		}
2012 	    }
2013 	}
2014 
2015       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2016 	{
2017 	  size_t i;
2018 	  gimple phi = gsi_stmt (gsi);
2019 
2020 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
2021 	    {
2022 	      tree op = PHI_ARG_DEF (phi, i), var;
2023 	      if (TREE_CODE (op) == ADDR_EXPR
2024 		  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
2025 		  && DECL_P (var))
2026 		bitmap_set_bit (addresses_taken, DECL_UID (var));
2027 	    }
2028 	}
2029     }
2030 
2031   /* We cannot iterate over all referenced vars because that list can
2032      contain unused vars from BLOCK trees, which would cause code
2033      generation differences between -g and -g0.  */
2034   for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
2035     update_vops |= maybe_optimize_var (var, addresses_taken, not_reg_needs);
2036 
2037   FOR_EACH_VEC_ELT (tree, cfun->local_decls, i, var)
2038     update_vops |= maybe_optimize_var (var, addresses_taken, not_reg_needs);
2039 
2040   /* Operand caches need to be recomputed for operands referencing the updated
2041      variables.  */
2042   if (update_vops)
2043     {
2044       FOR_EACH_BB (bb)
2045 	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2046 	  {
2047 	    gimple stmt = gsi_stmt (gsi);
2048 
2049 	    /* Re-write MEM_REFs of symbols we want to
2050 	       rewrite into SSA form.  */
2051 	    if (gimple_assign_single_p (stmt))
2052 	      {
2053 		tree lhs = gimple_assign_lhs (stmt);
2054 		tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
2055 		tree sym;
2056 
2057 		/* We shouldn't have any fancy wrapping of
2058 		   component-refs on the LHS, but look through
2059 		   VIEW_CONVERT_EXPRs as that is easy.  */
2060 		while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
2061 		  lhs = TREE_OPERAND (lhs, 0);
2062 		if (TREE_CODE (lhs) == MEM_REF
2063 		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
2064 		    && integer_zerop (TREE_OPERAND (lhs, 1))
2065 		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
2066 		    && DECL_P (sym)
2067 		    && !TREE_ADDRESSABLE (sym)
2068 		    && symbol_marked_for_renaming (sym))
2069 		  lhs = sym;
2070 		else
2071 		  lhs = gimple_assign_lhs (stmt);
2072 
2073 		/* Rewrite the RHS and make sure the resulting assignment
2074 		   is validly typed.  */
2075 		maybe_rewrite_mem_ref_base (rhsp);
2076 		rhs = gimple_assign_rhs1 (stmt);
2077 		if (gimple_assign_lhs (stmt) != lhs
2078 		    && !useless_type_conversion_p (TREE_TYPE (lhs),
2079 						   TREE_TYPE (rhs)))
2080 		  rhs = fold_build1 (VIEW_CONVERT_EXPR,
2081 				     TREE_TYPE (lhs), rhs);
2082 
2083 		if (gimple_assign_lhs (stmt) != lhs)
2084 		  gimple_assign_set_lhs (stmt, lhs);
2085 
2086 		/* For "var ={v} {CLOBBER};" where var has lost
2087 		   TREE_ADDRESSABLE, just remove the stmt.  */
2088 		if (DECL_P (lhs)
2089 		    && TREE_CLOBBER_P (rhs)
2090 		    && symbol_marked_for_renaming (lhs))
2091 		  {
2092 		    unlink_stmt_vdef (stmt);
2093 		    gsi_remove (&gsi, true);
2094 		    release_defs (stmt);
2095 		    continue;
2096 		  }
2097 
2098 		if (gimple_assign_rhs1 (stmt) != rhs)
2099 		  {
2100 		    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2101 		    gimple_assign_set_rhs_from_tree (&gsi, rhs);
2102 		  }
2103 	      }
2104 
2105 	    else if (gimple_code (stmt) == GIMPLE_CALL)
2106 	      {
2107 		unsigned i;
2108 		for (i = 0; i < gimple_call_num_args (stmt); ++i)
2109 		  {
2110 		    tree *argp = gimple_call_arg_ptr (stmt, i);
2111 		    maybe_rewrite_mem_ref_base (argp);
2112 		  }
2113 	      }
2114 
2115 	    else if (gimple_code (stmt) == GIMPLE_ASM)
2116 	      {
2117 		unsigned i;
2118 		for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
2119 		  {
2120 		    tree link = gimple_asm_output_op (stmt, i);
2121 		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link));
2122 		  }
2123 		for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
2124 		  {
2125 		    tree link = gimple_asm_input_op (stmt, i);
2126 		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link));
2127 		  }
2128 	      }
2129 
2130 	    else if (gimple_debug_bind_p (stmt)
2131 		     && gimple_debug_bind_has_value_p (stmt))
2132 	      {
2133 		tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
2134 		tree decl;
2135 		maybe_rewrite_mem_ref_base (valuep);
2136 		decl = non_rewritable_mem_ref_base (*valuep);
2137 		if (decl && symbol_marked_for_renaming (decl))
2138 		  gimple_debug_bind_reset_value (stmt);
2139 	      }
2140 
2141 	    if (gimple_references_memory_p (stmt)
2142 		|| is_gimple_debug (stmt))
2143 	      update_stmt (stmt);
2144 
2145 	    gsi_next (&gsi);
2146 	  }
2147 
2148       /* Update SSA form here; we may be called outside of a pass as well.  */
2149       if (number_of_loops () > 1 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
2150 	rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
2151       else
2152 	update_ssa (TODO_update_ssa);
2153     }
2154 
2155   BITMAP_FREE (not_reg_needs);
2156   BITMAP_FREE (addresses_taken);
2157   timevar_pop (TV_ADDRESS_TAKEN);
2158 }
2159 
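/* Editor's illustration (a sketch, not from the original sources): the
   LHS/RHS fixup above keeps a rewritten assignment validly typed.
   With 'float f' marked for renaming:

     MEM[(int *)&f + 0B] = i_1;
   becomes
     f = VIEW_CONVERT_EXPR<float>(i_1);

   after which the operand caches are recomputed and update_ssa puts F
   into SSA form.  */
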
2160 struct gimple_opt_pass pass_update_address_taken =
2161 {
2162  {
2163   GIMPLE_PASS,
2164   "addressables",			/* name */
2165   NULL,					/* gate */
2166   NULL,					/* execute */
2167   NULL,					/* sub */
2168   NULL,					/* next */
2169   0,					/* static_pass_number */
2170   TV_ADDRESS_TAKEN,			/* tv_id */
2171   PROP_ssa,				/* properties_required */
2172   0,					/* properties_provided */
2173   0,					/* properties_destroyed */
2174   0,					/* todo_flags_start */
2175   TODO_update_address_taken             /* todo_flags_finish */
2176  }
2177 };
2178