/* Miscellaneous SSA utility functions.
   Copyright (C) 2001-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-ssa-loop-manip.h"
#include "tree-into-ssa.h"
#include "tree-ssa.h"
#include "cfgloop.h"
#include "cfgexpand.h"

/* Pointer map of variable mappings, keyed by edge.  */
static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;


/* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */

void
redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
{
  edge_var_map new_node;

  if (edge_var_maps == NULL)
    edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;

  auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
  new_node.def = def;
  new_node.result = result;
  new_node.locus = locus;

  slot.safe_push (new_node);
}
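
/* A minimal usage sketch of the edge var map API above (illustrative
   only, not compiled; the loop simply mirrors what ssa_redirect_edge
   does further down in this file, and the example_* name is a
   hypothetical placeholder).  */
#if 0
static void
example_record_phi_args (edge e)
{
  for (gphi_iterator gsi = gsi_start_phis (e->dest);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree def = gimple_phi_arg_def (phi, e->dest_idx);
      if (def == NULL_TREE)
	continue;
      redirect_edge_var_map_add (e, gimple_phi_result (phi), def,
				 gimple_phi_arg_location (phi, e->dest_idx));
    }

  /* Consumers retrieve the recorded mappings ...  */
  vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
  (void) v;

  /* ... and drop them once they are no longer needed.  */
  redirect_edge_var_map_clear (e);
}
#endif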


/* Clear the var mappings in edge E.  */

void
redirect_edge_var_map_clear (edge e)
{
  if (!edge_var_maps)
    return;

  auto_vec<edge_var_map> *head = edge_var_maps->get (e);

  if (head)
    head->release ();
}


/* Duplicate the redirected var mappings in OLDE in NEWE.

   This assumes a hash_map can have multiple edges mapping to the same
   var_map (many to one mapping), since we don't remove the previous mappings.
   */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  if (!edge_var_maps)
    return;

  auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
  auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
  if (!old_head)
    return;

  new_head->safe_splice (*old_head);
}


/* Return the variable mappings for a given edge.  If there is none, return
   NULL.  */

vec<edge_var_map> *
redirect_edge_var_map_vector (edge e)
{
  /* Hey, what kind of idiot would... you'd be surprised.  */
  if (!edge_var_maps)
    return NULL;

  auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
  if (!slot)
    return NULL;

  return slot;
}

/* Clear the edge variable mappings.  */

void
redirect_edge_var_map_empty (void)
{
  if (edge_var_maps)
    edge_var_maps->empty ();
}


/* Remove the corresponding arguments from the PHI nodes in E's
   destination block and redirect it to DEST.  Return redirected edge.
   The list of removed arguments is stored in a vector accessed
   through edge_var_maps.  */

edge
ssa_redirect_edge (edge e, basic_block dest)
{
  gphi_iterator gsi;
  gphi *phi;

  redirect_edge_var_map_clear (e);

  /* Remove the appropriate PHI arguments in E's destination block.  */
  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def;
      source_location locus;

      phi = gsi.phi ();
      def = gimple_phi_arg_def (phi, e->dest_idx);
      locus = gimple_phi_arg_location (phi, e->dest_idx);

      if (def == NULL_TREE)
	continue;

      redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
    }

  e = redirect_edge_succ_nodup (e, dest);

  return e;
}


/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
   E->dest.  */

void
flush_pending_stmts (edge e)
{
  gphi *phi;
  edge_var_map *vm;
  int i;
  gphi_iterator gsi;

  vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
  if (!v)
    return;

  for (gsi = gsi_start_phis (e->dest), i = 0;
       !gsi_end_p (gsi) && v->iterate (i, &vm);
       gsi_next (&gsi), i++)
    {
      tree def;

      phi = gsi.phi ();
      def = redirect_edge_var_map_def (vm);
      add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
    }

  redirect_edge_var_map_clear (e);
}
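
/* Illustrative sketch (not compiled) of how ssa_redirect_edge and
   flush_pending_stmts pair up: the PHI arguments stripped from the old
   destination are re-attached once the edge points at NEW_DEST, which
   is assumed to already have matching PHI nodes in the same order.
   The example_* name is a hypothetical placeholder.  */
#if 0
static void
example_redirect (edge e, basic_block new_dest)
{
  /* Strips E's PHI args in the old destination and remembers them.  */
  e = ssa_redirect_edge (e, new_dest);

  /* Re-adds the remembered arguments to the PHIs in NEW_DEST.  */
  flush_pending_stmts (e);
}
#endif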

/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   an equivalent value.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have updated the
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}
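
/* Illustrative sketch (not compiled) of the intended calling pattern:
   preserve debug info for the old LHS of an assignment before
   rewriting it to compute something else into NEW_LHS.  The
   example_* name is a hypothetical placeholder.  */
#if 0
static void
example_retarget_assign (gimple *stmt, tree new_lhs, tree new_rhs)
{
  gimple_replace_ssa_lhs (stmt, new_lhs);
  gimple_assign_set_rhs1 (stmt, new_rhs);
  update_stmt (stmt);
}
#endif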


/* Given a tree for an expression for which we might want to emit
   locations or values in debug information (generally a variable, but
   we might deal with other kinds of trees in the future), return the
   tree that should be used as the variable of a DEBUG_BIND STMT or
   VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */

tree
target_for_debug_bind (tree var)
{
  if (!MAY_HAVE_DEBUG_STMTS)
    return NULL_TREE;

  if (TREE_CODE (var) == SSA_NAME)
    {
      var = SSA_NAME_VAR (var);
      if (var == NULL_TREE)
	return NULL_TREE;
    }

  if ((TREE_CODE (var) != VAR_DECL
       || VAR_DECL_IS_VIRTUAL_OPERAND (var))
      && TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  if (DECL_HAS_VALUE_EXPR_P (var))
    return target_for_debug_bind (DECL_VALUE_EXPR (var));

  if (DECL_IGNORED_P (var))
    return NULL_TREE;

  /* var-tracking only tracks registers.  */
  if (!is_gimple_reg_type (TREE_TYPE (var)))
    return NULL_TREE;

  return var;
}
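
/* Illustrative sketch (not compiled): only emit a debug bind when
   target_for_debug_bind approves of the variable.  The example_* name
   is a hypothetical placeholder.  */
#if 0
static void
example_bind (tree var, tree value, gimple_stmt_iterator *gsi)
{
  tree tracked = target_for_debug_bind (var);
  if (tracked)
    {
      gdebug *bind
	= gimple_build_debug_bind (tracked, value, gsi_stmt (*gsi));
      gsi_insert_before (gsi, bind, GSI_SAME_STMT);
    }
}
#endif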

/* Called via walk_tree, look for SSA_NAMEs that have already been
   released.  */

static tree
find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;

  if (wi && wi->is_lhs)
    return NULL_TREE;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      if (SSA_NAME_IN_FREE_LIST (*tp))
	return *tp;

      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
   by other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple *stmt;
  gimple *def_stmt = NULL;
  int usecount = 0;
  tree value = NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
	continue;

      if (usecount++)
	break;

      if (gimple_debug_bind_get_value (stmt) != var)
	{
	  /* Count this as an additional use, so as to make sure we
	     use a temp unless VAR's definition has a SINGLE_RHS that
	     can be shared.  */
	  usecount++;
	  break;
	}
    }

  if (!usecount)
    return;

  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      value = degenerate_phi_result (as_a <gphi *> (def_stmt));
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
	value = NULL;
      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
	 to.  */
      else if (value == error_mark_node)
	value = NULL;
    }
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
	{
	  struct walk_stmt_info wi;

	  memset (&wi, 0, sizeof (wi));

	  /* When removing blocks without following reverse dominance
	     order, we may sometimes encounter SSA_NAMEs that have
	     already been released, referenced in other SSA_DEFs that
	     we're about to release.  Consider:

	     <bb X>:
	     v_1 = foo;

	     <bb Y>:
	     w_2 = v_1 + bar;
	     # DEBUG w => w_2

	     If we deleted BB X first, propagating the value of w_2
	     won't do us any good.  It's too late to recover their
	     original definition of v_1: when it was deleted, it was
	     only referenced in other DEFs, it couldn't possibly know
	     it should have been retained, and propagating every
	     single DEF just in case it might have to be propagated
	     into a DEBUG STMT would probably be too wasteful.

	     When dominator information is not readily available, we
	     check for and accept some loss of debug information.  But
	     if it is available, there's no excuse for us to remove
	     blocks in the wrong order, so we don't even check for
	     dead SSA NAMEs.  SSA verification shall catch any
	     errors.  */
	  if ((!gsi && !gimple_bb (def_stmt))
	      || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
	    no_value = true;
	}

      if (!no_value)
	value = gimple_assign_rhs_to_tree (def_stmt);
    }

  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
	 expression (usecount would have been incremented again
	 otherwise), and the definition involves only constants and
	 SSA names, then we can propagate VALUE into this single use,
	 avoiding the temp.

	 We can also avoid using a temp if VALUE can be shared and
	 propagated into all uses, without generating expressions that
	 wouldn't be valid gimple RHSs.

	 Other cases that would require unsharing or non-gimple RHSs
	 are deferred to a debug temp, although we could avoid temps
	 at the expense of duplication of expressions.  */

      if (CONSTANT_CLASS_P (value)
	  || gimple_code (def_stmt) == GIMPLE_PHI
	  || (usecount == 1
	      && (!gimple_assign_single_p (def_stmt)
		  || is_gimple_min_invariant (value)))
	  || is_gimple_reg (value))
	;
      else
	{
	  gdebug *def_temp;
	  tree vexpr = make_node (DEBUG_EXPR_DECL);

	  def_temp = gimple_build_debug_bind (vexpr,
					      unshare_expr (value),
					      def_stmt);

	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (value);
	  if (DECL_P (value))
	    DECL_MODE (vexpr) = DECL_MODE (value);
	  else
	    DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));

	  if (gsi)
	    gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
	  else
	    {
	      gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
	      gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
	    }

	  value = vexpr;
	}
    }

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
	continue;

      if (value)
	{
	  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
	    /* unshare_expr is not needed here.  vexpr is either a
	       SINGLE_RHS, that can be safely shared, some other RHS
	       that was unshared when we found it had a single debug
	       use, or a DEBUG_EXPR_DECL, that can be safely
	       shared.  */
	    SET_USE (use_p, unshare_expr (value));
	  /* If we didn't replace uses with a debug decl fold the
	     resulting expression.  Otherwise we end up with invalid IL.  */
	  if (TREE_CODE (value) != DEBUG_EXPR_DECL)
	    {
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	      fold_stmt_inplace (&gsi);
	    }
	}
      else
	gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}
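
/* For illustration, when the definition of x_1 below is about to be
   removed and x_1 has a single debug use,

     x_1 = a_2 + b_3;
     # DEBUG d => x_1

   insert_debug_temp_for_var_def propagates the RHS directly:

     # DEBUG d => a_2 + b_3

   With several debug uses a shared temporary is introduced instead:

     # DEBUG D#1 => a_2 + b_3
     # DEBUG d => D#1
     # DEBUG e => D#1  */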


/* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
   other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  ssa_op_iter op_iter;
  def_operand_p def_p;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  stmt = gsi_stmt (*gsi);

  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
    {
      tree var = DEF_FROM_PTR (def_p);

      if (TREE_CODE (var) != SSA_NAME)
	continue;

      insert_debug_temp_for_var_def (gsi, var);
    }
}

/* Reset all debug stmts that use SSA_NAME(s) defined in STMT.  */

void
reset_debug_uses (gimple *stmt)
{
  ssa_op_iter op_iter;
  def_operand_p def_p;
  imm_use_iterator imm_iter;
  gimple *use_stmt;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
    {
      tree var = DEF_FROM_PTR (def_p);

      if (TREE_CODE (var) != SSA_NAME)
	continue;

      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
	{
	  if (!gimple_debug_bind_p (use_stmt))
	    continue;

	  gimple_debug_bind_reset_value (use_stmt);
	  update_stmt (use_stmt);
	}
    }
}

/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
   dominated stmts before their dominators, so that release_ssa_defs
   stands a chance of propagating DEFs into debug bind stmts.  */

void
release_defs_bitset (bitmap toremove)
{
  unsigned j;
  bitmap_iterator bi;

  /* Performing a topological sort is probably overkill; this will
     most likely run in slightly superlinear time, rather than the
     pathological quadratic worst case.  */
  while (!bitmap_empty_p (toremove))
    EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
      {
	bool remove_now = true;
	tree var = ssa_name (j);
	gimple *stmt;
	imm_use_iterator uit;

	FOR_EACH_IMM_USE_STMT (stmt, uit, var)
	  {
	    ssa_op_iter dit;
	    def_operand_p def_p;

	    /* We can't propagate PHI nodes into debug stmts.  */
	    if (gimple_code (stmt) == GIMPLE_PHI
		|| is_gimple_debug (stmt))
	      continue;

	    /* If we find another definition to remove that uses
	       the one we're looking at, defer the removal of this
	       one, so that it can be propagated into debug stmts
	       after the other is.  */
	    FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
	      {
		tree odef = DEF_FROM_PTR (def_p);

		if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
		  {
		    remove_now = false;
		    break;
		  }
	      }

	    if (!remove_now)
	      BREAK_FROM_IMM_USE_STMT (uit);
	  }

	if (remove_now)
	  {
	    gimple *def = SSA_NAME_DEF_STMT (var);
	    gimple_stmt_iterator gsi = gsi_for_stmt (def);

	    if (gimple_code (def) == GIMPLE_PHI)
	      remove_phi_node (&gsi, true);
	    else
	      {
		gsi_remove (&gsi, true);
		release_defs (def);
	      }

	    bitmap_clear_bit (toremove, j);
	  }
      }
}
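
/* Illustrative sketch (not compiled): callers accumulate the SSA
   versions they want gone and let release_defs_bitset pick a safe
   removal order.  The example_* name is a hypothetical placeholder.  */
#if 0
static void
example_remove_names (vec<tree> &dead_names)
{
  bitmap toremove = BITMAP_ALLOC (NULL);
  unsigned i;
  tree name;
  FOR_EACH_VEC_ELT (dead_names, i, name)
    bitmap_set_bit (toremove, SSA_NAME_VERSION (name));
  release_defs_bitset (toremove);
  BITMAP_FREE (toremove);
}
#endif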

/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
      operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  if (SSA_NAME_VAR (ssa_name) != NULL_TREE
      && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (is_virtual && !virtual_operand_p (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
    {
      error ("virtual SSA name for non-VOP decl");
      return true;
    }

  if (!is_virtual && virtual_operand_p (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}


/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
      it means that the block in that array slot contains the
      definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
	    gimple *stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  if (SSA_NAME_VAR (ssa_name)
      && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
      && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
    {
      error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
      goto err;
    }

  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
	     definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name, 0);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}


/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
      is flowing through an abnormal edge (only used when checking PHI
      arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
     that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
	    gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
	   && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
	     def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
	   && names_defined_in_bb != NULL
	   && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is on the right immediate-use list by checking
     that the previous element refers to the same SSA name.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      if (use_p->prev->use == NULL)
	listvar = use_p->prev->loc.ssa_name;
      else
	listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
        {
	  error ("wrong immediate use list");
	  err = true;
	}
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}


/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
      it means that the block in that array slot contains the
      definition of SSA_NAME.  */

static bool
verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
	{
	  error ("PHI argument is missing for edge %d->%d",
	         e->src->index,
		 e->dest->index);
	  err = true;
	  goto error;
	}

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
	{
	  error ("PHI argument is not SSA_NAME, or invariant");
	  err = true;
	}

      if (TREE_CODE (op) == SSA_NAME)
	{
	  err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
	  err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
			     op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
	}

      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  tree base = TREE_OPERAND (op, 0);
	  while (handled_component_p (base))
	    base = TREE_OPERAND (base, 0);
	  if ((TREE_CODE (base) == VAR_DECL
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	      && !TREE_ADDRESSABLE (base))
	    {
	      error ("address taken, but ADDRESSABLE bit not set");
	      err = true;
	    }
	}

      if (e->dest != bb)
	{
	  error ("wrong edge %d->%d for PHI argument",
	         e->src->index, e->dest->index);
	  err = true;
	}

      if (err)
	{
	  fprintf (stderr, "PHI argument\n");
	  print_generic_stmt (stderr, op, TDF_VOPS);
	  goto error;
	}
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }


  return err;
}


/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.  */

DEBUG_FUNCTION void
verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
{
  size_t i;
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);

  gcc_assert (!need_ssa_update_p (cfun));

  timevar_push (TV_TREE_SSA_VERIFY);

  /* Keep track of SSA names present in the IL.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  gimple *stmt;
	  TREE_VISITED (name) = 0;

	  verify_ssa_name (name, virtual_operand_p (name));

	  stmt = SSA_NAME_DEF_STMT (name);
	  if (!gimple_nop_p (stmt))
	    {
	      basic_block bb = gimple_bb (stmt);
	      if (verify_def (bb, definition_block,
			      name, stmt, virtual_operand_p (name)))
		goto err;
	    }
	}
    }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->aux)
	    {
	      error ("AUX pointer initialized for edge %d->%d", e->src->index,
		      e->dest->index);
	      goto err;
	    }
	}

      /* Verify the arguments for every PHI node in the block.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  if (verify_phi_args (phi, bb, definition_block))
	    goto err;

	  bitmap_set_bit (names_defined_in_bb,
			  SSA_NAME_VERSION (gimple_phi_result (phi)));
	}

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  use_operand_p use_p;

	  if (check_modified_stmt && gimple_modified_p (stmt))
	    {
	      error ("stmt (%p) marked modified after optimization pass: ",
		     (void *)stmt);
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
	    {
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  if (gimple_debug_bind_p (stmt)
	      && !gimple_debug_bind_has_value_p (stmt))
	    continue;

	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
	    {
	      op = USE_FROM_PTR (use_p);
	      if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
			      use_p, stmt, false, names_defined_in_bb))
		goto err;
	    }

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
	    {
	      if (SSA_NAME_DEF_STMT (op) != stmt)
		{
		  error ("SSA_NAME_DEF_STMT is wrong");
		  fprintf (stderr, "Expected definition statement:\n");
		  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
		  fprintf (stderr, "\nActual definition statement:\n");
		  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
				     4, TDF_VOPS);
		  goto err;
		}
	      bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
	    }
	}

      bitmap_clear (names_defined_in_bb);
    }

  free (definition_block);

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  BITMAP_FREE (names_defined_in_bb);
  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}


/* Initialize global DFA and SSA structures.  */

void
init_tree_ssa (struct function *fn)
{
  fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
  fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
  pt_solution_reset (&fn->gimple_df->escaped);
  init_ssanames (fn, 0);
}

/* Do the actions required to initialize internal data structures used
   in tree-ssa optimization passes.  */

static unsigned int
execute_init_datastructures (void)
{
  /* Allocate hash tables, arrays and other structures.  */
  gcc_assert (!cfun->gimple_df);
  init_tree_ssa (cfun);
  return 0;
}

namespace {

const pass_data pass_data_init_datastructures =
{
  GIMPLE_PASS, /* type */
  "*init_datastructures", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_init_datastructures : public gimple_opt_pass
{
public:
  pass_init_datastructures (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_init_datastructures, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun)
    {
      /* Do nothing for functions that were already produced in SSA form.  */
      return !(fun->curr_properties & PROP_ssa);
    }

  virtual unsigned int execute (function *)
    {
      return execute_init_datastructures ();
    }

}; // class pass_init_datastructures

} // anon namespace

gimple_opt_pass *
make_pass_init_datastructures (gcc::context *ctxt)
{
  return new pass_init_datastructures (ctxt);
}

/* Deallocate memory associated with SSA data structures for FNDECL.  */

void
delete_tree_ssa (struct function *fn)
{
  fini_ssanames (fn);

  /* We no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active (fn))
    fini_ssa_operands (fn);

  fn->gimple_df->default_defs->empty ();
  fn->gimple_df->default_defs = NULL;
  pt_solution_reset (&fn->gimple_df->escaped);
  if (fn->gimple_df->decls_to_pointers != NULL)
    delete fn->gimple_df->decls_to_pointers;
  fn->gimple_df->decls_to_pointers = NULL;
  fn->gimple_df = NULL;

  /* We no longer need the edge variable maps.  */
  redirect_edge_var_map_empty ();
}

/* Return true if EXPR is a useless type conversion, otherwise return
   false.  */

bool
tree_ssa_useless_type_conversion (tree expr)
{
  /* If we have an assignment that merely uses a NOP_EXPR to change
     the top of the RHS to the type of the LHS and the type conversion
     is "safe", then strip away the type conversion so that we can
     enter LHS = RHS into the const_and_copies table.  */
  if (CONVERT_EXPR_P (expr)
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
    return useless_type_conversion_p
      (TREE_TYPE (expr),
       TREE_TYPE (TREE_OPERAND (expr, 0)));

  return false;
}

/* Strip conversions from EXP according to
   tree_ssa_useless_type_conversion and return the resulting
   expression.  */

tree
tree_ssa_strip_useless_type_conversions (tree exp)
{
  while (tree_ssa_useless_type_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}
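
/* Illustrative sketch (not compiled): compare two operands modulo
   conversions that do not change the value, e.g. a NOP_EXPR between
   compatible pointer types.  The example_* name is a hypothetical
   placeholder.  */
#if 0
static bool
example_same_value_p (tree a, tree b)
{
  a = tree_ssa_strip_useless_type_conversions (a);
  b = tree_ssa_strip_useless_type_conversions (b);
  return operand_equal_p (a, b, 0);
}
#endif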


/* Return true if T, an SSA_NAME, has an undefined value.  PARTIAL is what
   should be returned if the value is only partially undefined.  */

bool
ssa_undefined_value_p (tree t, bool partial)
{
  gimple *def_stmt;
  tree var = SSA_NAME_VAR (t);

  if (!var)
    ;
  /* Parameters get their initial value from the function entry.  */
  else if (TREE_CODE (var) == PARM_DECL)
    return false;
  /* When returning by reference the return address is actually a hidden
     parameter.  */
  else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
    return false;
  /* Hard register variables get their initial value from the ether.  */
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    return false;

  /* The value is undefined iff its definition statement is empty.  */
  def_stmt = SSA_NAME_DEF_STMT (t);
  if (gimple_nop_p (def_stmt))
    return true;

  /* Check whether a complex value was only partially defined.  */
  if (partial && is_gimple_assign (def_stmt)
      && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
    {
      tree rhs1, rhs2;

      rhs1 = gimple_assign_rhs1 (def_stmt);
      rhs2 = gimple_assign_rhs2 (def_stmt);
      return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
	     || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
    }
  return false;
}
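
/* For illustration (GIMPLE pseudo-dump): given

     int x;
     if (flag)
       x = 1;
     use (x);

   the PHI argument coming from the path that skips the store is a
   default definition of x with an empty defining statement, and
   ssa_undefined_value_p returns true for it; x_1(D) below is such a
   name:

     # x_2 = PHI <1(3), x_1(D)(2)>  */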


/* Return TRUE iff STMT, a gimple statement, references an undefined
   SSA name.  */

bool
gimple_uses_undefined_value_p (gimple *stmt)
{
  ssa_op_iter iter;
  tree op;

  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
    if (ssa_undefined_value_p (op))
      return true;

  return false;
}



/* If necessary, rewrite the base of the reference tree *TP from
   a MEM_REF to a plain or converted symbol.  */

static void
maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
    {
      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
	  && useless_type_conversion_p (TREE_TYPE (*tp),
					TREE_TYPE (TREE_TYPE (sym)))
	  && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
	{
	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
			TYPE_SIZE (TREE_TYPE (*tp)),
			int_const_binop (MULT_EXPR,
					 bitsize_int (BITS_PER_UNIT),
					 TREE_OPERAND (*tp, 1)));
	}
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
	       && useless_type_conversion_p (TREE_TYPE (*tp),
					     TREE_TYPE (TREE_TYPE (sym))))
	{
	  *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
			? REALPART_EXPR : IMAGPART_EXPR,
			TREE_TYPE (*tp), sym);
	}
      else if (integer_zerop (TREE_OPERAND (*tp, 1)))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (*tp),
					  TREE_TYPE (sym)))
	    *tp = build1 (VIEW_CONVERT_EXPR,
			  TREE_TYPE (*tp), sym);
	  else
	    *tp = sym;
	}
    }
}
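
/* For illustration, with a non-addressable vector "v2si v", a complex
   "float _Complex c" and a scalar "int i" the rewrites above turn

     MEM[(int *)&v, 4]    into   BIT_FIELD_REF <v, 32, 32>
     MEM[(float *)&c, 0]  into   REALPART_EXPR <c>
     MEM[(int *)&i, 0]    into   i  (or VIEW_CONVERT_EXPR<int>(i))

   so the symbol itself becomes visible to the SSA rewriter.  */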

/* For a tree REF return its base if it is the base of a MEM_REF
   that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */

static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base = ref;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
	  && useless_type_conversion_p (TREE_TYPE (base),
					TREE_TYPE (TREE_TYPE (decl)))
	  && wi::fits_uhwi_p (mem_ref_offset (base))
	  && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
			mem_ref_offset (base))
	  && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (base))))
	return NULL_TREE;
      if (DECL_P (decl)
	  && (!integer_zerop (TREE_OPERAND (base, 1))
	      || (DECL_SIZE (decl)
		  != TYPE_SIZE (TREE_TYPE (base)))
	      || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
	return decl;
    }

  return NULL_TREE;
}

/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */

static bool
non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
     a reasonably efficient manner... */
  if ((TREE_CODE (lhs) == REALPART_EXPR
       || TREE_CODE (lhs) == IMAGPART_EXPR)
      && DECL_P (TREE_OPERAND (lhs, 0)))
    return false;

  /* A decl that is wrapped inside a MEM_REF that covers
     it in full is also rewritable.
     ???  The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (lhs, 1)))
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
      if (DECL_P (decl)
	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
	  /* If the dynamic type of the decl has larger precision than
	     the decl itself we can't use the decls type for SSA rewriting.  */
	  && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
	       || compare_tree_int (DECL_SIZE (decl),
				    TYPE_PRECISION (TREE_TYPE (decl))) == 0)
	      || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		  && (TYPE_PRECISION (TREE_TYPE (decl))
		      >= TYPE_PRECISION (TREE_TYPE (lhs)))))
	  /* Make sure we are not re-writing non-float copying into float
	     copying as that can incur normalization.  */
	  && (! FLOAT_TYPE_P (TREE_TYPE (decl))
	      || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
	  && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
	return false;
    }

  return true;
}
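
/* For illustration: "d = x;", "__real c = x;" and "MEM[(T *)&d, 0] = x;"
   (with T of the same size, precision and volatility as d) are all
   rewritable stores, whereas a store to "MEM[(char *)&d, 1]" or to a
   component reference of d is not and forces d to stay in memory.  */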

/* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
   mark the variable VAR for conversion into SSA.  Variables made suitable
   are recorded in SUITABLE_FOR_RENAMING so the caller knows that stmt
   updating is required.  */

static void
maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
		    bitmap suitable_for_renaming)
{
  /* Global variables and result decls cannot be changed.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  if (TREE_ADDRESSABLE (var)
      /* Do not change TREE_ADDRESSABLE if we need to preserve var as
	 a non-register.  Otherwise we are confused and forget to
	 add virtual operands for it.  */
      && (!is_gimple_reg_type (TREE_TYPE (var))
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
    {
      TREE_ADDRESSABLE (var) = 0;
      if (is_gimple_reg (var))
	bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "No longer having address taken: ");
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
    }

  if (!DECL_GIMPLE_REG_P (var)
      && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (var)
      && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
    {
      DECL_GIMPLE_REG_P (var) = 1;
      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "Now a gimple register: ");
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
    }
}

/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */

void
execute_update_addresses_taken (void)
{
  basic_block bb;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  bitmap not_reg_needs = BITMAP_ALLOC (NULL);
  bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
  tree var;
  unsigned i;

  timevar_push (TV_ADDRESS_TAKEN);

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  enum gimple_code code = gimple_code (stmt);
	  tree decl;

	  /* Note all addresses taken by the stmt.  */
	  gimple_ior_addresses_taken (addresses_taken, stmt);

	  /* If we have a call or an assignment, see if the lhs contains
	     a local decl that must not become a gimple register.  */
	  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
	    {
              tree lhs = gimple_get_lhs (stmt);
              if (lhs
		  && TREE_CODE (lhs) != SSA_NAME
		  && ((code == GIMPLE_CALL && ! DECL_P (lhs))
		      || non_rewritable_lvalue_p (lhs)))
		{
		  decl = get_base_address (lhs);
		  if (DECL_P (decl))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
	    }

	  if (gimple_assign_single_p (stmt))
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      if ((decl = non_rewritable_mem_ref_base (rhs)))
		bitmap_set_bit (not_reg_needs, DECL_UID (decl));
	    }

	  else if (code == GIMPLE_CALL)
	    {
	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
		{
		  tree arg = gimple_call_arg (stmt, i);
		  if ((decl = non_rewritable_mem_ref_base (arg)))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		}
	    }

	  else if (code == GIMPLE_ASM)
	    {
	      gasm *asm_stmt = as_a <gasm *> (stmt);
	      for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
		{
		  tree link = gimple_asm_output_op (asm_stmt, i);
		  tree lhs = TREE_VALUE (link);
		  if (TREE_CODE (lhs) != SSA_NAME)
		    {
		      decl = get_base_address (lhs);
		      if (DECL_P (decl)
			  && (non_rewritable_lvalue_p (lhs)
			      /* We cannot move required conversions from
				 the lhs to the rhs in asm statements, so
				 require we do not need any.  */
			      || !useless_type_conversion_p
			            (TREE_TYPE (lhs), TREE_TYPE (decl))))
			bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		    }
		}
	      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
		{
		  tree link = gimple_asm_input_op (asm_stmt, i);
		  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		}
	    }
	}

      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  size_t i;
	  gphi *phi = gsi.phi ();

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree op = PHI_ARG_DEF (phi, i), var;
	      if (TREE_CODE (op) == ADDR_EXPR
		  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
		  && DECL_P (var))
		bitmap_set_bit (addresses_taken, DECL_UID (var));
	    }
	}
    }

  /* We cannot iterate over all referenced vars because that can contain
     unused vars from BLOCK trees, which causes code generation differences
     for -g vs. -g0.  */
  for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
			suitable_for_renaming);

  FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
			suitable_for_renaming);

  /* Operand caches need to be recomputed for operands referencing the updated
     variables and operands need to be rewritten to expose bare symbols.  */
  if (!bitmap_empty_p (suitable_for_renaming))
    {
      FOR_EACH_BB_FN (bb, cfun)
	for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  {
	    gimple *stmt = gsi_stmt (gsi);

	    /* Re-write TARGET_MEM_REFs of symbols we want to
	       rewrite into SSA form.  */
	    if (gimple_assign_single_p (stmt))
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
		tree sym;

		/* Rewrite LHS IMAG/REALPART_EXPR similar to
		   gimplify_modify_expr_complex_part.  */
		if ((TREE_CODE (lhs) == IMAGPART_EXPR
		     || TREE_CODE (lhs) == REALPART_EXPR)
		    && DECL_P (TREE_OPERAND (lhs, 0))
		    && bitmap_bit_p (suitable_for_renaming,
				     DECL_UID (TREE_OPERAND (lhs, 0))))
		  {
		    tree other = make_ssa_name (TREE_TYPE (lhs));
		    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
					? REALPART_EXPR : IMAGPART_EXPR,
					TREE_TYPE (other),
					TREE_OPERAND (lhs, 0));
		    gimple *load = gimple_build_assign (other, lrhs);
		    location_t loc = gimple_location (stmt);
		    gimple_set_location (load, loc);
		    gimple_set_vuse (load, gimple_vuse (stmt));
		    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
		    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
		    gimple_assign_set_rhs_with_ops
		      (&gsi, COMPLEX_EXPR,
		       TREE_CODE (lhs) == IMAGPART_EXPR
		       ? other : gimple_assign_rhs1 (stmt),
		       TREE_CODE (lhs) == IMAGPART_EXPR
		       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
		    stmt = gsi_stmt (gsi);
		    unlink_stmt_vdef (stmt);
		    update_stmt (stmt);
		    continue;
		  }

		/* We shouldn't have any fancy wrapping of
		   component-refs on the LHS, but look through
		   VIEW_CONVERT_EXPRs as that is easy.  */
		while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
		  lhs = TREE_OPERAND (lhs, 0);
		if (TREE_CODE (lhs) == MEM_REF
		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
		    && integer_zerop (TREE_OPERAND (lhs, 1))
		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
		    && DECL_P (sym)
		    && !TREE_ADDRESSABLE (sym)
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
		  lhs = sym;
		else
		  lhs = gimple_assign_lhs (stmt);

		/* Rewrite the RHS and make sure the resulting assignment
		   is validly typed.  */
		maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
		rhs = gimple_assign_rhs1 (stmt);
		if (gimple_assign_lhs (stmt) != lhs
		    && !useless_type_conversion_p (TREE_TYPE (lhs),
						   TREE_TYPE (rhs)))
		  {
		    if (gimple_clobber_p (stmt))
		      {
			rhs = build_constructor (TREE_TYPE (lhs), NULL);
			TREE_THIS_VOLATILE (rhs) = 1;
		      }
		    else
		      rhs = fold_build1 (VIEW_CONVERT_EXPR,
					 TREE_TYPE (lhs), rhs);
		  }
		if (gimple_assign_lhs (stmt) != lhs)
		  gimple_assign_set_lhs (stmt, lhs);

		if (gimple_assign_rhs1 (stmt) != rhs)
		  {
		    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
		    gimple_assign_set_rhs_from_tree (&gsi, rhs);
		  }
	      }

	    else if (gimple_code (stmt) == GIMPLE_CALL)
	      {
		unsigned i;
		for (i = 0; i < gimple_call_num_args (stmt); ++i)
		  {
		    tree *argp = gimple_call_arg_ptr (stmt, i);
		    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
		  }
	      }

	    else if (gimple_code (stmt) == GIMPLE_ASM)
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		unsigned i;
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
		  {
		    tree link = gimple_asm_output_op (asm_stmt, i);
		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
						suitable_for_renaming);
		  }
		for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
		  {
		    tree link = gimple_asm_input_op (asm_stmt, i);
		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
						suitable_for_renaming);
		  }
	      }

	    else if (gimple_debug_bind_p (stmt)
		     && gimple_debug_bind_has_value_p (stmt))
	      {
		tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
		tree decl;
		maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
		decl = non_rewritable_mem_ref_base (*valuep);
		if (decl
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
		  gimple_debug_bind_reset_value (stmt);
	      }

	    if (gimple_references_memory_p (stmt)
		|| is_gimple_debug (stmt))
	      update_stmt (stmt);

	    gsi_next (&gsi);
	  }

      /* Update SSA form here, we are called as non-pass as well.  */
      if (number_of_loops (cfun) > 1
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA))
	rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
      else
	update_ssa (TODO_update_ssa);
    }

  BITMAP_FREE (not_reg_needs);
  BITMAP_FREE (addresses_taken);
  BITMAP_FREE (suitable_for_renaming);
  timevar_pop (TV_ADDRESS_TAKEN);
}
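
/* For illustration, once the last dereference of &x has been optimized
   away in

     int x;
     x = 1;
     tmp_1 = x;

   the pass clears TREE_ADDRESSABLE on x and the subsequent update_ssa
   rewrites the loads and stores into SSA names:

     x_2 = 1;
     tmp_1 = x_2;  */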

namespace {

const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */

}; // class pass_update_address_taken

} // anon namespace

gimple_opt_pass *
make_pass_update_address_taken (gcc::context *ctxt)
{
  return new pass_update_address_taken (ctxt);
}