xref: /dragonfly/contrib/gcc-8.0/gcc/tree-ssa.c (revision 335b9e93)
1 /* Miscellaneous SSA utility functions.
2    Copyright (C) 2001-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "cfghooks.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "diagnostic-core.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "gimple-fold.h"
34 #include "gimplify.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "tree-ssa-loop-manip.h"
38 #include "tree-into-ssa.h"
39 #include "tree-ssa.h"
40 #include "cfgloop.h"
41 #include "cfgexpand.h"
42 #include "tree-cfg.h"
43 #include "tree-dfa.h"
44 #include "stringpool.h"
45 #include "attribs.h"
46 #include "asan.h"
47 
48 /* Pointer map of variable mappings, keyed by edge.  */
49 static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
50 
51 
52 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */
53 
54 void
55 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
56 {
57   edge_var_map new_node;
58 
59   if (edge_var_maps == NULL)
60     edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
61 
62   auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
63   new_node.def = def;
64   new_node.result = result;
65   new_node.locus = locus;
66 
67   slot.safe_push (new_node);
68 }
69 
70 
71 /* Clear the var mappings in edge E.  */
72 
73 void
74 redirect_edge_var_map_clear (edge e)
75 {
76   if (!edge_var_maps)
77     return;
78 
79   auto_vec<edge_var_map> *head = edge_var_maps->get (e);
80 
81   if (head)
82     head->release ();
83 }
84 
85 
/* Duplicate the redirected var mappings in OLDE in NEWE.

   This assumes a hash_map can have multiple edges mapping to the same
   var_map (many to one mapping), since we don't remove the previous mappings.
   */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  if (!edge_var_maps)
    return;

  /* Insert NEWE's slot before looking up OLDE: get_or_insert may resize
     the hash table, which would invalidate a pointer obtained from an
     earlier lookup.  Do not reorder these two calls.  */
  auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
  auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
  if (!old_head)
    return;

  /* Append a copy of all of OLDE's mappings onto NEWE's.  */
  new_head->safe_splice (*old_head);
}
105 
106 
107 /* Return the variable mappings for a given edge.  If there is none, return
108    NULL.  */
109 
110 vec<edge_var_map> *
111 redirect_edge_var_map_vector (edge e)
112 {
113   /* Hey, what kind of idiot would... you'd be surprised.  */
114   if (!edge_var_maps)
115     return NULL;
116 
117   auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
118   if (!slot)
119     return NULL;
120 
121   return slot;
122 }
123 
124 /* Clear the edge variable mappings.  */
125 
126 void
127 redirect_edge_var_map_empty (void)
128 {
129   if (edge_var_maps)
130     edge_var_maps->empty ();
131 }
132 
133 
134 /* Remove the corresponding arguments from the PHI nodes in E's
135    destination block and redirect it to DEST.  Return redirected edge.
136    The list of removed arguments is stored in a vector accessed
137    through edge_var_maps.  */
138 
139 edge
140 ssa_redirect_edge (edge e, basic_block dest)
141 {
142   gphi_iterator gsi;
143   gphi *phi;
144 
145   redirect_edge_var_map_clear (e);
146 
147   /* Remove the appropriate PHI arguments in E's destination block.
148      If we are redirecting a copied edge the destination has not
149      got PHI argument space reserved nor an interesting argument.  */
150   if (! (e->dest->flags & BB_DUPLICATED))
151     for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
152       {
153 	tree def;
154 	source_location locus ;
155 
156 	phi = gsi.phi ();
157 	def = gimple_phi_arg_def (phi, e->dest_idx);
158 	locus = gimple_phi_arg_location (phi, e->dest_idx);
159 
160 	if (def == NULL_TREE)
161 	  continue;
162 
163 	redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
164       }
165 
166   e = redirect_edge_succ_nodup (e, dest);
167 
168   return e;
169 }
170 
171 
172 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
173    E->dest.  */
174 
175 void
176 flush_pending_stmts (edge e)
177 {
178   gphi *phi;
179   edge_var_map *vm;
180   int i;
181   gphi_iterator gsi;
182 
183   vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
184   if (!v)
185     return;
186 
187   for (gsi = gsi_start_phis (e->dest), i = 0;
188        !gsi_end_p (gsi) && v->iterate (i, &vm);
189        gsi_next (&gsi), i++)
190     {
191       tree def;
192 
193       phi = gsi.phi ();
194       def = redirect_edge_var_map_def (vm);
195       add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
196     }
197 
198   redirect_edge_var_map_clear (e);
199 }
200 
201 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
202    GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
203    expression with a different value.
204 
205    This will update any annotations (say debug bind stmts) referring
206    to the original LHS, so that they use the RHS instead.  This is
207    done even if NLHS and LHS are the same, for it is understood that
208    the RHS will be modified afterwards, and NLHS will not be assigned
209    an equivalent value.
210 
211    Adjusting any non-annotation uses of the LHS, if needed, is a
212    responsibility of the caller.
213 
214    The effect of this call should be pretty much the same as that of
215    inserting a copy of STMT before STMT, and then removing the
216    original stmt, at which time gsi_remove() would have update
217    annotations, but using this function saves all the inserting,
218    copying and removing.  */
219 
220 void
221 gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
222 {
223   if (MAY_HAVE_DEBUG_BIND_STMTS)
224     {
225       tree lhs = gimple_get_lhs (stmt);
226 
227       gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
228 
229       insert_debug_temp_for_var_def (NULL, lhs);
230     }
231 
232   gimple_set_lhs (stmt, nlhs);
233 }
234 
235 
236 /* Given a tree for an expression for which we might want to emit
237    locations or values in debug information (generally a variable, but
238    we might deal with other kinds of trees in the future), return the
239    tree that should be used as the variable of a DEBUG_BIND STMT or
240    VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */
241 
242 tree
243 target_for_debug_bind (tree var)
244 {
245   if (!MAY_HAVE_DEBUG_BIND_STMTS)
246     return NULL_TREE;
247 
248   if (TREE_CODE (var) == SSA_NAME)
249     {
250       var = SSA_NAME_VAR (var);
251       if (var == NULL_TREE)
252 	return NULL_TREE;
253     }
254 
255   if ((!VAR_P (var) || VAR_DECL_IS_VIRTUAL_OPERAND (var))
256       && TREE_CODE (var) != PARM_DECL)
257     return NULL_TREE;
258 
259   if (DECL_HAS_VALUE_EXPR_P (var))
260     return target_for_debug_bind (DECL_VALUE_EXPR (var));
261 
262   if (DECL_IGNORED_P (var))
263     return NULL_TREE;
264 
265   /* var-tracking only tracks registers.  */
266   if (!is_gimple_reg_type (TREE_TYPE (var)))
267     return NULL_TREE;
268 
269   return var;
270 }
271 
272 /* Called via walk_tree, look for SSA_NAMEs that have already been
273    released.  */
274 
275 static tree
276 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
277 {
278   struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
279 
280   if (wi && wi->is_lhs)
281     return NULL_TREE;
282 
283   if (TREE_CODE (*tp) == SSA_NAME)
284     {
285       if (SSA_NAME_IN_FREE_LIST (*tp))
286 	return *tp;
287 
288       *walk_subtrees = 0;
289     }
290   else if (IS_TYPE_OR_DECL_P (*tp))
291     *walk_subtrees = 0;
292 
293   return NULL_TREE;
294 }
295 
/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
   by other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.

   GSI, if non-NULL, supplies the insertion point and the defining
   statement; otherwise VAR's SSA_NAME_DEF_STMT is used.  When no value
   can be propagated safely, the referring debug binds are reset
   instead, losing that debug information.  */

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple *stmt;
  gimple *def_stmt = NULL;
  int usecount = 0;
  tree value = NULL;

  if (!MAY_HAVE_DEBUG_BIND_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
	continue;

      /* Two or more debug uses: we will definitely need a temp,
	 no need to look further.  */
      if (usecount++)
	break;

      if (gimple_debug_bind_get_value (stmt) != var)
	{
	  /* Count this as an additional use, so as to make sure we
	     use a temp unless VAR's definition has a SINGLE_RHS that
	     can be shared.  */
	  usecount++;
	  break;
	}
    }

  /* No debug binds reference VAR; nothing to do.  */
  if (!usecount)
    return;

  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      /* A PHI can only be propagated if it is degenerate (all args
	 equal); otherwise there is no single value to bind.  */
      value = degenerate_phi_result (as_a <gphi *> (def_stmt));
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
	value = NULL;
      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
	 to.  */
      else if (value == error_mark_node)
	value = NULL;
    }
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
	{
	  struct walk_stmt_info wi;

	  memset (&wi, 0, sizeof (wi));

	  /* When removing blocks without following reverse dominance
	     order, we may sometimes encounter SSA_NAMEs that have
	     already been released, referenced in other SSA_DEFs that
	     we're about to release.  Consider:

	     <bb X>:
	     v_1 = foo;

	     <bb Y>:
	     w_2 = v_1 + bar;
	     # DEBUG w => w_2

	     If we deleted BB X first, propagating the value of w_2
	     won't do us any good.  It's too late to recover their
	     original definition of v_1: when it was deleted, it was
	     only referenced in other DEFs, it couldn't possibly know
	     it should have been retained, and propagating every
	     single DEF just in case it might have to be propagated
	     into a DEBUG STMT would probably be too wasteful.

	     When dominator information is not readily available, we
	     check for and accept some loss of debug information.  But
	     if it is available, there's no excuse for us to remove
	     blocks in the wrong order, so we don't even check for
	     dead SSA NAMEs.  SSA verification shall catch any
	     errors.  */
	  if ((!gsi && !gimple_bb (def_stmt))
	      || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
	    no_value = true;
	}

      if (!no_value)
	value = gimple_assign_rhs_to_tree (def_stmt);
    }

  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
	 expression (usecount would have been incremented again
	 otherwise), and the definition involves only constants and
	 SSA names, then we can propagate VALUE into this single use,
	 avoiding the temp.

	 We can also avoid using a temp if VALUE can be shared and
	 propagated into all uses, without generating expressions that
	 wouldn't be valid gimple RHSs.

	 Other cases that would require unsharing or non-gimple RHSs
	 are deferred to a debug temp, although we could avoid temps
	 at the expense of duplication of expressions.  */

      if (CONSTANT_CLASS_P (value)
	  || gimple_code (def_stmt) == GIMPLE_PHI
	  || (usecount == 1
	      && (!gimple_assign_single_p (def_stmt)
		  || is_gimple_min_invariant (value)))
	  || is_gimple_reg (value))
	;
      else
	{
	  /* Create a debug temp (DEBUG_EXPR_DECL), bind it to VALUE
	     right before the def stmt, and use the temp in all the
	     debug uses instead.  */
	  gdebug *def_temp;
	  tree vexpr = make_node (DEBUG_EXPR_DECL);

	  def_temp = gimple_build_debug_bind (vexpr,
					      unshare_expr (value),
					      def_stmt);

	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (value);
	  if (DECL_P (value))
	    SET_DECL_MODE (vexpr, DECL_MODE (value));
	  else
	    SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (value)));

	  if (gsi)
	    gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
	  else
	    {
	      gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
	      gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
	    }

	  value = vexpr;
	}
    }

  /* Rewrite all debug binds that use VAR: replace the uses with VALUE
     if we have one, otherwise reset the bind (dropping the info).  */
  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
	continue;

      if (value)
	{
	  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
	    /* unshare_expr is not needed here.  vexpr is either a
	       SINGLE_RHS, that can be safely shared, some other RHS
	       that was unshared when we found it had a single debug
	       use, or a DEBUG_EXPR_DECL, that can be safely
	       shared.  */
	    SET_USE (use_p, unshare_expr (value));
	  /* If we didn't replace uses with a debug decl fold the
	     resulting expression.  Otherwise we end up with invalid IL.  */
	  if (TREE_CODE (value) != DEBUG_EXPR_DECL)
	    {
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	      fold_stmt_inplace (&gsi);
	    }
	}
      else
	gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}
485 
486 
487 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
488    other DEBUG stmts, and replace uses of the DEF with the
489    newly-created debug temp.  */
490 
491 void
492 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
493 {
494   gimple *stmt;
495   ssa_op_iter op_iter;
496   def_operand_p def_p;
497 
498   if (!MAY_HAVE_DEBUG_BIND_STMTS)
499     return;
500 
501   stmt = gsi_stmt (*gsi);
502 
503   FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
504     {
505       tree var = DEF_FROM_PTR (def_p);
506 
507       if (TREE_CODE (var) != SSA_NAME)
508 	continue;
509 
510       insert_debug_temp_for_var_def (gsi, var);
511     }
512 }
513 
514 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT.  */
515 
516 void
517 reset_debug_uses (gimple *stmt)
518 {
519   ssa_op_iter op_iter;
520   def_operand_p def_p;
521   imm_use_iterator imm_iter;
522   gimple *use_stmt;
523 
524   if (!MAY_HAVE_DEBUG_BIND_STMTS)
525     return;
526 
527   FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
528     {
529       tree var = DEF_FROM_PTR (def_p);
530 
531       if (TREE_CODE (var) != SSA_NAME)
532 	continue;
533 
534       FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
535 	{
536 	  if (!gimple_debug_bind_p (use_stmt))
537 	    continue;
538 
539 	  gimple_debug_bind_reset_value (use_stmt);
540 	  update_stmt (use_stmt);
541 	}
542     }
543 }
544 
/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
   dominated stmts before their dominators, so that release_ssa_defs
   stands a chance of propagating DEFs into debug bind stmts.  */

void
release_defs_bitset (bitmap toremove)
{
  unsigned j;
  bitmap_iterator bi;

  /* Performing a topological sort is probably overkill, this will
     most likely run in slightly superlinear time, rather than the
     pathological quadratic worst case.
     Each outer iteration removes from TOREMOVE every name whose
     removal is not deferred; deferred names are retried until the
     bitmap is empty.  */
  while (!bitmap_empty_p (toremove))
    {
      /* Bit pending removal from TOREMOVE; the actual clear is delayed
	 by one iteration because clearing the bit we are standing on
	 would invalidate the bitmap iterator.  -1U means "none".  */
      unsigned to_remove_bit = -1U;
      EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
	{
	  /* Clear the bit scheduled on the previous iteration.  */
	  if (to_remove_bit != -1U)
	    {
	      bitmap_clear_bit (toremove, to_remove_bit);
	      to_remove_bit = -1U;
	    }

	  bool remove_now = true;
	  tree var = ssa_name (j);
	  gimple *stmt;
	  imm_use_iterator uit;

	  FOR_EACH_IMM_USE_STMT (stmt, uit, var)
	    {
	      ssa_op_iter dit;
	      def_operand_p def_p;

	      /* We can't propagate PHI nodes into debug stmts.  */
	      if (gimple_code (stmt) == GIMPLE_PHI
		  || is_gimple_debug (stmt))
		continue;

	      /* If we find another definition to remove that uses
		 the one we're looking at, defer the removal of this
		 one, so that it can be propagated into debug stmts
		 after the other is.  */
	      FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
		{
		  tree odef = DEF_FROM_PTR (def_p);

		  if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
		    {
		      remove_now = false;
		      break;
		    }
		}

	      if (!remove_now)
		BREAK_FROM_IMM_USE_STMT (uit);
	    }

	  if (remove_now)
	    {
	      gimple *def = SSA_NAME_DEF_STMT (var);
	      gimple_stmt_iterator gsi = gsi_for_stmt (def);

	      if (gimple_code (def) == GIMPLE_PHI)
		remove_phi_node (&gsi, true);
	      else
		{
		  gsi_remove (&gsi, true);
		  release_defs (def);
		}

	      /* Schedule J for clearing on the next iteration (or
		 after the loop, if this was the last set bit).  */
	      to_remove_bit = j;
	    }
	}
      /* Clear the bit left over from the last iteration, if any.  */
      if (to_remove_bit != -1U)
	bitmap_clear_bit (toremove, to_remove_bit);
    }

}
624 
625 /* Verify virtual SSA form.  */
626 
627 bool
628 verify_vssa (basic_block bb, tree current_vdef, sbitmap visited)
629 {
630   bool err = false;
631 
632   if (bitmap_bit_p (visited, bb->index))
633     return false;
634 
635   bitmap_set_bit (visited, bb->index);
636 
637   /* Pick up the single virtual PHI def.  */
638   gphi *phi = NULL;
639   for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
640        gsi_next (&si))
641     {
642       tree res = gimple_phi_result (si.phi ());
643       if (virtual_operand_p (res))
644 	{
645 	  if (phi)
646 	    {
647 	      error ("multiple virtual PHI nodes in BB %d", bb->index);
648 	      print_gimple_stmt (stderr, phi, 0);
649 	      print_gimple_stmt (stderr, si.phi (), 0);
650 	      err = true;
651 	    }
652 	  else
653 	    phi = si.phi ();
654 	}
655     }
656   if (phi)
657     {
658       current_vdef = gimple_phi_result (phi);
659       if (TREE_CODE (current_vdef) != SSA_NAME)
660 	{
661 	  error ("virtual definition is not an SSA name");
662 	  print_gimple_stmt (stderr, phi, 0);
663 	  err = true;
664 	}
665     }
666 
667   /* Verify stmts.  */
668   for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
669        gsi_next (&gsi))
670     {
671       gimple *stmt = gsi_stmt (gsi);
672       tree vuse = gimple_vuse (stmt);
673       if (vuse)
674 	{
675 	  if (vuse != current_vdef)
676 	    {
677 	      error ("stmt with wrong VUSE");
678 	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
679 	      fprintf (stderr, "expected ");
680 	      print_generic_expr (stderr, current_vdef);
681 	      fprintf (stderr, "\n");
682 	      err = true;
683 	    }
684 	  tree vdef = gimple_vdef (stmt);
685 	  if (vdef)
686 	    {
687 	      current_vdef = vdef;
688 	      if (TREE_CODE (current_vdef) != SSA_NAME)
689 		{
690 		  error ("virtual definition is not an SSA name");
691 		  print_gimple_stmt (stderr, phi, 0);
692 		  err = true;
693 		}
694 	    }
695 	}
696     }
697 
698   /* Verify destination PHI uses and recurse.  */
699   edge_iterator ei;
700   edge e;
701   FOR_EACH_EDGE (e, ei, bb->succs)
702     {
703       gphi *phi = get_virtual_phi (e->dest);
704       if (phi
705 	  && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef)
706 	{
707 	  error ("PHI node with wrong VUSE on edge from BB %d",
708 		 e->src->index);
709 	  print_gimple_stmt (stderr, phi, 0, TDF_VOPS);
710 	  fprintf (stderr, "expected ");
711 	  print_generic_expr (stderr, current_vdef);
712 	  fprintf (stderr, "\n");
713 	  err = true;
714 	}
715 
716       /* Recurse.  */
717       err |= verify_vssa (e->dest, current_vdef, visited);
718     }
719 
720   return err;
721 }
722 
/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
      operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  /* Must actually be an SSA_NAME node.  */
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  /* A name on the free list must not appear in the IL.  */
  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  /* When the name is associated with a symbol, their types must agree.  */
  if (SSA_NAME_VAR (ssa_name) != NULL_TREE
      && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  /* Virtual-operand context requires a virtual name...  */
  if (is_virtual && !virtual_operand_p (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  /* ...and a virtual name must be based on the function's VOP decl.  */
  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
    {
      error ("virtual SSA name for non-VOP decl");
      return true;
    }

  /* Conversely, a real-operand context must not hold a virtual name.  */
  if (!is_virtual && virtual_operand_p (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  /* Default defs are defined by an empty (GIMPLE_NOP) statement.  */
  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}
777 
778 
779 /* Return true if the definition of SSA_NAME at block BB is malformed.
780 
781    STMT is the statement where SSA_NAME is created.
782 
783    DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
784       version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
785       it means that the block in that array slot contains the
786       definition of SSA_NAME.
787 
788    IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */
789 
790 static bool
791 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
792 	    gimple *stmt, bool is_virtual)
793 {
794   if (verify_ssa_name (ssa_name, is_virtual))
795     goto err;
796 
797   if (SSA_NAME_VAR (ssa_name)
798       && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
799       && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
800     {
801       error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
802       goto err;
803     }
804 
805   if (definition_block[SSA_NAME_VERSION (ssa_name)])
806     {
807       error ("SSA_NAME created in two different blocks %i and %i",
808 	     definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
809       goto err;
810     }
811 
812   definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
813 
814   if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
815     {
816       error ("SSA_NAME_DEF_STMT is wrong");
817       fprintf (stderr, "Expected definition statement:\n");
818       print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
819       fprintf (stderr, "\nActual definition statement:\n");
820       print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
821       goto err;
822     }
823 
824   return false;
825 
826 err:
827   fprintf (stderr, "while verifying SSA_NAME ");
828   print_generic_expr (stderr, ssa_name);
829   fprintf (stderr, " in statement\n");
830   print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
831 
832   return true;
833 }
834 
835 
836 /* Return true if the use of SSA_NAME at statement STMT in block BB is
837    malformed.
838 
839    DEF_BB is the block where SSA_NAME was found to be created.
840 
841    IDOM contains immediate dominator information for the flowgraph.
842 
843    CHECK_ABNORMAL is true if the caller wants to check whether this use
844       is flowing through an abnormal edge (only used when checking PHI
845       arguments).
846 
847    If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
848      that are defined before STMT in basic block BB.  */
849 
850 static bool
851 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
852 	    gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
853 {
854   bool err = false;
855   tree ssa_name = USE_FROM_PTR (use_p);
856 
857   if (!TREE_VISITED (ssa_name))
858     if (verify_imm_links (stderr, ssa_name))
859       err = true;
860 
861   TREE_VISITED (ssa_name) = 1;
862 
863   if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
864       && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
865     ; /* Default definitions have empty statements.  Nothing to do.  */
866   else if (!def_bb)
867     {
868       error ("missing definition");
869       err = true;
870     }
871   else if (bb != def_bb
872 	   && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
873     {
874       error ("definition in block %i does not dominate use in block %i",
875 	     def_bb->index, bb->index);
876       err = true;
877     }
878   else if (bb == def_bb
879 	   && names_defined_in_bb != NULL
880 	   && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
881     {
882       error ("definition in block %i follows the use", def_bb->index);
883       err = true;
884     }
885 
886   if (check_abnormal
887       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
888     {
889       error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
890       err = true;
891     }
892 
893   /* Make sure the use is in an appropriate list by checking the previous
894      element to make sure it's the same.  */
895   if (use_p->prev == NULL)
896     {
897       error ("no immediate_use list");
898       err = true;
899     }
900   else
901     {
902       tree listvar;
903       if (use_p->prev->use == NULL)
904 	listvar = use_p->prev->loc.ssa_name;
905       else
906 	listvar = USE_FROM_PTR (use_p->prev);
907       if (listvar != ssa_name)
908         {
909 	  error ("wrong immediate use list");
910 	  err = true;
911 	}
912     }
913 
914   if (err)
915     {
916       fprintf (stderr, "for SSA_NAME: ");
917       print_generic_expr (stderr, ssa_name, TDF_VOPS);
918       fprintf (stderr, " in statement:\n");
919       print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
920     }
921 
922   return err;
923 }
924 
925 
926 /* Return true if any of the arguments for PHI node PHI at block BB is
927    malformed.
928 
929    DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
930       version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
931       it means that the block in that array slot contains the
932       definition of SSA_NAME.  */
933 
934 static bool
935 verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
936 {
937   edge e;
938   bool err = false;
939   size_t i, phi_num_args = gimple_phi_num_args (phi);
940 
941   if (EDGE_COUNT (bb->preds) != phi_num_args)
942     {
943       error ("incoming edge count does not match number of PHI arguments");
944       err = true;
945       goto error;
946     }
947 
948   for (i = 0; i < phi_num_args; i++)
949     {
950       use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
951       tree op = USE_FROM_PTR (op_p);
952 
953       e = EDGE_PRED (bb, i);
954 
955       if (op == NULL_TREE)
956 	{
957 	  error ("PHI argument is missing for edge %d->%d",
958 	         e->src->index,
959 		 e->dest->index);
960 	  err = true;
961 	  goto error;
962 	}
963 
964       if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
965 	{
966 	  error ("PHI argument is not SSA_NAME, or invariant");
967 	  err = true;
968 	}
969 
970       if (TREE_CODE (op) == SSA_NAME)
971 	{
972 	  err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
973 	  err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
974 			     op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
975 	}
976 
977       if (TREE_CODE (op) == ADDR_EXPR)
978 	{
979 	  tree base = TREE_OPERAND (op, 0);
980 	  while (handled_component_p (base))
981 	    base = TREE_OPERAND (base, 0);
982 	  if ((VAR_P (base)
983 	       || TREE_CODE (base) == PARM_DECL
984 	       || TREE_CODE (base) == RESULT_DECL)
985 	      && !TREE_ADDRESSABLE (base))
986 	    {
987 	      error ("address taken, but ADDRESSABLE bit not set");
988 	      err = true;
989 	    }
990 	}
991 
992       if (e->dest != bb)
993 	{
994 	  error ("wrong edge %d->%d for PHI argument",
995 	         e->src->index, e->dest->index);
996 	  err = true;
997 	}
998 
999       if (err)
1000 	{
1001 	  fprintf (stderr, "PHI argument\n");
1002 	  print_generic_stmt (stderr, op, TDF_VOPS);
1003 	  goto error;
1004 	}
1005     }
1006 
1007 error:
1008   if (err)
1009     {
1010       fprintf (stderr, "for PHI node\n");
1011       print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
1012     }
1013 
1014 
1015   return err;
1016 }
1017 
1018 
/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.  */

DEBUG_FUNCTION void
verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
{
  basic_block bb;
  /* Indexed by SSA version number; records the basic block defining each
     SSA name so that uses can later be verified against it.  */
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  /* Remember whether dominance info was already available so it can be
     restored to its prior state before returning.  */
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  auto_bitmap names_defined_in_bb;

  gcc_assert (!need_ssa_update_p (cfun));

  timevar_push (TV_TREE_SSA_VERIFY);

    {
      /* Keep track of SSA names present in the IL.  */
      size_t i;
      tree name;
      /* Maps SSA_NAME_PTR_INFO / SSA_NAME_RANGE_INFO pointers to the first
	 SSA name seen carrying them, to detect erroneous sharing.  */
      hash_map <void *, tree> ssa_info;

      FOR_EACH_SSA_NAME (i, name, cfun)
	{
	  gimple *stmt;
	  TREE_VISITED (name) = 0;

	  verify_ssa_name (name, virtual_operand_p (name));

	  stmt = SSA_NAME_DEF_STMT (name);
	  if (!gimple_nop_p (stmt))
	    {
	      basic_block bb = gimple_bb (stmt);
	      if (verify_def (bb, definition_block,
			      name, stmt, virtual_operand_p (name)))
		goto err;
	    }

	  /* Pointer names carry points-to info, integral names carry range
	     info; either kind must be unique to a single SSA name.  */
	  void *info = NULL;
	  if (POINTER_TYPE_P (TREE_TYPE (name)))
	    info = SSA_NAME_PTR_INFO (name);
	  else if (INTEGRAL_TYPE_P (TREE_TYPE (name)))
	    info = SSA_NAME_RANGE_INFO (name);
	  if (info)
	    {
	      bool existed;
	      tree &val = ssa_info.get_or_insert (info, &existed);
	      if (existed)
		{
		  error ("shared SSA name info");
		  print_generic_expr (stderr, val);
		  fprintf (stderr, " and ");
		  print_generic_expr (stderr, name);
		  fprintf (stderr, "\n");
		  goto err;
		}
	      else
		val = name;
	    }
	}
    }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->aux)
	    {
	      error ("AUX pointer initialized for edge %d->%d", e->src->index,
		      e->dest->index);
	      goto err;
	    }
	}

      /* Verify the arguments for every PHI node in the block.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  if (verify_phi_args (phi, bb, definition_block))
	    goto err;

	  bitmap_set_bit (names_defined_in_bb,
			  SSA_NAME_VERSION (gimple_phi_result (phi)));
	}

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  use_operand_p use_p;

	  if (check_modified_stmt && gimple_modified_p (stmt))
	    {
	      error ("stmt (%p) marked modified after optimization pass: ",
		     (void *)stmt);
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
	    {
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  /* Debug binds without a value carry nothing to verify.  */
	  if (gimple_debug_bind_p (stmt)
	      && !gimple_debug_bind_has_value_p (stmt))
	    continue;

	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
	    {
	      op = USE_FROM_PTR (use_p);
	      if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
			      use_p, stmt, false, names_defined_in_bb))
		goto err;
	    }

	  /* Each def's SSA_NAME_DEF_STMT must point back at this stmt.  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
	    {
	      if (SSA_NAME_DEF_STMT (op) != stmt)
		{
		  error ("SSA_NAME_DEF_STMT is wrong");
		  fprintf (stderr, "Expected definition statement:\n");
		  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
		  fprintf (stderr, "\nActual definition statement:\n");
		  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
				     4, TDF_VOPS);
		  goto err;
		}
	      bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
	    }
	}

      bitmap_clear (names_defined_in_bb);
    }

  free (definition_block);

  /* If the function has a virtual operand default definition, verify the
     virtual SSA web starting from the entry block.  */
  if (gimple_vop (cfun)
      && ssa_default_def (cfun, gimple_vop (cfun)))
    {
      auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
      bitmap_clear (visited);
      if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun),
		       ssa_default_def (cfun, gimple_vop (cfun)), visited))
	goto err;
    }

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}
1190 
1191 
1192 /* Initialize global DFA and SSA structures.  */
1193 
1194 void
1195 init_tree_ssa (struct function *fn)
1196 {
1197   fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
1198   fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
1199   pt_solution_reset (&fn->gimple_df->escaped);
1200   init_ssanames (fn, 0);
1201 }
1202 
1203 /* Deallocate memory associated with SSA data structures for FNDECL.  */
1204 
1205 void
1206 delete_tree_ssa (struct function *fn)
1207 {
1208   fini_ssanames (fn);
1209 
1210   /* We no longer maintain the SSA operand cache at this point.  */
1211   if (ssa_operands_active (fn))
1212     fini_ssa_operands (fn);
1213 
1214   fn->gimple_df->default_defs->empty ();
1215   fn->gimple_df->default_defs = NULL;
1216   pt_solution_reset (&fn->gimple_df->escaped);
1217   if (fn->gimple_df->decls_to_pointers != NULL)
1218     delete fn->gimple_df->decls_to_pointers;
1219   fn->gimple_df->decls_to_pointers = NULL;
1220   fn->gimple_df = NULL;
1221 
1222   /* We no longer need the edge variable maps.  */
1223   redirect_edge_var_map_empty ();
1224 }
1225 
1226 /* Return true if EXPR is a useless type conversion, otherwise return
1227    false.  */
1228 
1229 bool
1230 tree_ssa_useless_type_conversion (tree expr)
1231 {
1232   /* If we have an assignment that merely uses a NOP_EXPR to change
1233      the top of the RHS to the type of the LHS and the type conversion
1234      is "safe", then strip away the type conversion so that we can
1235      enter LHS = RHS into the const_and_copies table.  */
1236   if (CONVERT_EXPR_P (expr)
1237       || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1238       || TREE_CODE (expr) == NON_LVALUE_EXPR)
1239     return useless_type_conversion_p
1240       (TREE_TYPE (expr),
1241        TREE_TYPE (TREE_OPERAND (expr, 0)));
1242 
1243   return false;
1244 }
1245 
1246 /* Strip conversions from EXP according to
1247    tree_ssa_useless_type_conversion and return the resulting
1248    expression.  */
1249 
1250 tree
1251 tree_ssa_strip_useless_type_conversions (tree exp)
1252 {
1253   while (tree_ssa_useless_type_conversion (exp))
1254     exp = TREE_OPERAND (exp, 0);
1255   return exp;
1256 }
1257 
1258 /* Return true if T, as SSA_NAME, has an implicit default defined value.  */
1259 
1260 bool
1261 ssa_defined_default_def_p (tree t)
1262 {
1263   tree var = SSA_NAME_VAR (t);
1264 
1265   if (!var)
1266     ;
1267   /* Parameters get their initial value from the function entry.  */
1268   else if (TREE_CODE (var) == PARM_DECL)
1269     return true;
1270   /* When returning by reference the return address is actually a hidden
1271      parameter.  */
1272   else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1273     return true;
1274   /* Hard register variables get their initial value from the ether.  */
1275   else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1276     return true;
1277 
1278   return false;
1279 }
1280 
1281 
1282 /* Return true if T, an SSA_NAME, has an undefined value.  PARTIAL is what
1283    should be returned if the value is only partially undefined.  */
1284 
1285 bool
1286 ssa_undefined_value_p (tree t, bool partial)
1287 {
1288   gimple *def_stmt;
1289 
1290   if (ssa_defined_default_def_p (t))
1291     return false;
1292 
1293   /* The value is undefined iff its definition statement is empty.  */
1294   def_stmt = SSA_NAME_DEF_STMT (t);
1295   if (gimple_nop_p (def_stmt))
1296     return true;
1297 
1298   /* Check if the complex was not only partially defined.  */
1299   if (partial && is_gimple_assign (def_stmt)
1300       && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
1301     {
1302       tree rhs1, rhs2;
1303 
1304       rhs1 = gimple_assign_rhs1 (def_stmt);
1305       rhs2 = gimple_assign_rhs2 (def_stmt);
1306       return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
1307 	     || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
1308     }
1309   return false;
1310 }
1311 
1312 
1313 /* Return TRUE iff STMT, a gimple statement, references an undefined
1314    SSA name.  */
1315 
1316 bool
1317 gimple_uses_undefined_value_p (gimple *stmt)
1318 {
1319   ssa_op_iter iter;
1320   tree op;
1321 
1322   FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
1323     if (ssa_undefined_value_p (op))
1324       return true;
1325 
1326   return false;
1327 }
1328 
1329 
1330 
/* If necessary, rewrite the base of the reference tree *TP from
   a MEM_REF to a plain or converted symbol.  */

static void
maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  /* Step down through handled components to the base of the reference.  */
  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  /* Only rewrite MEM_REF (&SYM, OFF) where SYM is a non-addressable decl
     selected for renaming and the access has non-void register type.  */
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
      && is_gimple_reg_type (TREE_TYPE (*tp))
      && ! VOID_TYPE_P (TREE_TYPE (*tp)))
    {
      /* Element extract from a vector decl where the offset is a
	 multiple of the element size: rewrite to a BIT_FIELD_REF,
	 converting the byte offset to a bit position.  */
      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
	  && useless_type_conversion_p (TREE_TYPE (*tp),
					TREE_TYPE (TREE_TYPE (sym)))
	  && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
	{
	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
			TYPE_SIZE (TREE_TYPE (*tp)),
			int_const_binop (MULT_EXPR,
					 bitsize_int (BITS_PER_UNIT),
					 TREE_OPERAND (*tp, 1)));
	}
      /* Part extract from a complex decl: REALPART_EXPR for offset
	 zero, IMAGPART_EXPR otherwise.  */
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
	       && useless_type_conversion_p (TREE_TYPE (*tp),
					     TREE_TYPE (TREE_TYPE (sym))))
	{
	  *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
			? REALPART_EXPR : IMAGPART_EXPR,
			TREE_TYPE (*tp), sym);
	}
      /* Whole-decl access at offset zero: use the symbol directly,
	 wrapped in a VIEW_CONVERT_EXPR when the types differ.  */
      else if (integer_zerop (TREE_OPERAND (*tp, 1))
	       && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp)))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (*tp),
					  TREE_TYPE (sym)))
	    *tp = build1 (VIEW_CONVERT_EXPR,
			  TREE_TYPE (*tp), sym);
	  else
	    *tp = sym;
	}
      /* In-bounds extract of a whole-byte-sized sub-object: rewrite to
	 a BIT_FIELD_REF.  Integral accesses whose size differs from
	 their precision (bitfield precision) are excluded.  */
      else if (DECL_SIZE (sym)
	       && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
	       && (known_subrange_p
		   (mem_ref_offset (*tp),
		    wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
		    0, wi::to_offset (DECL_SIZE_UNIT (sym))))
	       && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
		   || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
		       == TYPE_PRECISION (TREE_TYPE (*tp))))
	       && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))),
				  BITS_PER_UNIT) == 0)
	{
	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
			TYPE_SIZE (TREE_TYPE (*tp)),
			wide_int_to_tree (bitsizetype,
					  mem_ref_offset (*tp)
					  << LOG2_BITS_PER_UNIT));
	}
    }
}
1400 
/* For a tree REF return its base if it is the base of a MEM_REF
   that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */

static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  /* No invariant-address MEM_REF at the bottom of REF: report a decl
     base if there is one, otherwise nothing blocks rewriting.  */
  if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref))))
    {
      base = get_base_address (ref);
      if (DECL_P (base))
	return base;
      return NULL_TREE;
    }

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if (! DECL_P (decl))
	return NULL_TREE;
      /* Non-register-typed, void, or volatile-mismatched accesses keep
	 the decl out of SSA form.  */
      if (! is_gimple_reg_type (TREE_TYPE (base))
	  || VOID_TYPE_P (TREE_TYPE (base))
	  || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base))
	return decl;
      /* An in-bounds, element-aligned extract from a vector or complex
	 decl can be lowered (see maybe_rewrite_mem_ref_base).  */
      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
	  && useless_type_conversion_p (TREE_TYPE (base),
					TREE_TYPE (TREE_TYPE (decl)))
	  && known_ge (mem_ref_offset (base), 0)
	  && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
		       mem_ref_offset (base))
	  && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (base))))
	return NULL_TREE;
      /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR.  */
      if (integer_zerop (TREE_OPERAND (base, 1))
	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base)))
	return NULL_TREE;
      /* For integral typed extracts we can use a BIT_FIELD_REF.  */
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
	  && (known_subrange_p
	      (mem_ref_offset (base),
	       wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
	       0, wi::to_poly_offset (DECL_SIZE_UNIT (decl))))
	  /* ???  We can't handle bitfield precision extracts without
	     either using an alternate type for the BIT_FIELD_REF and
	     then doing a conversion or possibly adjusting the offset
	     according to endianness.  */
	  && (! INTEGRAL_TYPE_P (TREE_TYPE (base))
	      || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base)))
		  == TYPE_PRECISION (TREE_TYPE (base))))
	  && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))),
			     BITS_PER_UNIT) == 0)
	return NULL_TREE;
      /* Every lowering strategy failed; the decl blocks rewriting.  */
      return decl;
    }

  return NULL_TREE;
}
1469 
/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */

static bool
non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
     a reasonably efficient manner... */
  if ((TREE_CODE (lhs) == REALPART_EXPR
       || TREE_CODE (lhs) == IMAGPART_EXPR)
      && DECL_P (TREE_OPERAND (lhs, 0)))
    return false;

  /* ???  The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);

      /* A decl that is wrapped inside a MEM-REF that covers
	 it in full is also rewritable.  */
      if (integer_zerop (TREE_OPERAND (lhs, 1))
	  && DECL_P (decl)
	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
	  /* If the dynamic type of the decl has larger precision than
	     the decl itself we can't use the decls type for SSA rewriting.  */
	  && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
	       || compare_tree_int (DECL_SIZE (decl),
				    TYPE_PRECISION (TREE_TYPE (decl))) == 0)
	      || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		  && (TYPE_PRECISION (TREE_TYPE (decl))
		      >= TYPE_PRECISION (TREE_TYPE (lhs)))))
	  /* Make sure we are not re-writing non-float copying into float
	     copying as that can incur normalization.  */
	  && (! FLOAT_TYPE_P (TREE_TYPE (decl))
	      || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
	  && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
	return false;

      /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
	 using a BIT_INSERT_EXPR.  The store must be in bounds and
	 aligned to the element size.  */
      if (DECL_P (decl)
	  && VECTOR_TYPE_P (TREE_TYPE (decl))
	  && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
	  && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
			      TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))), 0)
	  && known_ge (mem_ref_offset (lhs), 0)
	  && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
		       mem_ref_offset (lhs))
	  && multiple_of_p (sizetype, TREE_OPERAND (lhs, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (lhs))))
	return false;
    }

  /* A vector-insert using a BIT_FIELD_REF is rewritable using
     BIT_INSERT_EXPR when it writes exactly one element at an
     element-aligned bit position.  */
  if (TREE_CODE (lhs) == BIT_FIELD_REF
      && DECL_P (TREE_OPERAND (lhs, 0))
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
      && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
      && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
			  TYPE_SIZE_UNIT
			    (TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0)))), 0)
      && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
	  % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
    return false;

  return true;
}
1544 
/* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
   mark the variable VAR for conversion into SSA.  Variables that become
   eligible for renaming are recorded in SUITABLE_FOR_RENAMING.  */

static void
maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
		    bitmap suitable_for_renaming)
{
  /* Global Variables, result decls cannot be changed.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  if (TREE_ADDRESSABLE (var)
      /* Do not change TREE_ADDRESSABLE if we need to preserve var as
	 a non-register.  Otherwise we are confused and forget to
	 add virtual operands for it.  */
      && (!is_gimple_reg_type (TREE_TYPE (var))
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
    {
      TREE_ADDRESSABLE (var) = 0;
      /* Only register-eligible variables become rename candidates.  */
      if (is_gimple_reg (var))
	bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "No longer having address taken: ");
	  print_generic_expr (dump_file, var);
	  fprintf (dump_file, "\n");
	}
    }

  /* Promote non-volatile complex/vector variables that have no
     non-register needs and are not hard registers to gimple registers.  */
  if (!DECL_GIMPLE_REG_P (var)
      && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (var)
      && (!VAR_P (var) || !DECL_HARD_REGISTER (var)))
    {
      DECL_GIMPLE_REG_P (var) = 1;
      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "Now a gimple register: ");
	  print_generic_expr (dump_file, var);
	  fprintf (dump_file, "\n");
	}
    }
}
1596 
1597 /* Return true when STMT is ASAN mark where second argument is an address
1598    of a local variable.  */
1599 
1600 static bool
1601 is_asan_mark_p (gimple *stmt)
1602 {
1603   if (!gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1604     return false;
1605 
1606   tree addr = get_base_address (gimple_call_arg (stmt, 1));
1607   if (TREE_CODE (addr) == ADDR_EXPR
1608       && VAR_P (TREE_OPERAND (addr, 0)))
1609     {
1610       tree var = TREE_OPERAND (addr, 0);
1611       if (lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1612 			    DECL_ATTRIBUTES (var)))
1613 	return false;
1614 
1615       unsigned addressable = TREE_ADDRESSABLE (var);
1616       TREE_ADDRESSABLE (var) = 0;
1617       bool r = is_gimple_reg (var);
1618       TREE_ADDRESSABLE (var) = addressable;
1619       return r;
1620     }
1621 
1622   return false;
1623 }
1624 
1625 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */
1626 
1627 void
1628 execute_update_addresses_taken (void)
1629 {
1630   basic_block bb;
1631   auto_bitmap addresses_taken;
1632   auto_bitmap not_reg_needs;
1633   auto_bitmap suitable_for_renaming;
1634   tree var;
1635   unsigned i;
1636 
1637   timevar_push (TV_ADDRESS_TAKEN);
1638 
1639   /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1640      the function body.  */
1641   FOR_EACH_BB_FN (bb, cfun)
1642     {
1643       for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1644 	   gsi_next (&gsi))
1645 	{
1646 	  gimple *stmt = gsi_stmt (gsi);
1647 	  enum gimple_code code = gimple_code (stmt);
1648 	  tree decl;
1649 
1650 	  if (code == GIMPLE_CALL)
1651 	    {
1652 	      if (optimize_atomic_compare_exchange_p (stmt))
1653 		{
1654 		  /* For __atomic_compare_exchange_N if the second argument
1655 		     is &var, don't mark var addressable;
1656 		     if it becomes non-addressable, we'll rewrite it into
1657 		     ATOMIC_COMPARE_EXCHANGE call.  */
1658 		  tree arg = gimple_call_arg (stmt, 1);
1659 		  gimple_call_set_arg (stmt, 1, null_pointer_node);
1660 		  gimple_ior_addresses_taken (addresses_taken, stmt);
1661 		  gimple_call_set_arg (stmt, 1, arg);
1662 		}
1663 	      else if (is_asan_mark_p (stmt)
1664 		       || gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
1665 		;
1666 	      else
1667 		gimple_ior_addresses_taken (addresses_taken, stmt);
1668 	    }
1669 	  else
1670 	    /* Note all addresses taken by the stmt.  */
1671 	    gimple_ior_addresses_taken (addresses_taken, stmt);
1672 
1673 	  /* If we have a call or an assignment, see if the lhs contains
1674 	     a local decl that requires not to be a gimple register.  */
1675 	  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1676 	    {
1677               tree lhs = gimple_get_lhs (stmt);
1678               if (lhs
1679 		  && TREE_CODE (lhs) != SSA_NAME
1680 		  && ((code == GIMPLE_CALL && ! DECL_P (lhs))
1681 		      || non_rewritable_lvalue_p (lhs)))
1682 		{
1683 		  decl = get_base_address (lhs);
1684 		  if (DECL_P (decl))
1685 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1686                 }
1687 	    }
1688 
1689 	  if (gimple_assign_single_p (stmt))
1690 	    {
1691 	      tree rhs = gimple_assign_rhs1 (stmt);
1692 	      if ((decl = non_rewritable_mem_ref_base (rhs)))
1693 		bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1694 	    }
1695 
1696 	  else if (code == GIMPLE_CALL)
1697 	    {
1698 	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
1699 		{
1700 		  tree arg = gimple_call_arg (stmt, i);
1701 		  if ((decl = non_rewritable_mem_ref_base (arg)))
1702 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1703 		}
1704 	    }
1705 
1706 	  else if (code == GIMPLE_ASM)
1707 	    {
1708 	      gasm *asm_stmt = as_a <gasm *> (stmt);
1709 	      for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1710 		{
1711 		  tree link = gimple_asm_output_op (asm_stmt, i);
1712 		  tree lhs = TREE_VALUE (link);
1713 		  if (TREE_CODE (lhs) != SSA_NAME)
1714 		    {
1715 		      decl = get_base_address (lhs);
1716 		      if (DECL_P (decl)
1717 			  && (non_rewritable_lvalue_p (lhs)
1718 			      /* We cannot move required conversions from
1719 				 the lhs to the rhs in asm statements, so
1720 				 require we do not need any.  */
1721 			      || !useless_type_conversion_p
1722 			            (TREE_TYPE (lhs), TREE_TYPE (decl))))
1723 			bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1724 		    }
1725 		}
1726 	      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1727 		{
1728 		  tree link = gimple_asm_input_op (asm_stmt, i);
1729 		  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
1730 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1731 		}
1732 	    }
1733 	}
1734 
1735       for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1736 	   gsi_next (&gsi))
1737 	{
1738 	  size_t i;
1739 	  gphi *phi = gsi.phi ();
1740 
1741 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
1742 	    {
1743 	      tree op = PHI_ARG_DEF (phi, i), var;
1744 	      if (TREE_CODE (op) == ADDR_EXPR
1745 		  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1746 		  && DECL_P (var))
1747 		bitmap_set_bit (addresses_taken, DECL_UID (var));
1748 	    }
1749 	}
1750     }
1751 
1752   /* We cannot iterate over all referenced vars because that can contain
1753      unused vars from BLOCK trees, which causes code generation differences
1754      for -g vs. -g0.  */
1755   for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1756     maybe_optimize_var (var, addresses_taken, not_reg_needs,
1757 			suitable_for_renaming);
1758 
1759   FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1760     maybe_optimize_var (var, addresses_taken, not_reg_needs,
1761 			suitable_for_renaming);
1762 
1763   /* Operand caches need to be recomputed for operands referencing the updated
1764      variables and operands need to be rewritten to expose bare symbols.  */
1765   if (!bitmap_empty_p (suitable_for_renaming))
1766     {
1767       FOR_EACH_BB_FN (bb, cfun)
1768 	for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1769 	  {
1770 	    gimple *stmt = gsi_stmt (gsi);
1771 
1772 	    /* Re-write TARGET_MEM_REFs of symbols we want to
1773 	       rewrite into SSA form.  */
1774 	    if (gimple_assign_single_p (stmt))
1775 	      {
1776 		tree lhs = gimple_assign_lhs (stmt);
1777 		tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
1778 		tree sym;
1779 
1780 		/* Rewrite LHS IMAG/REALPART_EXPR similar to
1781 		   gimplify_modify_expr_complex_part.  */
1782 		if ((TREE_CODE (lhs) == IMAGPART_EXPR
1783 		     || TREE_CODE (lhs) == REALPART_EXPR)
1784 		    && DECL_P (TREE_OPERAND (lhs, 0))
1785 		    && bitmap_bit_p (suitable_for_renaming,
1786 				     DECL_UID (TREE_OPERAND (lhs, 0))))
1787 		  {
1788 		    tree other = make_ssa_name (TREE_TYPE (lhs));
1789 		    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
1790 					? REALPART_EXPR : IMAGPART_EXPR,
1791 					TREE_TYPE (other),
1792 					TREE_OPERAND (lhs, 0));
1793 		    gimple *load = gimple_build_assign (other, lrhs);
1794 		    location_t loc = gimple_location (stmt);
1795 		    gimple_set_location (load, loc);
1796 		    gimple_set_vuse (load, gimple_vuse (stmt));
1797 		    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
1798 		    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
1799 		    gimple_assign_set_rhs_with_ops
1800 		      (&gsi, COMPLEX_EXPR,
1801 		       TREE_CODE (lhs) == IMAGPART_EXPR
1802 		       ? other : gimple_assign_rhs1 (stmt),
1803 		       TREE_CODE (lhs) == IMAGPART_EXPR
1804 		       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
1805 		    stmt = gsi_stmt (gsi);
1806 		    unlink_stmt_vdef (stmt);
1807 		    update_stmt (stmt);
1808 		    continue;
1809 		  }
1810 
1811 		/* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
1812 		   into a BIT_INSERT_EXPR.  */
1813 		if (TREE_CODE (lhs) == BIT_FIELD_REF
1814 		    && DECL_P (TREE_OPERAND (lhs, 0))
1815 		    && bitmap_bit_p (suitable_for_renaming,
1816 				     DECL_UID (TREE_OPERAND (lhs, 0)))
1817 		    && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
1818 		    && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
1819 		    && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
1820 					TYPE_SIZE_UNIT (TREE_TYPE
1821 					  (TREE_TYPE (TREE_OPERAND (lhs, 0)))),
1822 					0)
1823 		    && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
1824 			% tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
1825 		  {
1826 		    tree var = TREE_OPERAND (lhs, 0);
1827 		    tree val = gimple_assign_rhs1 (stmt);
1828 		    if (! types_compatible_p (TREE_TYPE (TREE_TYPE (var)),
1829 					      TREE_TYPE (val)))
1830 		      {
1831 			tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (var)));
1832 			gimple *pun
1833 			  = gimple_build_assign (tem,
1834 						 build1 (VIEW_CONVERT_EXPR,
1835 							 TREE_TYPE (tem), val));
1836 			gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
1837 			val = tem;
1838 		      }
1839 		    tree bitpos = TREE_OPERAND (lhs, 2);
1840 		    gimple_assign_set_lhs (stmt, var);
1841 		    gimple_assign_set_rhs_with_ops
1842 		      (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
1843 		    stmt = gsi_stmt (gsi);
1844 		    unlink_stmt_vdef (stmt);
1845 		    update_stmt (stmt);
1846 		    continue;
1847 		  }
1848 
1849 		/* Rewrite a vector insert using a MEM_REF on the LHS
1850 		   into a BIT_INSERT_EXPR.  */
1851 		if (TREE_CODE (lhs) == MEM_REF
1852 		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1853 		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1854 		    && DECL_P (sym)
1855 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
1856 		    && VECTOR_TYPE_P (TREE_TYPE (sym))
1857 		    && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
1858 		    && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
1859 					TYPE_SIZE_UNIT
1860 					  (TREE_TYPE (TREE_TYPE (sym))), 0)
1861 		    && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
1862 		    && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
1863 					TYPE_SIZE_UNIT (TREE_TYPE (sym)))
1864 		    && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
1865 			% tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
1866 		  {
1867 		    tree val = gimple_assign_rhs1 (stmt);
1868 		    if (! types_compatible_p (TREE_TYPE (val),
1869 					      TREE_TYPE (TREE_TYPE (sym))))
1870 		      {
1871 			tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (sym)));
1872 			gimple *pun
1873 			  = gimple_build_assign (tem,
1874 						 build1 (VIEW_CONVERT_EXPR,
1875 							 TREE_TYPE (tem), val));
1876 			gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
1877 			val = tem;
1878 		      }
1879 		    tree bitpos
1880 		      = wide_int_to_tree (bitsizetype,
1881 					  mem_ref_offset (lhs) * BITS_PER_UNIT);
1882 		    gimple_assign_set_lhs (stmt, sym);
1883 		    gimple_assign_set_rhs_with_ops
1884 		      (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
1885 		    stmt = gsi_stmt (gsi);
1886 		    unlink_stmt_vdef (stmt);
1887 		    update_stmt (stmt);
1888 		    continue;
1889 		  }
1890 
1891 		/* We shouldn't have any fancy wrapping of
1892 		   component-refs on the LHS, but look through
1893 		   VIEW_CONVERT_EXPRs as that is easy.  */
1894 		while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1895 		  lhs = TREE_OPERAND (lhs, 0);
1896 		if (TREE_CODE (lhs) == MEM_REF
1897 		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1898 		    && integer_zerop (TREE_OPERAND (lhs, 1))
1899 		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1900 		    && DECL_P (sym)
1901 		    && !TREE_ADDRESSABLE (sym)
1902 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1903 		  lhs = sym;
1904 		else
1905 		  lhs = gimple_assign_lhs (stmt);
1906 
1907 		/* Rewrite the RHS and make sure the resulting assignment
1908 		   is validly typed.  */
1909 		maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
1910 		rhs = gimple_assign_rhs1 (stmt);
1911 		if (gimple_assign_lhs (stmt) != lhs
1912 		    && !useless_type_conversion_p (TREE_TYPE (lhs),
1913 						   TREE_TYPE (rhs)))
1914 		  {
1915 		    if (gimple_clobber_p (stmt))
1916 		      {
1917 			rhs = build_constructor (TREE_TYPE (lhs), NULL);
1918 			TREE_THIS_VOLATILE (rhs) = 1;
1919 		      }
1920 		    else
1921 		      rhs = fold_build1 (VIEW_CONVERT_EXPR,
1922 					 TREE_TYPE (lhs), rhs);
1923 		  }
1924 		if (gimple_assign_lhs (stmt) != lhs)
1925 		  gimple_assign_set_lhs (stmt, lhs);
1926 
1927 		if (gimple_assign_rhs1 (stmt) != rhs)
1928 		  {
1929 		    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1930 		    gimple_assign_set_rhs_from_tree (&gsi, rhs);
1931 		  }
1932 	      }
1933 
1934 	    else if (gimple_code (stmt) == GIMPLE_CALL)
1935 	      {
1936 		unsigned i;
1937 		if (optimize_atomic_compare_exchange_p (stmt))
1938 		  {
1939 		    tree expected = gimple_call_arg (stmt, 1);
1940 		    if (bitmap_bit_p (suitable_for_renaming,
1941 				      DECL_UID (TREE_OPERAND (expected, 0))))
1942 		      {
1943 			fold_builtin_atomic_compare_exchange (&gsi);
1944 			continue;
1945 		      }
1946 		  }
1947 		else if (is_asan_mark_p (stmt))
1948 		  {
1949 		    tree var = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
1950 		    if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
1951 		      {
1952 			unlink_stmt_vdef (stmt);
1953 			if (asan_mark_p (stmt, ASAN_MARK_POISON))
1954 			  {
1955 			    gcall *call
1956 			      = gimple_build_call_internal (IFN_ASAN_POISON, 0);
1957 			    gimple_call_set_lhs (call, var);
1958 			    gsi_replace (&gsi, call, GSI_SAME_STMT);
1959 			  }
1960 			else
1961 			  {
1962 			    /* In ASAN_MARK (UNPOISON, &b, ...) the variable
1963 			       is uninitialized.  Avoid dependencies on
1964 			       previous out of scope value.  */
1965 			    tree clobber
1966 			      = build_constructor (TREE_TYPE (var), NULL);
1967 			    TREE_THIS_VOLATILE (clobber) = 1;
1968 			    gimple *g = gimple_build_assign (var, clobber);
1969 			    gsi_replace (&gsi, g, GSI_SAME_STMT);
1970 			  }
1971 			continue;
1972 		      }
1973 		  }
1974 		else if (gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
1975 		  for (i = 1; i < gimple_call_num_args (stmt); i++)
1976 		    {
1977 		      tree *argp = gimple_call_arg_ptr (stmt, i);
1978 		      if (*argp == null_pointer_node)
1979 			continue;
1980 		      gcc_assert (TREE_CODE (*argp) == ADDR_EXPR
1981 				  && VAR_P (TREE_OPERAND (*argp, 0)));
1982 		      tree var = TREE_OPERAND (*argp, 0);
1983 		      if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
1984 			*argp = null_pointer_node;
1985 		    }
1986 		for (i = 0; i < gimple_call_num_args (stmt); ++i)
1987 		  {
1988 		    tree *argp = gimple_call_arg_ptr (stmt, i);
1989 		    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
1990 		  }
1991 	      }
1992 
1993 	    else if (gimple_code (stmt) == GIMPLE_ASM)
1994 	      {
1995 		gasm *asm_stmt = as_a <gasm *> (stmt);
1996 		unsigned i;
1997 		for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1998 		  {
1999 		    tree link = gimple_asm_output_op (asm_stmt, i);
2000 		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
2001 						suitable_for_renaming);
2002 		  }
2003 		for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
2004 		  {
2005 		    tree link = gimple_asm_input_op (asm_stmt, i);
2006 		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
2007 						suitable_for_renaming);
2008 		  }
2009 	      }
2010 
2011 	    else if (gimple_debug_bind_p (stmt)
2012 		     && gimple_debug_bind_has_value_p (stmt))
2013 	      {
2014 		tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
2015 		tree decl;
2016 		maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
2017 		decl = non_rewritable_mem_ref_base (*valuep);
2018 		if (decl
2019 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
2020 		  gimple_debug_bind_reset_value (stmt);
2021 	      }
2022 
2023 	    if (gimple_references_memory_p (stmt)
2024 		|| is_gimple_debug (stmt))
2025 	      update_stmt (stmt);
2026 
2027 	    gsi_next (&gsi);
2028 	  }
2029 
2030       /* Update SSA form here, we are called as non-pass as well.  */
2031       if (number_of_loops (cfun) > 1
2032 	  && loops_state_satisfies_p (LOOP_CLOSED_SSA))
2033 	rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
2034       else
2035 	update_ssa (TODO_update_ssa);
2036     }
2037 
2038   timevar_pop (TV_ADDRESS_TAKEN);
2039 }
2040 
namespace {

/* Descriptor for the "addressables" pass.  Note that the class below
   overrides neither gate nor execute: the pass does no work of its own.
   Its entire effect comes from TODO_update_address_taken in
   todo_flags_finish, which the pass manager acts on after the pass
   "runs" (presumably by invoking execute_update_addresses_taken above
   — confirm against the pass-manager TODO handling).  */
const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

/* Trivial pass wrapper around the descriptor above; see the comment
   there for why no opt_pass methods are overridden.  */
class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */

}; // class pass_update_address_taken

} // anon namespace
2068 
2069 gimple_opt_pass *
2070 make_pass_update_address_taken (gcc::context *ctxt)
2071 {
2072   return new pass_update_address_taken (ctxt);
2073 }
2074