xref: /dragonfly/contrib/gcc-8.0/gcc/tree-ssa.c (revision 50b09fda)
1 /* Miscellaneous SSA utility functions.
2    Copyright (C) 2001-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "cfghooks.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "diagnostic-core.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "gimple-fold.h"
34 #include "gimplify.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "tree-ssa-loop-manip.h"
38 #include "tree-into-ssa.h"
39 #include "tree-ssa.h"
40 #include "cfgloop.h"
41 #include "cfgexpand.h"
42 #include "tree-cfg.h"
43 #include "tree-dfa.h"
44 #include "stringpool.h"
45 #include "attribs.h"
46 #include "asan.h"
47 
48 /* Pointer map of variable mappings, keyed by edge.  */
49 static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
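/* Typical use: ssa_redirect_edge records the PHI arguments it removes from
   the old destination block here, and flush_pending_stmts later re-adds them
   as PHI arguments at the redirected edge's new destination.  */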
50 
51 
52 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */
53 
54 void
55 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
56 {
57   edge_var_map new_node;
58 
59   if (edge_var_maps == NULL)
60     edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
61 
62   auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
63   new_node.def = def;
64   new_node.result = result;
65   new_node.locus = locus;
66 
67   slot.safe_push (new_node);
68 }
69 
70 
71 /* Clear the var mappings in edge E.  */
72 
73 void
74 redirect_edge_var_map_clear (edge e)
75 {
76   if (!edge_var_maps)
77     return;
78 
79   auto_vec<edge_var_map> *head = edge_var_maps->get (e);
80 
81   if (head)
82     head->release ();
83 }
84 
85 
86 /* Duplicate the redirected var mappings in OLDE in NEWE.
87 
88    This assumes a hash_map can have multiple edges mapping to the same
89    var_map (many to one mapping), since we don't remove the previous mappings.
90    */
91 
92 void
93 redirect_edge_var_map_dup (edge newe, edge olde)
94 {
95   if (!edge_var_maps)
96     return;
97 
98   auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
99   auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
100   if (!old_head)
101     return;
102 
103   new_head->safe_splice (*old_head);
104 }
105 
106 
107 /* Return the variable mappings for a given edge.  If there is none, return
108    NULL.  */
109 
110 vec<edge_var_map> *
111 redirect_edge_var_map_vector (edge e)
112 {
113   /* Hey, what kind of idiot would... you'd be surprised.  */
114   if (!edge_var_maps)
115     return NULL;
116 
117   auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
118   if (!slot)
119     return NULL;
120 
121   return slot;
122 }
123 
124 /* Clear the edge variable mappings.  */
125 
126 void
127 redirect_edge_var_map_empty (void)
128 {
129   if (edge_var_maps)
130     edge_var_maps->empty ();
131 }
132 
133 
134 /* Remove the corresponding arguments from the PHI nodes in E's
135    destination block and redirect it to DEST.  Return redirected edge.
136    The list of removed arguments is stored in a vector accessed
137    through edge_var_maps.  */
138 
139 edge
140 ssa_redirect_edge (edge e, basic_block dest)
141 {
142   gphi_iterator gsi;
143   gphi *phi;
144 
145   redirect_edge_var_map_clear (e);
146 
147   /* Remove the appropriate PHI arguments in E's destination block.
148      If we are redirecting a copied edge, the destination block has
149      neither PHI argument space reserved nor an interesting argument.  */
150   if (! (e->dest->flags & BB_DUPLICATED))
151     for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
152       {
153 	tree def;
154 	source_location locus;
155 
156 	phi = gsi.phi ();
157 	def = gimple_phi_arg_def (phi, e->dest_idx);
158 	locus = gimple_phi_arg_location (phi, e->dest_idx);
159 
160 	if (def == NULL_TREE)
161 	  continue;
162 
163 	redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
164       }
165 
166   e = redirect_edge_succ_nodup (e, dest);
167 
168   return e;
169 }
170 
171 
172 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
173    E->dest.  */
174 
175 void
176 flush_pending_stmts (edge e)
177 {
178   gphi *phi;
179   edge_var_map *vm;
180   int i;
181   gphi_iterator gsi;
182 
183   vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
184   if (!v)
185     return;
186 
187   for (gsi = gsi_start_phis (e->dest), i = 0;
188        !gsi_end_p (gsi) && v->iterate (i, &vm);
189        gsi_next (&gsi), i++)
190     {
191       tree def;
192 
193       phi = gsi.phi ();
194       def = redirect_edge_var_map_def (vm);
195       add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
196     }
197 
198   redirect_edge_var_map_clear (e);
199 }
200 
201 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
202    GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
203    expression with a different value.
204 
205    This will update any annotations (say debug bind stmts) referring
206    to the original LHS, so that they use the RHS instead.  This is
207    done even if NLHS and LHS are the same, for it is understood that
208    the RHS will be modified afterwards, and NLHS will not be assigned
209    an equivalent value.
210 
211    Adjusting any non-annotation uses of the LHS, if needed, is a
212    responsibility of the caller.
213 
214    The effect of this call should be pretty much the same as that of
215    inserting a copy of STMT before STMT, and then removing the
216    original stmt, at which time gsi_remove() would have updated the
217    annotations, but using this function saves all the inserting,
218    copying and removing.  */
219 
220 void
221 gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
222 {
223   if (MAY_HAVE_DEBUG_BIND_STMTS)
224     {
225       tree lhs = gimple_get_lhs (stmt);
226 
227       gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
228 
229       insert_debug_temp_for_var_def (NULL, lhs);
230     }
231 
232   gimple_set_lhs (stmt, nlhs);
233 }
234 
235 
236 /* Given a tree for an expression for which we might want to emit
237    locations or values in debug information (generally a variable, but
238    we might deal with other kinds of trees in the future), return the
239    tree that should be used as the variable of a DEBUG_BIND STMT or
240    VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */
241 
242 tree
243 target_for_debug_bind (tree var)
244 {
245   if (!MAY_HAVE_DEBUG_BIND_STMTS)
246     return NULL_TREE;
247 
248   if (TREE_CODE (var) == SSA_NAME)
249     {
250       var = SSA_NAME_VAR (var);
251       if (var == NULL_TREE)
252 	return NULL_TREE;
253     }
254 
255   if ((!VAR_P (var) || VAR_DECL_IS_VIRTUAL_OPERAND (var))
256       && TREE_CODE (var) != PARM_DECL)
257     return NULL_TREE;
258 
259   if (DECL_HAS_VALUE_EXPR_P (var))
260     return target_for_debug_bind (DECL_VALUE_EXPR (var));
261 
262   if (DECL_IGNORED_P (var))
263     return NULL_TREE;
264 
265   /* var-tracking only tracks registers.  */
266   if (!is_gimple_reg_type (TREE_TYPE (var)))
267     return NULL_TREE;
268 
269   return var;
270 }
271 
272 /* Called via walk_tree, look for SSA_NAMEs that have already been
273    released.  */
274 
275 static tree
276 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
277 {
278   struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
279 
280   if (wi && wi->is_lhs)
281     return NULL_TREE;
282 
283   if (TREE_CODE (*tp) == SSA_NAME)
284     {
285       if (SSA_NAME_IN_FREE_LIST (*tp))
286 	return *tp;
287 
288       *walk_subtrees = 0;
289     }
290   else if (IS_TYPE_OR_DECL_P (*tp))
291     *walk_subtrees = 0;
292 
293   return NULL_TREE;
294 }
295 
296 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
297    by other DEBUG stmts, and replace uses of the DEF with the
298    newly-created debug temp.  */
299 
300 void
301 insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
302 {
303   imm_use_iterator imm_iter;
304   use_operand_p use_p;
305   gimple *stmt;
306   gimple *def_stmt = NULL;
307   int usecount = 0;
308   tree value = NULL;
309 
310   if (!MAY_HAVE_DEBUG_BIND_STMTS)
311     return;
312 
313   /* If this name has already been registered for replacement, do nothing
314      as anything that uses this name isn't in SSA form.  */
315   if (name_registered_for_update_p (var))
316     return;
317 
318   /* Check whether there are debug stmts that reference this variable and,
319      if there are, decide whether we should use a debug temp.  */
320   FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
321     {
322       stmt = USE_STMT (use_p);
323 
324       if (!gimple_debug_bind_p (stmt))
325 	continue;
326 
327       if (usecount++)
328 	break;
329 
330       if (gimple_debug_bind_get_value (stmt) != var)
331 	{
332 	  /* Count this as an additional use, so as to make sure we
333 	     use a temp unless VAR's definition has a SINGLE_RHS that
334 	     can be shared.  */
335 	  usecount++;
336 	  break;
337 	}
338     }
339 
340   if (!usecount)
341     return;
342 
343   if (gsi)
344     def_stmt = gsi_stmt (*gsi);
345   else
346     def_stmt = SSA_NAME_DEF_STMT (var);
347 
348   /* If we didn't get an insertion point, and the stmt has already
349      been removed, we won't be able to insert the debug bind stmt, so
350      we'll have to drop debug information.  */
351   if (gimple_code (def_stmt) == GIMPLE_PHI)
352     {
353       value = degenerate_phi_result (as_a <gphi *> (def_stmt));
354       if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
355 	value = NULL;
356       /* error_mark_node is what fixup_noreturn_call changes PHI arguments
357 	 to.  */
358       else if (value == error_mark_node)
359 	value = NULL;
360     }
361   else if (is_gimple_assign (def_stmt))
362     {
363       bool no_value = false;
364 
365       if (!dom_info_available_p (CDI_DOMINATORS))
366 	{
367 	  struct walk_stmt_info wi;
368 
369 	  memset (&wi, 0, sizeof (wi));
370 
371 	  /* When removing blocks without following reverse dominance
372 	     order, we may sometimes encounter SSA_NAMEs that have
373 	     already been released, referenced in other SSA_DEFs that
374 	     we're about to release.  Consider:
375 
376 	     <bb X>:
377 	     v_1 = foo;
378 
379 	     <bb Y>:
380 	     w_2 = v_1 + bar;
381 	     # DEBUG w => w_2
382 
383 	     If we deleted BB X first, propagating the value of w_2
384 	     won't do us any good.  It's too late to recover the
385 	     original definition of v_1: when it was deleted, it was
386 	     only referenced in other DEFs, it couldn't possibly know
387 	     it should have been retained, and propagating every
388 	     single DEF just in case it might have to be propagated
389 	     into a DEBUG STMT would probably be too wasteful.
390 
391 	     When dominator information is not readily available, we
392 	     check for and accept some loss of debug information.  But
393 	     if it is available, there's no excuse for us to remove
394 	     blocks in the wrong order, so we don't even check for
395 	     dead SSA NAMEs.  SSA verification shall catch any
396 	     errors.  */
397 	  if ((!gsi && !gimple_bb (def_stmt))
398 	      || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
399 	    no_value = true;
400 	}
401 
402       if (!no_value)
403 	value = gimple_assign_rhs_to_tree (def_stmt);
404     }
405 
406   if (value)
407     {
408       /* If there's a single use of VAR, and VAR is the entire debug
409 	 expression (usecount would have been incremented again
410 	 otherwise), and the definition involves only constants and
411 	 SSA names, then we can propagate VALUE into this single use,
412 	 avoiding the temp.
413 
414 	 We can also avoid using a temp if VALUE can be shared and
415 	 propagated into all uses, without generating expressions that
416 	 wouldn't be valid gimple RHSs.
417 
418 	 Other cases that would require unsharing or non-gimple RHSs
419 	 are deferred to a debug temp, although we could avoid temps
420 	 at the expense of duplication of expressions.  */
421 
422       if (CONSTANT_CLASS_P (value)
423 	  || gimple_code (def_stmt) == GIMPLE_PHI
424 	  || (usecount == 1
425 	      && (!gimple_assign_single_p (def_stmt)
426 		  || is_gimple_min_invariant (value)))
427 	  || is_gimple_reg (value))
428 	;
429       else
430 	{
431 	  gdebug *def_temp;
432 	  tree vexpr = make_node (DEBUG_EXPR_DECL);
433 
434 	  def_temp = gimple_build_debug_bind (vexpr,
435 					      unshare_expr (value),
436 					      def_stmt);
437 
438 	  DECL_ARTIFICIAL (vexpr) = 1;
439 	  TREE_TYPE (vexpr) = TREE_TYPE (value);
440 	  if (DECL_P (value))
441 	    SET_DECL_MODE (vexpr, DECL_MODE (value));
442 	  else
443 	    SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (value)));
444 
445 	  if (gsi)
446 	    gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
447 	  else
448 	    {
449 	      gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
450 	      gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
451 	    }
452 
453 	  value = vexpr;
454 	}
455     }
456 
457   FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
458     {
459       if (!gimple_debug_bind_p (stmt))
460 	continue;
461 
462       if (value)
463 	{
464 	  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
465 	    /* unshare_expr is not needed here.  vexpr is either a
466 	       SINGLE_RHS, that can be safely shared, some other RHS
467 	       that was unshared when we found it had a single debug
468 	       use, or a DEBUG_EXPR_DECL, that can be safely
469 	       shared.  */
470 	    SET_USE (use_p, unshare_expr (value));
471 	  /* If we didn't replace uses with a debug decl fold the
472 	     resulting expression.  Otherwise we end up with invalid IL.  */
473 	  if (TREE_CODE (value) != DEBUG_EXPR_DECL)
474 	    {
475 	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
476 	      fold_stmt_inplace (&gsi);
477 	    }
478 	}
479       else
480 	gimple_debug_bind_reset_value (stmt);
481 
482       update_stmt (stmt);
483     }
484 }
485 
486 
487 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
488    other DEBUG stmts, and replace uses of the DEF with the
489    newly-created debug temp.  */
490 
491 void
492 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
493 {
494   gimple *stmt;
495   ssa_op_iter op_iter;
496   def_operand_p def_p;
497 
498   if (!MAY_HAVE_DEBUG_BIND_STMTS)
499     return;
500 
501   stmt = gsi_stmt (*gsi);
502 
503   FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
504     {
505       tree var = DEF_FROM_PTR (def_p);
506 
507       if (TREE_CODE (var) != SSA_NAME)
508 	continue;
509 
510       insert_debug_temp_for_var_def (gsi, var);
511     }
512 }
513 
514 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT.  */
515 
516 void
517 reset_debug_uses (gimple *stmt)
518 {
519   ssa_op_iter op_iter;
520   def_operand_p def_p;
521   imm_use_iterator imm_iter;
522   gimple *use_stmt;
523 
524   if (!MAY_HAVE_DEBUG_BIND_STMTS)
525     return;
526 
527   FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
528     {
529       tree var = DEF_FROM_PTR (def_p);
530 
531       if (TREE_CODE (var) != SSA_NAME)
532 	continue;
533 
534       FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
535 	{
536 	  if (!gimple_debug_bind_p (use_stmt))
537 	    continue;
538 
539 	  gimple_debug_bind_reset_value (use_stmt);
540 	  update_stmt (use_stmt);
541 	}
542     }
543 }
544 
545 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
546    dominated stmts before their dominators, so that release_ssa_defs
547    stands a chance of propagating DEFs into debug bind stmts.  */
548 
549 void
550 release_defs_bitset (bitmap toremove)
551 {
552   unsigned j;
553   bitmap_iterator bi;
554 
555   /* Performing a topological sort is probably overkill; this will
556      most likely run in slightly superlinear time, rather than the
557      pathological quadratic worst case.  */
558   while (!bitmap_empty_p (toremove))
559     {
560       unsigned to_remove_bit = -1U;
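      /* TOREMOVE is not modified while EXECUTE_IF_SET_IN_BITMAP walks it;
	 instead the previously processed version is remembered in
	 TO_REMOVE_BIT and its bit is cleared on the next iteration or
	 after the loop.  */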
561       EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
562 	{
563 	  if (to_remove_bit != -1U)
564 	    {
565 	      bitmap_clear_bit (toremove, to_remove_bit);
566 	      to_remove_bit = -1U;
567 	    }
568 
569 	  bool remove_now = true;
570 	  tree var = ssa_name (j);
571 	  gimple *stmt;
572 	  imm_use_iterator uit;
573 
574 	  FOR_EACH_IMM_USE_STMT (stmt, uit, var)
575 	    {
576 	      ssa_op_iter dit;
577 	      def_operand_p def_p;
578 
579 	      /* We can't propagate PHI nodes into debug stmts.  */
580 	      if (gimple_code (stmt) == GIMPLE_PHI
581 		  || is_gimple_debug (stmt))
582 		continue;
583 
584 	      /* If we find another definition to remove that uses
585 		 the one we're looking at, defer the removal of this
586 		 one, so that it can be propagated into debug stmts
587 		 after the other is.  */
588 	      FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
589 		{
590 		  tree odef = DEF_FROM_PTR (def_p);
591 
592 		  if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
593 		    {
594 		      remove_now = false;
595 		      break;
596 		    }
597 		}
598 
599 	      if (!remove_now)
600 		BREAK_FROM_IMM_USE_STMT (uit);
601 	    }
602 
603 	  if (remove_now)
604 	    {
605 	      gimple *def = SSA_NAME_DEF_STMT (var);
606 	      gimple_stmt_iterator gsi = gsi_for_stmt (def);
607 
608 	      if (gimple_code (def) == GIMPLE_PHI)
609 		remove_phi_node (&gsi, true);
610 	      else
611 		{
612 		  gsi_remove (&gsi, true);
613 		  release_defs (def);
614 		}
615 
616 	      to_remove_bit = j;
617 	    }
618 	}
619       if (to_remove_bit != -1U)
620 	bitmap_clear_bit (toremove, to_remove_bit);
621     }
622 
623 }
624 
625 /* Recursively verify virtual SSA form starting at BB; return true on error.  */
626 
627 bool
628 verify_vssa (basic_block bb, tree current_vdef, sbitmap visited)
629 {
630   bool err = false;
631 
632   if (bitmap_bit_p (visited, bb->index))
633     return false;
634 
635   bitmap_set_bit (visited, bb->index);
636 
637   /* Pick up the single virtual PHI def.  */
638   gphi *phi = NULL;
639   for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
640        gsi_next (&si))
641     {
642       tree res = gimple_phi_result (si.phi ());
643       if (virtual_operand_p (res))
644 	{
645 	  if (phi)
646 	    {
647 	      error ("multiple virtual PHI nodes in BB %d", bb->index);
648 	      print_gimple_stmt (stderr, phi, 0);
649 	      print_gimple_stmt (stderr, si.phi (), 0);
650 	      err = true;
651 	    }
652 	  else
653 	    phi = si.phi ();
654 	}
655     }
656   if (phi)
657     {
658       current_vdef = gimple_phi_result (phi);
659       if (TREE_CODE (current_vdef) != SSA_NAME)
660 	{
661 	  error ("virtual definition is not an SSA name");
662 	  print_gimple_stmt (stderr, phi, 0);
663 	  err = true;
664 	}
665     }
666 
667   /* Verify stmts.  */
668   for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
669        gsi_next (&gsi))
670     {
671       gimple *stmt = gsi_stmt (gsi);
672       tree vuse = gimple_vuse (stmt);
673       if (vuse)
674 	{
675 	  if (vuse != current_vdef)
676 	    {
677 	      error ("stmt with wrong VUSE");
678 	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
679 	      fprintf (stderr, "expected ");
680 	      print_generic_expr (stderr, current_vdef);
681 	      fprintf (stderr, "\n");
682 	      err = true;
683 	    }
684 	  tree vdef = gimple_vdef (stmt);
685 	  if (vdef)
686 	    {
687 	      current_vdef = vdef;
688 	      if (TREE_CODE (current_vdef) != SSA_NAME)
689 		{
690 		  error ("virtual definition is not an SSA name");
691 		  print_gimple_stmt (stderr, stmt, 0);
692 		  err = true;
693 		}
694 	    }
695 	}
696     }
697 
698   /* Verify destination PHI uses and recurse.  */
699   edge_iterator ei;
700   edge e;
701   FOR_EACH_EDGE (e, ei, bb->succs)
702     {
703       gphi *phi = get_virtual_phi (e->dest);
704       if (phi
705 	  && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef)
706 	{
707 	  error ("PHI node with wrong VUSE on edge from BB %d",
708 		 e->src->index);
709 	  print_gimple_stmt (stderr, phi, 0, TDF_VOPS);
710 	  fprintf (stderr, "expected ");
711 	  print_generic_expr (stderr, current_vdef);
712 	  fprintf (stderr, "\n");
713 	  err = true;
714 	}
715 
716       /* Recurse.  */
717       err |= verify_vssa (e->dest, current_vdef, visited);
718     }
719 
720   return err;
721 }
722 
723 /* Return true if SSA_NAME is malformed.
724 
725    IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
726       operand.  */
727 
728 static bool
729 verify_ssa_name (tree ssa_name, bool is_virtual)
730 {
731   if (TREE_CODE (ssa_name) != SSA_NAME)
732     {
733       error ("expected an SSA_NAME object");
734       return true;
735     }
736 
737   if (SSA_NAME_IN_FREE_LIST (ssa_name))
738     {
739       error ("found an SSA_NAME that had been released into the free pool");
740       return true;
741     }
742 
743   if (SSA_NAME_VAR (ssa_name) != NULL_TREE
744       && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
745     {
746       error ("type mismatch between an SSA_NAME and its symbol");
747       return true;
748     }
749 
750   if (is_virtual && !virtual_operand_p (ssa_name))
751     {
752       error ("found a virtual definition for a GIMPLE register");
753       return true;
754     }
755 
756   if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
757     {
758       error ("virtual SSA name for non-VOP decl");
759       return true;
760     }
761 
762   if (!is_virtual && virtual_operand_p (ssa_name))
763     {
764       error ("found a real definition for a non-register");
765       return true;
766     }
767 
768   if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
769       && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
770     {
771       error ("found a default name with a non-empty defining statement");
772       return true;
773     }
774 
775   return false;
776 }
777 
778 
779 /* Return true if the definition of SSA_NAME at block BB is malformed.
780 
781    STMT is the statement where SSA_NAME is created.
782 
783    DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
784       version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
785       it means that the block in that array slot contains the
786       definition of SSA_NAME.
787 
788    IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */
789 
790 static bool
791 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
792 	    gimple *stmt, bool is_virtual)
793 {
794   if (verify_ssa_name (ssa_name, is_virtual))
795     goto err;
796 
797   if (SSA_NAME_VAR (ssa_name)
798       && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
799       && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
800     {
801       error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
802       goto err;
803     }
804 
805   if (definition_block[SSA_NAME_VERSION (ssa_name)])
806     {
807       error ("SSA_NAME created in two different blocks %i and %i",
808 	     definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
809       goto err;
810     }
811 
812   definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
813 
814   if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
815     {
816       error ("SSA_NAME_DEF_STMT is wrong");
817       fprintf (stderr, "Expected definition statement:\n");
818       print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
819       fprintf (stderr, "\nActual definition statement:\n");
820       print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
821       goto err;
822     }
823 
824   return false;
825 
826 err:
827   fprintf (stderr, "while verifying SSA_NAME ");
828   print_generic_expr (stderr, ssa_name);
829   fprintf (stderr, " in statement\n");
830   print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
831 
832   return true;
833 }
834 
835 
836 /* Return true if the use of SSA_NAME at statement STMT in block BB is
837    malformed.
838 
839    DEF_BB is the block where SSA_NAME was found to be created.
840 
841    IDOM contains immediate dominator information for the flowgraph.
842 
843    CHECK_ABNORMAL is true if the caller wants to check whether this use
844       is flowing through an abnormal edge (only used when checking PHI
845       arguments).
846 
847    If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
848      that are defined before STMT in basic block BB.  */
849 
850 static bool
851 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
852 	    gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
853 {
854   bool err = false;
855   tree ssa_name = USE_FROM_PTR (use_p);
856 
857   if (!TREE_VISITED (ssa_name))
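  /* Run the immediate-use list check only the first time a name is seen.  */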
858     if (verify_imm_links (stderr, ssa_name))
859       err = true;
860 
861   TREE_VISITED (ssa_name) = 1;
862 
863   if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
864       && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
865     ; /* Default definitions have empty statements.  Nothing to do.  */
866   else if (!def_bb)
867     {
868       error ("missing definition");
869       err = true;
870     }
871   else if (bb != def_bb
872 	   && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
873     {
874       error ("definition in block %i does not dominate use in block %i",
875 	     def_bb->index, bb->index);
876       err = true;
877     }
878   else if (bb == def_bb
879 	   && names_defined_in_bb != NULL
880 	   && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
881     {
882       error ("definition in block %i follows the use", def_bb->index);
883       err = true;
884     }
885 
886   if (check_abnormal
887       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
888     {
889       error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
890       err = true;
891     }
892 
893   /* Make sure the use is on an appropriate list by checking that the
894      previous element refers to the same SSA name.  */
895   if (use_p->prev == NULL)
896     {
897       error ("no immediate_use list");
898       err = true;
899     }
900   else
901     {
902       tree listvar;
903       if (use_p->prev->use == NULL)
904 	listvar = use_p->prev->loc.ssa_name;
905       else
906 	listvar = USE_FROM_PTR (use_p->prev);
907       if (listvar != ssa_name)
908         {
909 	  error ("wrong immediate use list");
910 	  err = true;
911 	}
912     }
913 
914   if (err)
915     {
916       fprintf (stderr, "for SSA_NAME: ");
917       print_generic_expr (stderr, ssa_name, TDF_VOPS);
918       fprintf (stderr, " in statement:\n");
919       print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
920     }
921 
922   return err;
923 }
924 
925 
926 /* Return true if any of the arguments for PHI node PHI at block BB is
927    malformed.
928 
929    DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
930       version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
931       it means that the block in that array slot contains the
932       definition of SSA_NAME.  */
933 
934 static bool
935 verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
936 {
937   edge e;
938   bool err = false;
939   size_t i, phi_num_args = gimple_phi_num_args (phi);
940 
941   if (EDGE_COUNT (bb->preds) != phi_num_args)
942     {
943       error ("incoming edge count does not match number of PHI arguments");
944       err = true;
945       goto error;
946     }
947 
948   for (i = 0; i < phi_num_args; i++)
949     {
950       use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
951       tree op = USE_FROM_PTR (op_p);
952 
953       e = EDGE_PRED (bb, i);
954 
955       if (op == NULL_TREE)
956 	{
957 	  error ("PHI argument is missing for edge %d->%d",
958 	         e->src->index,
959 		 e->dest->index);
960 	  err = true;
961 	  goto error;
962 	}
963 
964       if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
965 	{
966 	  error ("PHI argument is not SSA_NAME, or invariant");
967 	  err = true;
968 	}
969 
970       if (TREE_CODE (op) == SSA_NAME)
971 	{
972 	  err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
973 	  err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
974 			     op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
975 	}
976 
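      /* A PHI argument that takes an address must have the base variable
	 marked TREE_ADDRESSABLE.  */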
977       if (TREE_CODE (op) == ADDR_EXPR)
978 	{
979 	  tree base = TREE_OPERAND (op, 0);
980 	  while (handled_component_p (base))
981 	    base = TREE_OPERAND (base, 0);
982 	  if ((VAR_P (base)
983 	       || TREE_CODE (base) == PARM_DECL
984 	       || TREE_CODE (base) == RESULT_DECL)
985 	      && !TREE_ADDRESSABLE (base))
986 	    {
987 	      error ("address taken, but ADDRESSABLE bit not set");
988 	      err = true;
989 	    }
990 	}
991 
992       if (e->dest != bb)
993 	{
994 	  error ("wrong edge %d->%d for PHI argument",
995 	         e->src->index, e->dest->index);
996 	  err = true;
997 	}
998 
999       if (err)
1000 	{
1001 	  fprintf (stderr, "PHI argument\n");
1002 	  print_generic_stmt (stderr, op, TDF_VOPS);
1003 	  goto error;
1004 	}
1005     }
1006 
1007 error:
1008   if (err)
1009     {
1010       fprintf (stderr, "for PHI node\n");
1011       print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
1012     }
1013 
1014 
1015   return err;
1016 }
1017 
1018 
1019 /* Verify common invariants in the SSA web.
1020    TODO: verify the variable annotations.  */
1021 
1022 DEBUG_FUNCTION void
1023 verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
1024 {
1025   basic_block bb;
1026   basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
1027   ssa_op_iter iter;
1028   tree op;
1029   enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
1030   auto_bitmap names_defined_in_bb;
1031 
1032   gcc_assert (!need_ssa_update_p (cfun));
1033 
1034   timevar_push (TV_TREE_SSA_VERIFY);
1035 
1036     {
1037       /* Keep track of SSA names present in the IL.  */
1038       size_t i;
1039       tree name;
1040       hash_map <void *, tree> ssa_info;
1041 
1042       FOR_EACH_SSA_NAME (i, name, cfun)
1043 	{
1044 	  gimple *stmt;
1045 	  TREE_VISITED (name) = 0;
1046 
1047 	  verify_ssa_name (name, virtual_operand_p (name));
1048 
1049 	  stmt = SSA_NAME_DEF_STMT (name);
1050 	  if (!gimple_nop_p (stmt))
1051 	    {
1052 	      basic_block bb = gimple_bb (stmt);
1053 	      if (verify_def (bb, definition_block,
1054 			      name, stmt, virtual_operand_p (name)))
1055 		goto err;
1056 	    }
1057 
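	  /* Points-to and range info must not be shared between distinct
	     SSA names.  */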
1058 	  void *info = NULL;
1059 	  if (POINTER_TYPE_P (TREE_TYPE (name)))
1060 	    info = SSA_NAME_PTR_INFO (name);
1061 	  else if (INTEGRAL_TYPE_P (TREE_TYPE (name)))
1062 	    info = SSA_NAME_RANGE_INFO (name);
1063 	  if (info)
1064 	    {
1065 	      bool existed;
1066 	      tree &val = ssa_info.get_or_insert (info, &existed);
1067 	      if (existed)
1068 		{
1069 		  error ("shared SSA name info");
1070 		  print_generic_expr (stderr, val);
1071 		  fprintf (stderr, " and ");
1072 		  print_generic_expr (stderr, name);
1073 		  fprintf (stderr, "\n");
1074 		  goto err;
1075 		}
1076 	      else
1077 		val = name;
1078 	    }
1079 	}
1080     }
1081 
1082   calculate_dominance_info (CDI_DOMINATORS);
1083 
1084   /* Now verify all the uses and make sure they agree with the definitions
1085      found in the previous pass.  */
1086   FOR_EACH_BB_FN (bb, cfun)
1087     {
1088       edge e;
1089       edge_iterator ei;
1090 
1091       /* Make sure that all edges have a clear 'aux' field.  */
1092       FOR_EACH_EDGE (e, ei, bb->preds)
1093 	{
1094 	  if (e->aux)
1095 	    {
1096 	      error ("AUX pointer initialized for edge %d->%d", e->src->index,
1097 		      e->dest->index);
1098 	      goto err;
1099 	    }
1100 	}
1101 
1102       /* Verify the arguments for every PHI node in the block.  */
1103       for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1104 	{
1105 	  gphi *phi = gsi.phi ();
1106 	  if (verify_phi_args (phi, bb, definition_block))
1107 	    goto err;
1108 
1109 	  bitmap_set_bit (names_defined_in_bb,
1110 			  SSA_NAME_VERSION (gimple_phi_result (phi)));
1111 	}
1112 
1113       /* Now verify all the uses and vuses in every statement of the block.  */
1114       for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1115 	   gsi_next (&gsi))
1116 	{
1117 	  gimple *stmt = gsi_stmt (gsi);
1118 	  use_operand_p use_p;
1119 
1120 	  if (check_modified_stmt && gimple_modified_p (stmt))
1121 	    {
1122 	      error ("stmt (%p) marked modified after optimization pass: ",
1123 		     (void *)stmt);
1124 	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1125 	      goto err;
1126 	    }
1127 
1128 	  if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
1129 	    {
1130 	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1131 	      goto err;
1132 	    }
1133 
1134 	  if (gimple_debug_bind_p (stmt)
1135 	      && !gimple_debug_bind_has_value_p (stmt))
1136 	    continue;
1137 
1138 	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
1139 	    {
1140 	      op = USE_FROM_PTR (use_p);
1141 	      if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
1142 			      use_p, stmt, false, names_defined_in_bb))
1143 		goto err;
1144 	    }
1145 
1146 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
1147 	    {
1148 	      if (SSA_NAME_DEF_STMT (op) != stmt)
1149 		{
1150 		  error ("SSA_NAME_DEF_STMT is wrong");
1151 		  fprintf (stderr, "Expected definition statement:\n");
1152 		  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1153 		  fprintf (stderr, "\nActual definition statement:\n");
1154 		  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1155 				     4, TDF_VOPS);
1156 		  goto err;
1157 		}
1158 	      bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1159 	    }
1160 	}
1161 
1162       bitmap_clear (names_defined_in_bb);
1163     }
1164 
1165   free (definition_block);
1166 
1167   if (gimple_vop (cfun)
1168       && ssa_default_def (cfun, gimple_vop (cfun)))
1169     {
1170       auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
1171       bitmap_clear (visited);
1172       if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun),
1173 		       ssa_default_def (cfun, gimple_vop (cfun)), visited))
1174 	goto err;
1175     }
1176 
1177   /* Restore the dominance information to its prior known state, so
1178      that we do not perturb the compiler's subsequent behavior.  */
1179   if (orig_dom_state == DOM_NONE)
1180     free_dominance_info (CDI_DOMINATORS);
1181   else
1182     set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
1183 
1184   timevar_pop (TV_TREE_SSA_VERIFY);
1185   return;
1186 
1187 err:
1188   internal_error ("verify_ssa failed");
1189 }
1190 
1191 
1192 /* Initialize global DFA and SSA structures.  */
1193 
1194 void
1195 init_tree_ssa (struct function *fn)
1196 {
1197   fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
1198   fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
1199   pt_solution_reset (&fn->gimple_df->escaped);
1200   init_ssanames (fn, 0);
1201 }
1202 
1203 /* Deallocate memory associated with SSA data structures for FNDECL.  */
1204 
1205 void
1206 delete_tree_ssa (struct function *fn)
1207 {
1208   fini_ssanames (fn);
1209 
1210   /* We no longer maintain the SSA operand cache at this point.  */
1211   if (ssa_operands_active (fn))
1212     fini_ssa_operands (fn);
1213 
1214   fn->gimple_df->default_defs->empty ();
1215   fn->gimple_df->default_defs = NULL;
1216   pt_solution_reset (&fn->gimple_df->escaped);
1217   if (fn->gimple_df->decls_to_pointers != NULL)
1218     delete fn->gimple_df->decls_to_pointers;
1219   fn->gimple_df->decls_to_pointers = NULL;
1220   fn->gimple_df = NULL;
1221 
1222   /* We no longer need the edge variable maps.  */
1223   redirect_edge_var_map_empty ();
1224 }
1225 
1226 /* Return true if EXPR is a useless type conversion, otherwise return
1227    false.  */
1228 
1229 bool
1230 tree_ssa_useless_type_conversion (tree expr)
1231 {
1232   /* If we have an assignment that merely uses a NOP_EXPR to change
1233      the top of the RHS to the type of the LHS and the type conversion
1234      is "safe", then strip away the type conversion so that we can
1235      enter LHS = RHS into the const_and_copies table.  */
1236   if (CONVERT_EXPR_P (expr)
1237       || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1238       || TREE_CODE (expr) == NON_LVALUE_EXPR)
1239     return useless_type_conversion_p
1240       (TREE_TYPE (expr),
1241        TREE_TYPE (TREE_OPERAND (expr, 0)));
1242 
1243   return false;
1244 }
1245 
1246 /* Strip conversions from EXP according to
1247    tree_ssa_useless_type_conversion and return the resulting
1248    expression.  */
1249 
1250 tree
1251 tree_ssa_strip_useless_type_conversions (tree exp)
1252 {
1253   while (tree_ssa_useless_type_conversion (exp))
1254     exp = TREE_OPERAND (exp, 0);
1255   return exp;
1256 }
1257 
1258 /* Return true if T, an SSA_NAME, has an implicit default defined value.  */
1259 
1260 bool
1261 ssa_defined_default_def_p (tree t)
1262 {
1263   tree var = SSA_NAME_VAR (t);
1264 
1265   if (!var)
1266     ;
1267   /* Parameters get their initial value from the function entry.  */
1268   else if (TREE_CODE (var) == PARM_DECL)
1269     return true;
1270   /* When returning by reference the return address is actually a hidden
1271      parameter.  */
1272   else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1273     return true;
1274   /* Hard register variables get their initial value from the ether.  */
1275   else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1276     return true;
1277 
1278   return false;
1279 }
1280 
1281 
1282 /* Return true if T, an SSA_NAME, has an undefined value.  PARTIAL is what
1283    should be returned if the value is only partially undefined.  */
1284 
1285 bool
1286 ssa_undefined_value_p (tree t, bool partial)
1287 {
1288   gimple *def_stmt;
1289 
1290   if (ssa_defined_default_def_p (t))
1291     return false;
1292 
1293   /* The value is undefined iff its definition statement is empty.  */
1294   def_stmt = SSA_NAME_DEF_STMT (t);
1295   if (gimple_nop_p (def_stmt))
1296     return true;
1297 
1298   /* Check whether the complex value was only partially defined.  */
1299   if (partial && is_gimple_assign (def_stmt)
1300       && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
1301     {
1302       tree rhs1, rhs2;
1303 
1304       rhs1 = gimple_assign_rhs1 (def_stmt);
1305       rhs2 = gimple_assign_rhs2 (def_stmt);
1306       return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
1307 	     || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
1308     }
1309   return false;
1310 }
1311 
1312 
1313 /* Return TRUE iff STMT, a gimple statement, references an undefined
1314    SSA name.  */
1315 
1316 bool
1317 gimple_uses_undefined_value_p (gimple *stmt)
1318 {
1319   ssa_op_iter iter;
1320   tree op;
1321 
1322   FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
1323     if (ssa_undefined_value_p (op))
1324       return true;
1325 
1326   return false;
1327 }
1328 
1329 
1330 
1331 /* If necessary, rewrite the base of the reference tree *TP from
1332    a MEM_REF to a plain or converted symbol.  */
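/* For example, a load MEM[(int *)&a] from a non-addressable int variable A
   becomes plain A, and an aligned element load from a non-addressable
   vector variable becomes a BIT_FIELD_REF of it.  */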
1333 
1334 static void
1335 maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
1336 {
1337   tree sym;
1338 
1339   while (handled_component_p (*tp))
1340     tp = &TREE_OPERAND (*tp, 0);
1341   if (TREE_CODE (*tp) == MEM_REF
1342       && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
1343       && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
1344       && DECL_P (sym)
1345       && !TREE_ADDRESSABLE (sym)
1346       && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
1347       && is_gimple_reg_type (TREE_TYPE (*tp))
1348       && ! VOID_TYPE_P (TREE_TYPE (*tp)))
1349     {
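      /* An aligned access to one element of a vector SYM is rewritten
	 into a BIT_FIELD_REF.  */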
1350       if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
1351 	  && useless_type_conversion_p (TREE_TYPE (*tp),
1352 					TREE_TYPE (TREE_TYPE (sym)))
1353 	  && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
1354 			    TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
1355 	{
1356 	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1357 			TYPE_SIZE (TREE_TYPE (*tp)),
1358 			int_const_binop (MULT_EXPR,
1359 					 bitsize_int (BITS_PER_UNIT),
1360 					 TREE_OPERAND (*tp, 1)));
1361 	}
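      /* An access to the first or second part of a complex SYM is
	 rewritten into a REALPART_EXPR or IMAGPART_EXPR.  */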
1362       else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
1363 	       && useless_type_conversion_p (TREE_TYPE (*tp),
1364 					     TREE_TYPE (TREE_TYPE (sym))))
1365 	{
1366 	  *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
1367 			? REALPART_EXPR : IMAGPART_EXPR,
1368 			TREE_TYPE (*tp), sym);
1369 	}
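      /* A same-size access at offset zero becomes SYM itself, possibly
	 wrapped in a VIEW_CONVERT_EXPR.  */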
1370       else if (integer_zerop (TREE_OPERAND (*tp, 1))
1371 	       && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp)))
1372 	{
1373 	  if (!useless_type_conversion_p (TREE_TYPE (*tp),
1374 					  TREE_TYPE (sym)))
1375 	    *tp = build1 (VIEW_CONVERT_EXPR,
1376 			  TREE_TYPE (*tp), sym);
1377 	  else
1378 	    *tp = sym;
1379 	}
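      /* Otherwise extract the accessed, byte-aligned piece with a
	 BIT_FIELD_REF.  */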
1380       else if (DECL_SIZE (sym)
1381 	       && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
1382 	       && (known_subrange_p
1383 		   (mem_ref_offset (*tp),
1384 		    wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
1385 		    0, wi::to_offset (DECL_SIZE_UNIT (sym))))
1386 	       && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
1387 		   || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
1388 		       == TYPE_PRECISION (TREE_TYPE (*tp))))
1389 	       && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))),
1390 				  BITS_PER_UNIT) == 0)
1391 	{
1392 	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1393 			TYPE_SIZE (TREE_TYPE (*tp)),
1394 			wide_int_to_tree (bitsizetype,
1395 					  mem_ref_offset (*tp)
1396 					  << LOG2_BITS_PER_UNIT));
1397 	}
1398     }
1399 }
1400 
1401 /* For a tree REF return its base if it is the base of a MEM_REF
1402    that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */
1403 
1404 static tree
1405 non_rewritable_mem_ref_base (tree ref)
1406 {
1407   tree base;
1408 
1409   /* A plain decl does not need it set.  */
1410   if (DECL_P (ref))
1411     return NULL_TREE;
1412 
1413   if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref))))
1414     {
1415       base = get_base_address (ref);
1416       if (DECL_P (base))
1417 	return base;
1418       return NULL_TREE;
1419     }
1420 
1421   /* But watch out for MEM_REFs we cannot lower to a
1422      VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
1423   if (TREE_CODE (base) == MEM_REF
1424       && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1425     {
1426       tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1427       if (! DECL_P (decl))
1428 	return NULL_TREE;
1429       if (! is_gimple_reg_type (TREE_TYPE (base))
1430 	  || VOID_TYPE_P (TREE_TYPE (base))
1431 	  || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base))
1432 	return decl;
1433       if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
1434 	   || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
1435 	  && useless_type_conversion_p (TREE_TYPE (base),
1436 					TREE_TYPE (TREE_TYPE (decl)))
1437 	  && known_ge (mem_ref_offset (base), 0)
1438 	  && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1439 		       mem_ref_offset (base))
1440 	  && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
1441 			    TYPE_SIZE_UNIT (TREE_TYPE (base))))
1442 	return NULL_TREE;
1443       /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR.  */
1444       if (integer_zerop (TREE_OPERAND (base, 1))
1445 	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base)))
1446 	return NULL_TREE;
1447       /* For integral typed extracts we can use a BIT_FIELD_REF.  */
1448       if (DECL_SIZE (decl)
1449 	  && (known_subrange_p
1450 	      (mem_ref_offset (base),
1451 	       wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
1452 	       0, wi::to_poly_offset (DECL_SIZE_UNIT (decl))))
1453 	  /* ???  We can't handle bitfield precision extracts without
1454 	     either using an alternate type for the BIT_FIELD_REF and
1455 	     then doing a conversion or possibly adjusting the offset
1456 	     according to endianness.  */
1457 	  && (! INTEGRAL_TYPE_P (TREE_TYPE (base))
1458 	      || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base)))
1459 		  == TYPE_PRECISION (TREE_TYPE (base))))
1460 	  && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))),
1461 			     BITS_PER_UNIT) == 0)
1462 	return NULL_TREE;
1463       return decl;
1464     }
1465 
1466   return NULL_TREE;
1467 }
1468 
1469 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1470    Otherwise return false.  */
1471 
1472 static bool
1473 non_rewritable_lvalue_p (tree lhs)
1474 {
1475   /* A plain decl is always rewritable.  */
1476   if (DECL_P (lhs))
1477     return false;
1478 
1479   /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
1480      a reasonably efficient manner... */
1481   if ((TREE_CODE (lhs) == REALPART_EXPR
1482        || TREE_CODE (lhs) == IMAGPART_EXPR)
1483       && DECL_P (TREE_OPERAND (lhs, 0)))
1484     return false;
1485 
1486   /* ???  The following could be relaxed allowing component
1487      references that do not change the access size.  */
1488   if (TREE_CODE (lhs) == MEM_REF
1489       && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
1490     {
1491       tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
1492 
1493       /* A decl that is wrapped inside a MEM_REF that covers
1494 	 it in full is also rewritable.  */
1495       if (integer_zerop (TREE_OPERAND (lhs, 1))
1496 	  && DECL_P (decl)
1497 	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
1498 	  /* If the dynamic type of the decl has larger precision than
1499 	     the decl itself, we can't use the decl's type for SSA rewriting.  */
1500 	  && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
1501 	       || compare_tree_int (DECL_SIZE (decl),
1502 				    TYPE_PRECISION (TREE_TYPE (decl))) == 0)
1503 	      || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
1504 		  && (TYPE_PRECISION (TREE_TYPE (decl))
1505 		      >= TYPE_PRECISION (TREE_TYPE (lhs)))))
1506 	  /* Make sure we are not re-writing non-float copying into float
1507 	     copying as that can incur normalization.  */
1508 	  && (! FLOAT_TYPE_P (TREE_TYPE (decl))
1509 	      || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
1510 	  && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
1511 	return false;
1512 
1513       /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
1514 	 using a BIT_INSERT_EXPR.  */
1515       if (DECL_P (decl)
1516 	  && VECTOR_TYPE_P (TREE_TYPE (decl))
1517 	  && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
1518 	  && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
1519 			      TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))), 0)
1520 	  && known_ge (mem_ref_offset (lhs), 0)
1521 	  && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1522 		       mem_ref_offset (lhs))
1523 	  && multiple_of_p (sizetype, TREE_OPERAND (lhs, 1),
1524 			    TYPE_SIZE_UNIT (TREE_TYPE (lhs))))
1525 	return false;
1526     }
1527 
1528   /* A vector-insert using a BIT_FIELD_REF is rewritable using
1529      BIT_INSERT_EXPR.  */
1530   if (TREE_CODE (lhs) == BIT_FIELD_REF
1531       && DECL_P (TREE_OPERAND (lhs, 0))
1532       && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
1533       && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
1534       && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
1535 			  TYPE_SIZE_UNIT
1536 			    (TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0)))), 0)
1537       && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
1538 	  % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
1539     return false;
1540 
1541   return true;
1542 }
1543 
1544 /* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
1545    mark the variable VAR for conversion into SSA, recording it in
1546    SUITABLE_FOR_RENAMING when updating stmts will be required.  */
1547 
1548 static void
1549 maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
1550 		    bitmap suitable_for_renaming)
1551 {
1552   /* Global variables and result decls cannot be changed.  */
1553   if (is_global_var (var)
1554       || TREE_CODE (var) == RESULT_DECL
1555       || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1556     return;
1557 
1558   if (TREE_ADDRESSABLE (var)
1559       /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1560 	 a non-register.  Otherwise we are confused and forget to
1561 	 add virtual operands for it.  */
1562       && (!is_gimple_reg_type (TREE_TYPE (var))
1563 	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1564 	  || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1565 	  || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1566     {
1567       TREE_ADDRESSABLE (var) = 0;
1568       if (is_gimple_reg (var))
1569 	bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1570       if (dump_file)
1571 	{
1572 	  fprintf (dump_file, "No longer having address taken: ");
1573 	  print_generic_expr (dump_file, var);
1574 	  fprintf (dump_file, "\n");
1575 	}
1576     }
1577 
1578   if (!DECL_GIMPLE_REG_P (var)
1579       && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1580       && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1581 	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1582       && !TREE_THIS_VOLATILE (var)
1583       && (!VAR_P (var) || !DECL_HARD_REGISTER (var)))
1584     {
1585       DECL_GIMPLE_REG_P (var) = 1;
1586       bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1587       if (dump_file)
1588 	{
1589 	  fprintf (dump_file, "Now a gimple register: ");
1590 	  print_generic_expr (dump_file, var);
1591 	  fprintf (dump_file, "\n");
1592 	}
1593     }
1594 }
1595 
1596 /* Return true when STMT is an ASAN_MARK call whose second argument is the
1597    address of a local variable.  */
1598 
1599 static bool
1600 is_asan_mark_p (gimple *stmt)
1601 {
1602   if (!gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1603     return false;
1604 
1605   tree addr = get_base_address (gimple_call_arg (stmt, 1));
1606   if (TREE_CODE (addr) == ADDR_EXPR
1607       && VAR_P (TREE_OPERAND (addr, 0)))
1608     {
1609       tree var = TREE_OPERAND (addr, 0);
1610       if (lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1611 			    DECL_ATTRIBUTES (var)))
1612 	return false;
1613 
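      /* Check whether VAR would be a gimple register if it were not
	 marked addressable.  */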
1614       unsigned addressable = TREE_ADDRESSABLE (var);
1615       TREE_ADDRESSABLE (var) = 0;
1616       bool r = is_gimple_reg (var);
1617       TREE_ADDRESSABLE (var) = addressable;
1618       return r;
1619     }
1620 
1621   return false;
1622 }
1623 
1624 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */
1625 
1626 void
1627 execute_update_addresses_taken (void)
1628 {
1629   basic_block bb;
1630   auto_bitmap addresses_taken;
1631   auto_bitmap not_reg_needs;
1632   auto_bitmap suitable_for_renaming;
1633   tree var;
1634   unsigned i;
1635 
1636   timevar_push (TV_ADDRESS_TAKEN);
1637 
1638   /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1639      the function body.  */
1640   FOR_EACH_BB_FN (bb, cfun)
1641     {
1642       for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1643 	   gsi_next (&gsi))
1644 	{
1645 	  gimple *stmt = gsi_stmt (gsi);
1646 	  enum gimple_code code = gimple_code (stmt);
1647 	  tree decl;
1648 
1649 	  if (code == GIMPLE_CALL)
1650 	    {
1651 	      if (optimize_atomic_compare_exchange_p (stmt))
1652 		{
1653 		  /* For __atomic_compare_exchange_N if the second argument
1654 		     is &var, don't mark var addressable;
1655 		     if it becomes non-addressable, we'll rewrite it into
1656 		     an ATOMIC_COMPARE_EXCHANGE call.  */
1657 		  tree arg = gimple_call_arg (stmt, 1);
1658 		  gimple_call_set_arg (stmt, 1, null_pointer_node);
1659 		  gimple_ior_addresses_taken (addresses_taken, stmt);
1660 		  gimple_call_set_arg (stmt, 1, arg);
1661 		}
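	      /* Likewise, do not let ASAN_MARK or GOMP_SIMT_ENTER calls
		 make their address arguments addressable.  */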
1662 	      else if (is_asan_mark_p (stmt)
1663 		       || gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
1664 		;
1665 	      else
1666 		gimple_ior_addresses_taken (addresses_taken, stmt);
1667 	    }
1668 	  else
1669 	    /* Note all addresses taken by the stmt.  */
1670 	    gimple_ior_addresses_taken (addresses_taken, stmt);
1671 
1672 	  /* If we have a call or an assignment, see if the lhs contains
1673 	     a local decl that must not become a gimple register.  */
1674 	  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1675 	    {
1676               tree lhs = gimple_get_lhs (stmt);
1677               if (lhs
1678 		  && TREE_CODE (lhs) != SSA_NAME
1679 		  && ((code == GIMPLE_CALL && ! DECL_P (lhs))
1680 		      || non_rewritable_lvalue_p (lhs)))
1681 		{
1682 		  decl = get_base_address (lhs);
1683 		  if (DECL_P (decl))
1684 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1685                 }
1686 	    }
1687 
1688 	  if (gimple_assign_single_p (stmt))
1689 	    {
1690 	      tree rhs = gimple_assign_rhs1 (stmt);
1691 	      if ((decl = non_rewritable_mem_ref_base (rhs)))
1692 		bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1693 	    }
1694 
1695 	  else if (code == GIMPLE_CALL)
1696 	    {
1697 	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
1698 		{
1699 		  tree arg = gimple_call_arg (stmt, i);
1700 		  if ((decl = non_rewritable_mem_ref_base (arg)))
1701 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1702 		}
1703 	    }
1704 
1705 	  else if (code == GIMPLE_ASM)
1706 	    {
1707 	      gasm *asm_stmt = as_a <gasm *> (stmt);
1708 	      for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1709 		{
1710 		  tree link = gimple_asm_output_op (asm_stmt, i);
1711 		  tree lhs = TREE_VALUE (link);
1712 		  if (TREE_CODE (lhs) != SSA_NAME)
1713 		    {
1714 		      decl = get_base_address (lhs);
1715 		      if (DECL_P (decl)
1716 			  && (non_rewritable_lvalue_p (lhs)
1717 			      /* We cannot move required conversions from
1718 				 the lhs to the rhs in asm statements, so
1719 				 require we do not need any.  */
1720 			      || !useless_type_conversion_p
1721 			            (TREE_TYPE (lhs), TREE_TYPE (decl))))
1722 			bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1723 		    }
1724 		}
1725 	      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1726 		{
1727 		  tree link = gimple_asm_input_op (asm_stmt, i);
1728 		  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
1729 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1730 		}
1731 	    }
1732 	}
1733 
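      /* Addresses taken by ADDR_EXPRs appearing in PHI arguments are noted
	 here, since the statement walk above does not visit PHI nodes.  */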
1734       for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1735 	   gsi_next (&gsi))
1736 	{
1737 	  size_t i;
1738 	  gphi *phi = gsi.phi ();
1739 
1740 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
1741 	    {
1742 	      tree op = PHI_ARG_DEF (phi, i), var;
1743 	      if (TREE_CODE (op) == ADDR_EXPR
1744 		  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1745 		  && DECL_P (var))
1746 		bitmap_set_bit (addresses_taken, DECL_UID (var));
1747 	    }
1748 	}
1749     }
1750 
1751   /* We cannot iterate over all referenced vars because that set can contain
1752      unused vars from BLOCK trees, which causes code generation differences
1753      for -g vs. -g0.  */
1754   for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1755     maybe_optimize_var (var, addresses_taken, not_reg_needs,
1756 			suitable_for_renaming);
1757 
1758   FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1759     maybe_optimize_var (var, addresses_taken, not_reg_needs,
1760 			suitable_for_renaming);
1761 
1762   /* Operand caches need to be recomputed for operands referencing the updated
1763      variables and operands need to be rewritten to expose bare symbols.  */
1764   if (!bitmap_empty_p (suitable_for_renaming))
1765     {
1766       FOR_EACH_BB_FN (bb, cfun)
1767 	for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1768 	  {
1769 	    gimple *stmt = gsi_stmt (gsi);
1770 
1771 	    /* Rewrite memory references, complex-part stores and vector
1772 	       element stores of symbols we want to rewrite into SSA form.  */
1773 	    if (gimple_assign_single_p (stmt))
1774 	      {
1775 		tree lhs = gimple_assign_lhs (stmt);
1776 		tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
1777 		tree sym;
1778 
1779 		/* Rewrite LHS IMAG/REALPART_EXPR similar to
1780 		   gimplify_modify_expr_complex_part.  */
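		/* An illustrative sketch (the names x, val_2 and _tmp are
		   made up): for a complex variable x being rewritten, a
		   store
		     IMAGPART_EXPR <x> = val_2;
		   becomes
		     _tmp = REALPART_EXPR <x>;
		     x = COMPLEX_EXPR <_tmp, val_2>;
		   and symmetrically for the real part.  */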
1781 		if ((TREE_CODE (lhs) == IMAGPART_EXPR
1782 		     || TREE_CODE (lhs) == REALPART_EXPR)
1783 		    && DECL_P (TREE_OPERAND (lhs, 0))
1784 		    && bitmap_bit_p (suitable_for_renaming,
1785 				     DECL_UID (TREE_OPERAND (lhs, 0))))
1786 		  {
1787 		    tree other = make_ssa_name (TREE_TYPE (lhs));
1788 		    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
1789 					? REALPART_EXPR : IMAGPART_EXPR,
1790 					TREE_TYPE (other),
1791 					TREE_OPERAND (lhs, 0));
1792 		    gimple *load = gimple_build_assign (other, lrhs);
1793 		    location_t loc = gimple_location (stmt);
1794 		    gimple_set_location (load, loc);
1795 		    gimple_set_vuse (load, gimple_vuse (stmt));
1796 		    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
1797 		    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
1798 		    gimple_assign_set_rhs_with_ops
1799 		      (&gsi, COMPLEX_EXPR,
1800 		       TREE_CODE (lhs) == IMAGPART_EXPR
1801 		       ? other : gimple_assign_rhs1 (stmt),
1802 		       TREE_CODE (lhs) == IMAGPART_EXPR
1803 		       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
1804 		    stmt = gsi_stmt (gsi);
1805 		    unlink_stmt_vdef (stmt);
1806 		    update_stmt (stmt);
1807 		    continue;
1808 		  }
1809 
1810 		/* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
1811 		   into a BIT_INSERT_EXPR.  */
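		/* Roughly, and purely as an illustration (v and val_3 are
		   made-up names, assuming a V4SI vector v):
		     BIT_FIELD_REF <v, 32, 64> = val_3;
		   becomes
		     v = BIT_INSERT_EXPR <v, val_3, 64>;
		   with a VIEW_CONVERT_EXPR of val_3 inserted first if the
		   element types do not match.  */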
1812 		if (TREE_CODE (lhs) == BIT_FIELD_REF
1813 		    && DECL_P (TREE_OPERAND (lhs, 0))
1814 		    && bitmap_bit_p (suitable_for_renaming,
1815 				     DECL_UID (TREE_OPERAND (lhs, 0)))
1816 		    && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
1817 		    && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
1818 		    && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
1819 					TYPE_SIZE_UNIT (TREE_TYPE
1820 					  (TREE_TYPE (TREE_OPERAND (lhs, 0)))),
1821 					0)
1822 		    && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
1823 			% tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
1824 		  {
1825 		    tree var = TREE_OPERAND (lhs, 0);
1826 		    tree val = gimple_assign_rhs1 (stmt);
1827 		    if (! types_compatible_p (TREE_TYPE (TREE_TYPE (var)),
1828 					      TREE_TYPE (val)))
1829 		      {
1830 			tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (var)));
1831 			gimple *pun
1832 			  = gimple_build_assign (tem,
1833 						 build1 (VIEW_CONVERT_EXPR,
1834 							 TREE_TYPE (tem), val));
1835 			gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
1836 			val = tem;
1837 		      }
1838 		    tree bitpos = TREE_OPERAND (lhs, 2);
1839 		    gimple_assign_set_lhs (stmt, var);
1840 		    gimple_assign_set_rhs_with_ops
1841 		      (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
1842 		    stmt = gsi_stmt (gsi);
1843 		    unlink_stmt_vdef (stmt);
1844 		    update_stmt (stmt);
1845 		    continue;
1846 		  }
1847 
1848 		/* Rewrite a vector insert using a MEM_REF on the LHS
1849 		   into a BIT_INSERT_EXPR.  */
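		/* Again purely illustrative, for a V4SI vector v:
		     MEM[(int *)&v + 8B] = val_4;
		   becomes
		     v = BIT_INSERT_EXPR <v, val_4, 64>;
		   the bit position being the MEM_REF offset scaled by
		   BITS_PER_UNIT.  */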
1850 		if (TREE_CODE (lhs) == MEM_REF
1851 		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1852 		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1853 		    && DECL_P (sym)
1854 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
1855 		    && VECTOR_TYPE_P (TREE_TYPE (sym))
1856 		    && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
1857 		    && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
1858 					TYPE_SIZE_UNIT
1859 					  (TREE_TYPE (TREE_TYPE (sym))), 0)
1860 		    && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
1861 		    && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
1862 					TYPE_SIZE_UNIT (TREE_TYPE (sym)))
1863 		    && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
1864 			% tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
1865 		  {
1866 		    tree val = gimple_assign_rhs1 (stmt);
1867 		    if (! types_compatible_p (TREE_TYPE (val),
1868 					      TREE_TYPE (TREE_TYPE (sym))))
1869 		      {
1870 			tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (sym)));
1871 			gimple *pun
1872 			  = gimple_build_assign (tem,
1873 						 build1 (VIEW_CONVERT_EXPR,
1874 							 TREE_TYPE (tem), val));
1875 			gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
1876 			val = tem;
1877 		      }
1878 		    tree bitpos
1879 		      = wide_int_to_tree (bitsizetype,
1880 					  mem_ref_offset (lhs) * BITS_PER_UNIT);
1881 		    gimple_assign_set_lhs (stmt, sym);
1882 		    gimple_assign_set_rhs_with_ops
1883 		      (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
1884 		    stmt = gsi_stmt (gsi);
1885 		    unlink_stmt_vdef (stmt);
1886 		    update_stmt (stmt);
1887 		    continue;
1888 		  }
1889 
1890 		/* We shouldn't have any fancy wrapping of
1891 		   component-refs on the LHS, but look through
1892 		   VIEW_CONVERT_EXPRs as that is easy.  */
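		/* For instance (names are illustrative), a store
		     MEM[(float *)&i_var] = val_5;
		   to an integer variable i_var that is rewritten into SSA
		   form becomes
		     i_var = VIEW_CONVERT_EXPR<int>(val_5);
		   via the code below.  */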
1893 		while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1894 		  lhs = TREE_OPERAND (lhs, 0);
1895 		if (TREE_CODE (lhs) == MEM_REF
1896 		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1897 		    && integer_zerop (TREE_OPERAND (lhs, 1))
1898 		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1899 		    && DECL_P (sym)
1900 		    && !TREE_ADDRESSABLE (sym)
1901 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1902 		  lhs = sym;
1903 		else
1904 		  lhs = gimple_assign_lhs (stmt);
1905 
1906 		/* Rewrite the RHS and make sure the resulting assignment
1907 		   is validly typed.  */
1908 		maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
1909 		rhs = gimple_assign_rhs1 (stmt);
1910 		if (gimple_assign_lhs (stmt) != lhs
1911 		    && !useless_type_conversion_p (TREE_TYPE (lhs),
1912 						   TREE_TYPE (rhs)))
1913 		  {
1914 		    if (gimple_clobber_p (stmt))
1915 		      {
1916 			rhs = build_constructor (TREE_TYPE (lhs), NULL);
1917 			TREE_THIS_VOLATILE (rhs) = 1;
1918 		      }
1919 		    else
1920 		      rhs = fold_build1 (VIEW_CONVERT_EXPR,
1921 					 TREE_TYPE (lhs), rhs);
1922 		  }
1923 		if (gimple_assign_lhs (stmt) != lhs)
1924 		  gimple_assign_set_lhs (stmt, lhs);
1925 
1926 		if (gimple_assign_rhs1 (stmt) != rhs)
1927 		  {
1928 		    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1929 		    gimple_assign_set_rhs_from_tree (&gsi, rhs);
1930 		  }
1931 	      }
1932 
1933 	    else if (gimple_code (stmt) == GIMPLE_CALL)
1934 	      {
1935 		unsigned i;
1936 		if (optimize_atomic_compare_exchange_p (stmt))
1937 		  {
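		    /* If the 'expected' argument only has its address taken
		       by this builtin, folding the call into its internal
		       function form lets that local be rewritten into SSA;
		       this is a sketch of the intent, the details live in
		       fold_builtin_atomic_compare_exchange.  */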
1938 		    tree expected = gimple_call_arg (stmt, 1);
1939 		    if (bitmap_bit_p (suitable_for_renaming,
1940 				      DECL_UID (TREE_OPERAND (expected, 0))))
1941 		      {
1942 			fold_builtin_atomic_compare_exchange (&gsi);
1943 			continue;
1944 		      }
1945 		  }
1946 		else if (is_asan_mark_p (stmt))
1947 		  {
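		    /* Sketch: for a variable v that becomes an SSA name,
		       ASAN_MARK (POISON, &v, ...) is replaced by an
		       IFN_ASAN_POISON call defining v, while
		       ASAN_MARK (UNPOISON, &v, ...) is replaced by a
		       clobber of v.  */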
1948 		    tree var = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
1949 		    if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
1950 		      {
1951 			unlink_stmt_vdef (stmt);
1952 			if (asan_mark_p (stmt, ASAN_MARK_POISON))
1953 			  {
1954 			    gcall *call
1955 			      = gimple_build_call_internal (IFN_ASAN_POISON, 0);
1956 			    gimple_call_set_lhs (call, var);
1957 			    gsi_replace (&gsi, call, GSI_SAME_STMT);
1958 			  }
1959 			else
1960 			  {
1961 			    /* In ASAN_MARK (UNPOISON, &b, ...) the variable
1962 			       is uninitialized.  Avoid dependencies on its
1963 			       previous, out-of-scope value.  */
1964 			    tree clobber
1965 			      = build_constructor (TREE_TYPE (var), NULL);
1966 			    TREE_THIS_VOLATILE (clobber) = 1;
1967 			    gimple *g = gimple_build_assign (var, clobber);
1968 			    gsi_replace (&gsi, g, GSI_SAME_STMT);
1969 			  }
1970 			continue;
1971 		      }
1972 		  }
1973 		else if (gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
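		  /* GOMP_SIMT_ENTER receives the addresses of variables set
		     up for SIMT privatization; once such a variable can be
		     rewritten into SSA form its address is no longer needed,
		     so the argument is replaced by a null pointer below.  */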
1974 		  for (i = 1; i < gimple_call_num_args (stmt); i++)
1975 		    {
1976 		      tree *argp = gimple_call_arg_ptr (stmt, i);
1977 		      if (*argp == null_pointer_node)
1978 			continue;
1979 		      gcc_assert (TREE_CODE (*argp) == ADDR_EXPR
1980 				  && VAR_P (TREE_OPERAND (*argp, 0)));
1981 		      tree var = TREE_OPERAND (*argp, 0);
1982 		      if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
1983 			*argp = null_pointer_node;
1984 		    }
1985 		for (i = 0; i < gimple_call_num_args (stmt); ++i)
1986 		  {
1987 		    tree *argp = gimple_call_arg_ptr (stmt, i);
1988 		    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
1989 		  }
1990 	      }
1991 
1992 	    else if (gimple_code (stmt) == GIMPLE_ASM)
1993 	      {
1994 		gasm *asm_stmt = as_a <gasm *> (stmt);
1995 		unsigned i;
1996 		for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1997 		  {
1998 		    tree link = gimple_asm_output_op (asm_stmt, i);
1999 		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
2000 						suitable_for_renaming);
2001 		  }
2002 		for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
2003 		  {
2004 		    tree link = gimple_asm_input_op (asm_stmt, i);
2005 		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
2006 						suitable_for_renaming);
2007 		  }
2008 	      }
2009 
2010 	    else if (gimple_debug_bind_p (stmt)
2011 		     && gimple_debug_bind_has_value_p (stmt))
2012 	      {
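		/* Rewrite the bound value; if it still refers to one of the
		   variables being rewritten through memory we cannot
		   express, drop the value rather than keep a stale
		   reference.  */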
2013 		tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
2014 		tree decl;
2015 		maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
2016 		decl = non_rewritable_mem_ref_base (*valuep);
2017 		if (decl
2018 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
2019 		  gimple_debug_bind_reset_value (stmt);
2020 	      }
2021 
2022 	    if (gimple_references_memory_p (stmt)
2023 		|| is_gimple_debug (stmt))
2024 	      update_stmt (stmt);
2025 
2026 	    gsi_next (&gsi);
2027 	  }
2028 
2029       /* Update SSA form here; we are also called outside of a pass.  */
2030       if (number_of_loops (cfun) > 1
2031 	  && loops_state_satisfies_p (LOOP_CLOSED_SSA))
2032 	rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
2033       else
2034 	update_ssa (TODO_update_ssa);
2035     }
2036 
2037   timevar_pop (TV_ADDRESS_TAKEN);
2038 }
2039 
2040 namespace {
2041 
2042 const pass_data pass_data_update_address_taken =
2043 {
2044   GIMPLE_PASS, /* type */
2045   "addressables", /* name */
2046   OPTGROUP_NONE, /* optinfo_flags */
2047   TV_ADDRESS_TAKEN, /* tv_id */
2048   PROP_ssa, /* properties_required */
2049   0, /* properties_provided */
2050   0, /* properties_destroyed */
2051   0, /* todo_flags_start */
2052   TODO_update_address_taken, /* todo_flags_finish */
2053 };
2054 
2055 class pass_update_address_taken : public gimple_opt_pass
2056 {
2057 public:
2058   pass_update_address_taken (gcc::context *ctxt)
2059     : gimple_opt_pass (pass_data_update_address_taken, ctxt)
2060   {}
2061 
2062   /* opt_pass methods: */
2063 
2064 }; // class pass_update_address_taken
2065 
2066 } // anon namespace
2067 
2068 gimple_opt_pass *
2069 make_pass_update_address_taken (gcc::context *ctxt)
2070 {
2071   return new pass_update_address_taken (ctxt);
2072 }
2073