xref: /netbsd/external/gpl3/gcc.old/dist/gcc/tree-ssa.c (revision 840b4d17)
1 /* Miscellaneous SSA utility functions.
2    Copyright (C) 2001-2020 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "cfghooks.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "diagnostic-core.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "gimple-fold.h"
34 #include "gimplify.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "tree-ssa-loop-manip.h"
38 #include "tree-into-ssa.h"
39 #include "tree-ssa.h"
40 #include "cfgloop.h"
41 #include "cfgexpand.h"
42 #include "tree-cfg.h"
43 #include "tree-dfa.h"
44 #include "stringpool.h"
45 #include "attribs.h"
46 #include "asan.h"
47 
48 /* Pointer map of variable mappings, keyed by edge.  */
49 static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
50 
51 
52 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */
53 
54 void
redirect_edge_var_map_add(edge e,tree result,tree def,location_t locus)55 redirect_edge_var_map_add (edge e, tree result, tree def, location_t locus)
56 {
57   edge_var_map new_node;
58 
59   if (edge_var_maps == NULL)
60     edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
61 
62   auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
63   new_node.def = def;
64   new_node.result = result;
65   new_node.locus = locus;
66 
67   slot.safe_push (new_node);
68 }
69 
70 
71 /* Clear the var mappings in edge E.  */
72 
73 void
redirect_edge_var_map_clear(edge e)74 redirect_edge_var_map_clear (edge e)
75 {
76   if (!edge_var_maps)
77     return;
78 
79   auto_vec<edge_var_map> *head = edge_var_maps->get (e);
80 
81   if (head)
82     head->release ();
83 }
84 
85 
86 /* Duplicate the redirected var mappings in OLDE in NEWE.
87 
88    This assumes a hash_map can have multiple edges mapping to the same
89    var_map (many to one mapping), since we don't remove the previous mappings.
90    */
91 
92 void
redirect_edge_var_map_dup(edge newe,edge olde)93 redirect_edge_var_map_dup (edge newe, edge olde)
94 {
95   if (!edge_var_maps)
96     return;
97 
98   auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
99   auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
100   if (!old_head)
101     return;
102 
103   new_head->safe_splice (*old_head);
104 }
105 
106 
107 /* Return the variable mappings for a given edge.  If there is none, return
108    NULL.  */
109 
110 vec<edge_var_map> *
redirect_edge_var_map_vector(edge e)111 redirect_edge_var_map_vector (edge e)
112 {
113   /* Hey, what kind of idiot would... you'd be surprised.  */
114   if (!edge_var_maps)
115     return NULL;
116 
117   auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
118   if (!slot)
119     return NULL;
120 
121   return slot;
122 }
123 
124 /* Clear the edge variable mappings.  */
125 
126 void
redirect_edge_var_map_empty(void)127 redirect_edge_var_map_empty (void)
128 {
129   if (edge_var_maps)
130     edge_var_maps->empty ();
131 }
132 
133 
/* Remove the corresponding arguments from the PHI nodes in E's
   destination block and redirect it to DEST.  Return redirected edge.
   The list of removed arguments is stored in a vector accessed
   through edge_var_maps.  */

edge
ssa_redirect_edge (edge e, basic_block dest)
{
  gphi_iterator gsi;
  gphi *phi;

  /* Any mappings previously recorded for E are stale now.  */
  redirect_edge_var_map_clear (e);

  /* Remove the appropriate PHI arguments in E's destination block.
     If we are redirecting a copied edge the destination has not
     got PHI argument space reserved nor an interesting argument.  */
  if (! (e->dest->flags & BB_DUPLICATED))
    for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	tree def;
	location_t locus;

	phi = gsi.phi ();
	def = gimple_phi_arg_def (phi, e->dest_idx);
	locus = gimple_phi_arg_location (phi, e->dest_idx);

	if (def == NULL_TREE)
	  continue;

	/* Save the removed argument so a later flush_pending_stmts can
	   re-create it as a PHI argument in the new destination.  */
	redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
      }

  e = redirect_edge_succ_nodup (e, dest);

  return e;
}
170 
171 
/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
   E->dest.  */

void
flush_pending_stmts (edge e)
{
  gphi *phi;
  edge_var_map *vm;
  int i;
  gphi_iterator gsi;

  vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
  if (!v)
    return;

  /* Walk the destination PHIs and the saved mappings in lockstep;
     the i-th saved mapping corresponds to the i-th PHI node, matching
     the order ssa_redirect_edge recorded them in.  */
  for (gsi = gsi_start_phis (e->dest), i = 0;
       !gsi_end_p (gsi) && v->iterate (i, &vm);
       gsi_next (&gsi), i++)
    {
      tree def;

      phi = gsi.phi ();
      def = redirect_edge_var_map_def (vm);
      add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
    }

  /* The queued mappings have been consumed.  */
  redirect_edge_var_map_clear (e);
}
200 
/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   an equivalent value.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have update
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_BIND_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      /* The current LHS must be an SSA name defined by STMT, or the
	 debug-temp propagation below would be meaningless.  */
      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      /* Rewrite debug binds of LHS in terms of STMT's current RHS
	 before the RHS is changed underneath them.  */
      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}
234 
235 
/* Given a tree for an expression for which we might want to emit
   locations or values in debug information (generally a variable, but
   we might deal with other kinds of trees in the future), return the
   tree that should be used as the variable of a DEBUG_BIND STMT or
   VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */

tree
target_for_debug_bind (tree var)
{
  if (!MAY_HAVE_DEBUG_BIND_STMTS)
    return NULL_TREE;

  /* For an SSA name, track the underlying symbol, if any.  */
  if (TREE_CODE (var) == SSA_NAME)
    {
      var = SSA_NAME_VAR (var);
      if (var == NULL_TREE)
	return NULL_TREE;
    }

  /* Only user variables and parameters are tracked; virtual operands
     have no user-visible location.  */
  if ((!VAR_P (var) || VAR_DECL_IS_VIRTUAL_OPERAND (var))
      && TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  /* A variable with a value expression stands for that expression;
     recurse to find the real trackable entity.  */
  if (DECL_HAS_VALUE_EXPR_P (var))
    return target_for_debug_bind (DECL_VALUE_EXPR (var));

  if (DECL_IGNORED_P (var))
    return NULL_TREE;

  /* var-tracking only tracks registers.  */
  if (!is_gimple_reg_type (TREE_TYPE (var)))
    return NULL_TREE;

  return var;
}
271 
272 /* Called via walk_tree, look for SSA_NAMEs that have already been
273    released.  */
274 
275 static tree
find_released_ssa_name(tree * tp,int * walk_subtrees,void * data_)276 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
277 {
278   struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
279 
280   if (wi && wi->is_lhs)
281     return NULL_TREE;
282 
283   if (TREE_CODE (*tp) == SSA_NAME)
284     {
285       if (SSA_NAME_IN_FREE_LIST (*tp))
286 	return *tp;
287 
288       *walk_subtrees = 0;
289     }
290   else if (IS_TYPE_OR_DECL_P (*tp))
291     *walk_subtrees = 0;
292 
293   return NULL_TREE;
294 }
295 
/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
   by other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.

   GSI, if non-NULL, is the insertion point for the new debug bind;
   otherwise VAR's defining statement is located and used.  */

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple *stmt;
  gimple *def_stmt = NULL;
  int usecount = 0;
  tree value = NULL;

  if (!MAY_HAVE_DEBUG_BIND_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
	continue;

      /* More than one debug use: a temp will be needed; stop counting.  */
      if (usecount++)
	break;

      if (gimple_debug_bind_get_value (stmt) != var)
	{
	  /* Count this as an additional use, so as to make sure we
	     use a temp unless VAR's definition has a SINGLE_RHS that
	     can be shared.  */
	  usecount++;
	  break;
	}
    }

  /* No debug uses at all: nothing to preserve.  */
  if (!usecount)
    return;

  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      /* Only degenerate PHIs (all arguments identical) have a value
	 that can be propagated into debug stmts.  */
      value = degenerate_phi_result (as_a <gphi *> (def_stmt));
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
	value = NULL;
      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
	 to.  */
      else if (value == error_mark_node)
	value = NULL;
    }
  else if (gimple_clobber_p (def_stmt))
    /* We can end up here when rewriting a decl into SSA and coming
       along a clobber for the original decl.  Turn that into
       # DEBUG decl => NULL  */
    value = NULL;
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
	{
	  struct walk_stmt_info wi;

	  memset (&wi, 0, sizeof (wi));

	  /* When removing blocks without following reverse dominance
	     order, we may sometimes encounter SSA_NAMEs that have
	     already been released, referenced in other SSA_DEFs that
	     we're about to release.  Consider:

	     <bb X>:
	     v_1 = foo;

	     <bb Y>:
	     w_2 = v_1 + bar;
	     # DEBUG w => w_2

	     If we deleted BB X first, propagating the value of w_2
	     won't do us any good.  It's too late to recover their
	     original definition of v_1: when it was deleted, it was
	     only referenced in other DEFs, it couldn't possibly know
	     it should have been retained, and propagating every
	     single DEF just in case it might have to be propagated
	     into a DEBUG STMT would probably be too wasteful.

	     When dominator information is not readily available, we
	     check for and accept some loss of debug information.  But
	     if it is available, there's no excuse for us to remove
	     blocks in the wrong order, so we don't even check for
	     dead SSA NAMEs.  SSA verification shall catch any
	     errors.  */
	  if ((!gsi && !gimple_bb (def_stmt))
	      || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
	    no_value = true;
	}

      if (!no_value)
	value = gimple_assign_rhs_to_tree (def_stmt);
    }

  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
	 expression (usecount would have been incremented again
	 otherwise), and the definition involves only constants and
	 SSA names, then we can propagate VALUE into this single use,
	 avoiding the temp.

	 We can also avoid using a temp if VALUE can be shared and
	 propagated into all uses, without generating expressions that
	 wouldn't be valid gimple RHSs.

	 Other cases that would require unsharing or non-gimple RHSs
	 are deferred to a debug temp, although we could avoid temps
	 at the expense of duplication of expressions.  */

      if (CONSTANT_CLASS_P (value)
	  || gimple_code (def_stmt) == GIMPLE_PHI
	  || (usecount == 1
	      && (!gimple_assign_single_p (def_stmt)
		  || is_gimple_min_invariant (value)))
	  || is_gimple_reg (value))
	;
      else
	{
	  /* Build a debug temp bound to VALUE, and propagate the temp
	     instead of the expression itself.  */
	  gdebug *def_temp;
	  tree vexpr = make_node (DEBUG_EXPR_DECL);

	  def_temp = gimple_build_debug_bind (vexpr,
					      unshare_expr (value),
					      def_stmt);

	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (value);
	  if (DECL_P (value))
	    SET_DECL_MODE (vexpr, DECL_MODE (value));
	  else
	    SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (value)));

	  if (gsi)
	    gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
	  else
	    {
	      gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
	      gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
	    }

	  value = vexpr;
	}
    }

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
	continue;

      if (value)
	{
	  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
	    /* NOTE(review): an older comment here claimed unshare_expr
	       was unnecessary, but the code does unshare VALUE for each
	       use; a non-DEBUG_EXPR_DECL RHS propagated into several
	       debug stmts must not be tree-shared.  */
	    SET_USE (use_p, unshare_expr (value));
	  /* If we didn't replace uses with a debug decl fold the
	     resulting expression.  Otherwise we end up with invalid IL.  */
	  if (TREE_CODE (value) != DEBUG_EXPR_DECL)
	    {
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	      fold_stmt_inplace (&gsi);
	    }
	}
      else
	/* No usable value: reset the bind so the debugger sees the
	   variable as optimized away rather than showing a stale value.  */
	gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}
490 
491 
492 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
493    other DEBUG stmts, and replace uses of the DEF with the
494    newly-created debug temp.  */
495 
496 void
insert_debug_temps_for_defs(gimple_stmt_iterator * gsi)497 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
498 {
499   gimple *stmt;
500   ssa_op_iter op_iter;
501   def_operand_p def_p;
502 
503   if (!MAY_HAVE_DEBUG_BIND_STMTS)
504     return;
505 
506   stmt = gsi_stmt (*gsi);
507 
508   FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
509     {
510       tree var = DEF_FROM_PTR (def_p);
511 
512       if (TREE_CODE (var) != SSA_NAME)
513 	continue;
514 
515       insert_debug_temp_for_var_def (gsi, var);
516     }
517 }
518 
519 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT.  */
520 
521 void
reset_debug_uses(gimple * stmt)522 reset_debug_uses (gimple *stmt)
523 {
524   ssa_op_iter op_iter;
525   def_operand_p def_p;
526   imm_use_iterator imm_iter;
527   gimple *use_stmt;
528 
529   if (!MAY_HAVE_DEBUG_BIND_STMTS)
530     return;
531 
532   FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
533     {
534       tree var = DEF_FROM_PTR (def_p);
535 
536       if (TREE_CODE (var) != SSA_NAME)
537 	continue;
538 
539       FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
540 	{
541 	  if (!gimple_debug_bind_p (use_stmt))
542 	    continue;
543 
544 	  gimple_debug_bind_reset_value (use_stmt);
545 	  update_stmt (use_stmt);
546 	}
547     }
548 }
549 
/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
   dominated stmts before their dominators, so that release_ssa_defs
   stands a chance of propagating DEFs into debug bind stmts.  */

void
release_defs_bitset (bitmap toremove)
{
  unsigned j;
  bitmap_iterator bi;

  /* Performing a topological sort is probably overkill, this will
     most likely run in slightly superlinear time, rather than the
     pathological quadratic worst case.
     But iterate from max SSA name version to min one because
     that mimics allocation order during code generation behavior best.
     Use an array for this which we compact on-the-fly with a NULL
     marker moving towards the end of the vector.  */
  auto_vec<tree, 16> names;
  names.reserve (bitmap_count_bits (toremove) + 1);
  /* NULL_TREE at index 0 is the sentinel that terminates the
     downward scan in the inner loop below.  */
  names.quick_push (NULL_TREE);
  EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
    names.quick_push (ssa_name (j));

  /* Tree view makes the repeated bitmap_bit_p queries below cheap.  */
  bitmap_tree_view (toremove);
  while (!bitmap_empty_p (toremove))
    {
      /* J trails I; entries still deferred are compacted towards the
	 end of the vector as removed ones free up slots.  */
      j = names.length () - 1;
      for (unsigned i = names.length () - 1; names[i];)
	{
	  bool remove_now = true;
	  tree var = names[i];
	  gimple *stmt;
	  imm_use_iterator uit;

	  FOR_EACH_IMM_USE_STMT (stmt, uit, var)
	    {
	      ssa_op_iter dit;
	      def_operand_p def_p;

	      /* We can't propagate PHI nodes into debug stmts.  */
	      if (gimple_code (stmt) == GIMPLE_PHI
		  || is_gimple_debug (stmt))
		continue;

	      /* If we find another definition to remove that uses
		 the one we're looking at, defer the removal of this
		 one, so that it can be propagated into debug stmts
		 after the other is.  */
	      FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
		{
		  tree odef = DEF_FROM_PTR (def_p);

		  if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
		    {
		      remove_now = false;
		      break;
		    }
		}

	      if (!remove_now)
		BREAK_FROM_IMM_USE_STMT (uit);
	    }

	  if (remove_now)
	    {
	      gimple *def = SSA_NAME_DEF_STMT (var);
	      gimple_stmt_iterator gsi = gsi_for_stmt (def);

	      if (gimple_code (def) == GIMPLE_PHI)
		remove_phi_node (&gsi, true);
	      else
		{
		  gsi_remove (&gsi, true);
		  release_defs (def);
		}
	      bitmap_clear_bit (toremove, SSA_NAME_VERSION (var));
	    }
	  else
	    /* Deferred: revisit this entry on the next outer pass.  */
	    --i;
	  if (--j != i)
	    names[i] = names[j];
	}
    }
  /* Restore the caller-visible list view of the bitmap.  */
  bitmap_list_view (toremove);
}
635 
636 /* Disable warnings about missing quoting in GCC diagnostics for
637    the verification errors.  Their format strings don't follow GCC
638    diagnostic conventions and the calls are ultimately followed by
639    one to internal_error.  */
640 #if __GNUC__ >= 10
641 #  pragma GCC diagnostic push
642 #  pragma GCC diagnostic ignored "-Wformat-diag"
643 #endif
644 
645 /* Verify virtual SSA form.  */
646 
647 bool
verify_vssa(basic_block bb,tree current_vdef,sbitmap visited)648 verify_vssa (basic_block bb, tree current_vdef, sbitmap visited)
649 {
650   bool err = false;
651 
652   if (bitmap_bit_p (visited, bb->index))
653     return false;
654 
655   bitmap_set_bit (visited, bb->index);
656 
657   /* Pick up the single virtual PHI def.  */
658   gphi *phi = NULL;
659   for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
660        gsi_next (&si))
661     {
662       tree res = gimple_phi_result (si.phi ());
663       if (virtual_operand_p (res))
664 	{
665 	  if (phi)
666 	    {
667 	      error ("multiple virtual PHI nodes in BB %d", bb->index);
668 	      print_gimple_stmt (stderr, phi, 0);
669 	      print_gimple_stmt (stderr, si.phi (), 0);
670 	      err = true;
671 	    }
672 	  else
673 	    phi = si.phi ();
674 	}
675     }
676   if (phi)
677     {
678       current_vdef = gimple_phi_result (phi);
679       if (TREE_CODE (current_vdef) != SSA_NAME)
680 	{
681 	  error ("virtual definition is not an SSA name");
682 	  print_gimple_stmt (stderr, phi, 0);
683 	  err = true;
684 	}
685     }
686 
687   /* Verify stmts.  */
688   for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
689        gsi_next (&gsi))
690     {
691       gimple *stmt = gsi_stmt (gsi);
692       tree vuse = gimple_vuse (stmt);
693       if (vuse)
694 	{
695 	  if (vuse != current_vdef)
696 	    {
697 	      error ("stmt with wrong VUSE");
698 	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
699 	      fprintf (stderr, "expected ");
700 	      print_generic_expr (stderr, current_vdef);
701 	      fprintf (stderr, "\n");
702 	      err = true;
703 	    }
704 	  tree vdef = gimple_vdef (stmt);
705 	  if (vdef)
706 	    {
707 	      current_vdef = vdef;
708 	      if (TREE_CODE (current_vdef) != SSA_NAME)
709 		{
710 		  error ("virtual definition is not an SSA name");
711 		  print_gimple_stmt (stderr, phi, 0);
712 		  err = true;
713 		}
714 	    }
715 	}
716     }
717 
718   /* Verify destination PHI uses and recurse.  */
719   edge_iterator ei;
720   edge e;
721   FOR_EACH_EDGE (e, ei, bb->succs)
722     {
723       gphi *phi = get_virtual_phi (e->dest);
724       if (phi
725 	  && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef)
726 	{
727 	  error ("PHI node with wrong VUSE on edge from BB %d",
728 		 e->src->index);
729 	  print_gimple_stmt (stderr, phi, 0, TDF_VOPS);
730 	  fprintf (stderr, "expected ");
731 	  print_generic_expr (stderr, current_vdef);
732 	  fprintf (stderr, "\n");
733 	  err = true;
734 	}
735 
736       /* Recurse.  */
737       err |= verify_vssa (e->dest, current_vdef, visited);
738     }
739 
740   return err;
741 }
742 
/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
      operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  /* All further checks access SSA_NAME fields, so the code check
     must come first.  */
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  /* An SSA name need not have an underlying symbol, but when it does
     the two must agree on type.  */
  if (SSA_NAME_VAR (ssa_name) != NULL_TREE
      && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (is_virtual && !virtual_operand_p (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  /* Virtual operands all version the one VOP decl of the function.  */
  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
    {
      error ("virtual SSA name for non-VOP decl");
      return true;
    }

  if (!is_virtual && virtual_operand_p (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  /* Default defs are created out of thin air; their def stmt must be
     an empty (nop) statement.  */
  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}
797 
798 
/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
      it means that the block in that array slot contains the
      definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
	    gimple *stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  /* A by-reference RESULT_DECL is an implicit pointer dereference and
     must never be written through an SSA definition.  */
  if (SSA_NAME_VAR (ssa_name)
      && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
      && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
    {
      error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
      goto err;
    }

  /* SSA form has exactly one definition per name; a second slot hit
     means two defining statements exist.  */
  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
	     definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  /* Common failure diagnostics: identify the name and statement.  */
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}
854 
855 
/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
      is flowing through an abnormal edge (only used when checking PHI
      arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
     that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
	    gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  /* Check the immediate-use chain once per name; TREE_VISITED marks
     names already checked during this verification pass.  */
  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
	   && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
	     def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
	   && names_defined_in_bb != NULL
	   && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      /* Same-block use: the def must appear textually before the use.  */
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      /* A NULL use marks the list head node, which stores the owning
	 SSA name directly.  */
      if (use_p->prev->use == NULL)
	listvar = use_p->prev->loc.ssa_name;
      else
	listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
        {
	  error ("wrong immediate use list");
	  err = true;
	}
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}
944 
945 
/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
      it means that the block in that array slot contains the
      definition of SSA_NAME.  */

static bool
verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  /* PHI arguments are positional per incoming edge; a count mismatch
     makes the per-edge checks below meaningless.  */
  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      /* The i-th argument flows in along the i-th predecessor edge.  */
      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
	{
	  error ("PHI argument is missing for edge %d->%d",
	         e->src->index,
		 e->dest->index);
	  err = true;
	  goto error;
	}

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
	{
	  error ("PHI argument is not SSA_NAME, or invariant");
	  err = true;
	}

      if (TREE_CODE (op) == SSA_NAME)
	{
	  err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
	  /* The argument's def must dominate the SOURCE of its edge.  */
	  err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
			     op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
	}

      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  /* Taking the address of a decl requires it to be marked
	     addressable; strip component refs to find the base decl.  */
	  tree base = TREE_OPERAND (op, 0);
	  while (handled_component_p (base))
	    base = TREE_OPERAND (base, 0);
	  if ((VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	      && !TREE_ADDRESSABLE (base))
	    {
	      error ("address taken, but ADDRESSABLE bit not set");
	      err = true;
	    }
	}

      if (e->dest != bb)
	{
	  error ("wrong edge %d->%d for PHI argument",
	         e->src->index, e->dest->index);
	  err = true;
	}

      if (err)
	{
	  fprintf (stderr, "PHI argument\n");
	  print_generic_stmt (stderr, op, TDF_VOPS);
	  goto error;
	}
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }


  return err;
}
1037 
1038 
1039 /* Verify common invariants in the SSA web.
1040    TODO: verify the variable annotations.  */
1041 
DEBUG_FUNCTION void
verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
{
  basic_block bb;
  /* Maps SSA name version -> the basic block of its defining statement;
     filled by the definition pass, consumed by verify_use below.  */
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  auto_bitmap names_defined_in_bb;

  gcc_assert (!need_ssa_update_p (cfun));

  timevar_push (TV_TREE_SSA_VERIFY);

    {
      /* Keep track of SSA names present in the IL.  */
      size_t i;
      tree name;
      /* Detects ptr-info/range-info structures shared by two names.  */
      hash_map <void *, tree> ssa_info;

      FOR_EACH_SSA_NAME (i, name, cfun)
	{
	  gimple *stmt;
	  TREE_VISITED (name) = 0;

	  verify_ssa_name (name, virtual_operand_p (name));

	  stmt = SSA_NAME_DEF_STMT (name);
	  if (!gimple_nop_p (stmt))
	    {
	      basic_block bb = gimple_bb (stmt);
	      if (verify_def (bb, definition_block,
			      name, stmt, virtual_operand_p (name)))
		goto err;
	    }

	  /* Each SSA name must own its auxiliary info; sharing one
	     SSA_NAME_PTR_INFO/SSA_NAME_RANGE_INFO blob between two names
	     is a bug, since updates to one would leak into the other.  */
	  void *info = NULL;
	  if (POINTER_TYPE_P (TREE_TYPE (name)))
	    info = SSA_NAME_PTR_INFO (name);
	  else if (INTEGRAL_TYPE_P (TREE_TYPE (name)))
	    info = SSA_NAME_RANGE_INFO (name);
	  if (info)
	    {
	      bool existed;
	      tree &val = ssa_info.get_or_insert (info, &existed);
	      if (existed)
		{
		  error ("shared SSA name info");
		  print_generic_expr (stderr, val);
		  fprintf (stderr, " and ");
		  print_generic_expr (stderr, name);
		  fprintf (stderr, "\n");
		  goto err;
		}
	      else
		val = name;
	    }
	}
    }

  /* Dominance info is required by verify_use; recompute it fresh and
     restore the caller's state at the end.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->aux)
	    {
	      error ("AUX pointer initialized for edge %d->%d", e->src->index,
		      e->dest->index);
	      goto err;
	    }
	}

      /* Verify the arguments for every PHI node in the block.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  if (verify_phi_args (phi, bb, definition_block))
	    goto err;

	  bitmap_set_bit (names_defined_in_bb,
			  SSA_NAME_VERSION (gimple_phi_result (phi)));
	}

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  use_operand_p use_p;

	  /* A pass must call update_stmt on every stmt it changed.  */
	  if (check_modified_stmt && gimple_modified_p (stmt))
	    {
	      error ("stmt (%p) marked modified after optimization pass: ",
		     (void *)stmt);
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
	    {
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  /* Valueless debug binds carry no real uses to check.  */
	  if (gimple_debug_bind_p (stmt)
	      && !gimple_debug_bind_has_value_p (stmt))
	    continue;

	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
	    {
	      op = USE_FROM_PTR (use_p);
	      if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
			      use_p, stmt, false, names_defined_in_bb))
		goto err;
	    }

	  /* Each defined name must point back at this stmt as its
	     defining statement.  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
	    {
	      if (SSA_NAME_DEF_STMT (op) != stmt)
		{
		  error ("SSA_NAME_DEF_STMT is wrong");
		  fprintf (stderr, "Expected definition statement:\n");
		  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
		  fprintf (stderr, "\nActual definition statement:\n");
		  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
				     4, TDF_VOPS);
		  goto err;
		}
	      bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
	    }
	}

      bitmap_clear (names_defined_in_bb);
    }

  free (definition_block);

  /* If the function has a virtual operand web, verify the virtual
     use-def chain starting from the default definition.  */
  if (gimple_vop (cfun)
      && ssa_default_def (cfun, gimple_vop (cfun)))
    {
      auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
      bitmap_clear (visited);
      if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun),
		       ssa_default_def (cfun, gimple_vop (cfun)), visited))
	goto err;
    }

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}
1210 
1211 #if __GNUC__ >= 10
1212 #  pragma GCC diagnostic pop
1213 #endif
1214 
1215 /* Initialize global DFA and SSA structures.  */
1216 
1217 void
init_tree_ssa(struct function * fn)1218 init_tree_ssa (struct function *fn)
1219 {
1220   fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
1221   fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
1222   pt_solution_reset (&fn->gimple_df->escaped);
1223   init_ssanames (fn, 0);
1224 }
1225 
1226 /* Deallocate memory associated with SSA data structures for FNDECL.  */
1227 
1228 void
delete_tree_ssa(struct function * fn)1229 delete_tree_ssa (struct function *fn)
1230 {
1231   fini_ssanames (fn);
1232 
1233   /* We no longer maintain the SSA operand cache at this point.  */
1234   if (ssa_operands_active (fn))
1235     fini_ssa_operands (fn);
1236 
1237   fn->gimple_df->default_defs->empty ();
1238   fn->gimple_df->default_defs = NULL;
1239   pt_solution_reset (&fn->gimple_df->escaped);
1240   if (fn->gimple_df->decls_to_pointers != NULL)
1241     delete fn->gimple_df->decls_to_pointers;
1242   fn->gimple_df->decls_to_pointers = NULL;
1243   fn->gimple_df = NULL;
1244 
1245   /* We no longer need the edge variable maps.  */
1246   redirect_edge_var_map_empty ();
1247 }
1248 
1249 /* Return true if EXPR is a useless type conversion, otherwise return
1250    false.  */
1251 
1252 bool
tree_ssa_useless_type_conversion(tree expr)1253 tree_ssa_useless_type_conversion (tree expr)
1254 {
1255   /* If we have an assignment that merely uses a NOP_EXPR to change
1256      the top of the RHS to the type of the LHS and the type conversion
1257      is "safe", then strip away the type conversion so that we can
1258      enter LHS = RHS into the const_and_copies table.  */
1259   if (CONVERT_EXPR_P (expr)
1260       || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1261       || TREE_CODE (expr) == NON_LVALUE_EXPR)
1262     return useless_type_conversion_p
1263       (TREE_TYPE (expr),
1264        TREE_TYPE (TREE_OPERAND (expr, 0)));
1265 
1266   return false;
1267 }
1268 
1269 /* Strip conversions from EXP according to
1270    tree_ssa_useless_type_conversion and return the resulting
1271    expression.  */
1272 
1273 tree
tree_ssa_strip_useless_type_conversions(tree exp)1274 tree_ssa_strip_useless_type_conversions (tree exp)
1275 {
1276   while (tree_ssa_useless_type_conversion (exp))
1277     exp = TREE_OPERAND (exp, 0);
1278   return exp;
1279 }
1280 
1281 /* Return true if T, as SSA_NAME, has an implicit default defined value.  */
1282 
1283 bool
ssa_defined_default_def_p(tree t)1284 ssa_defined_default_def_p (tree t)
1285 {
1286   tree var = SSA_NAME_VAR (t);
1287 
1288   if (!var)
1289     ;
1290   /* Parameters get their initial value from the function entry.  */
1291   else if (TREE_CODE (var) == PARM_DECL)
1292     return true;
1293   /* When returning by reference the return address is actually a hidden
1294      parameter.  */
1295   else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1296     return true;
1297   /* Hard register variables get their initial value from the ether.  */
1298   else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1299     return true;
1300 
1301   return false;
1302 }
1303 
1304 
1305 /* Return true if T, an SSA_NAME, has an undefined value.  PARTIAL is what
1306    should be returned if the value is only partially undefined.  */
1307 
1308 bool
ssa_undefined_value_p(tree t,bool partial)1309 ssa_undefined_value_p (tree t, bool partial)
1310 {
1311   gimple *def_stmt;
1312 
1313   if (ssa_defined_default_def_p (t))
1314     return false;
1315 
1316   /* The value is undefined iff its definition statement is empty.  */
1317   def_stmt = SSA_NAME_DEF_STMT (t);
1318   if (gimple_nop_p (def_stmt))
1319     return true;
1320 
1321   /* Check if the complex was not only partially defined.  */
1322   if (partial && is_gimple_assign (def_stmt)
1323       && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
1324     {
1325       tree rhs1, rhs2;
1326 
1327       rhs1 = gimple_assign_rhs1 (def_stmt);
1328       rhs2 = gimple_assign_rhs2 (def_stmt);
1329       return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
1330 	     || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
1331     }
1332   return false;
1333 }
1334 
1335 
1336 /* Return TRUE iff STMT, a gimple statement, references an undefined
1337    SSA name.  */
1338 
1339 bool
gimple_uses_undefined_value_p(gimple * stmt)1340 gimple_uses_undefined_value_p (gimple *stmt)
1341 {
1342   ssa_op_iter iter;
1343   tree op;
1344 
1345   FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
1346     if (ssa_undefined_value_p (op))
1347       return true;
1348 
1349   return false;
1350 }
1351 
1352 
1353 
1354 /* If necessary, rewrite the base of the reference tree *TP from
1355    a MEM_REF to a plain or converted symbol.  */
1356 
static void
maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  /* Walk down to the innermost reference.  */
  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  /* Only rewrite a MEM_REF of the address of a non-addressable decl that
     has been marked suitable for SSA renaming, when the access has a
     non-void register type.  */
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
      && is_gimple_reg_type (TREE_TYPE (*tp))
      && ! VOID_TYPE_P (TREE_TYPE (*tp)))
    {
      /* Element extract from a vector decl at an element-aligned offset
	 becomes a BIT_FIELD_REF (offset converted from bytes to bits).  */
      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
	  && useless_type_conversion_p (TREE_TYPE (*tp),
					TREE_TYPE (TREE_TYPE (sym)))
	  && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
	{
	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
			TYPE_SIZE (TREE_TYPE (*tp)),
			int_const_binop (MULT_EXPR,
					 bitsize_int (BITS_PER_UNIT),
					 TREE_OPERAND (*tp, 1)));
	}
      /* Component extract from a complex decl: offset 0 is the real
	 part, any other offset the imaginary part.  */
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
	       && useless_type_conversion_p (TREE_TYPE (*tp),
					     TREE_TYPE (TREE_TYPE (sym))))
	{
	  *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
			? REALPART_EXPR : IMAGPART_EXPR,
			TREE_TYPE (*tp), sym);
	}
      /* Full-size access at offset zero: use the bare symbol, with a
	 VIEW_CONVERT_EXPR if the types differ.  */
      else if (integer_zerop (TREE_OPERAND (*tp, 1))
	       && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp)))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (*tp),
					  TREE_TYPE (sym)))
	    *tp = build1 (VIEW_CONVERT_EXPR,
			  TREE_TYPE (*tp), sym);
	  else
	    *tp = sym;
	}
      /* Otherwise, an in-bounds sub-object extract becomes a
	 BIT_FIELD_REF, but only for byte-aligned, full-precision
	 integral types (bitfield-precision extracts would need extra
	 conversions or endian-dependent offset adjustment).  */
      else if (DECL_SIZE (sym)
	       && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
	       && (known_subrange_p
		   (mem_ref_offset (*tp),
		    wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
		    0, wi::to_offset (DECL_SIZE_UNIT (sym))))
	       && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
		   || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
		       == TYPE_PRECISION (TREE_TYPE (*tp))))
	       && (! INTEGRAL_TYPE_P (TREE_TYPE (sym))
		   || type_has_mode_precision_p (TREE_TYPE (sym)))
	       && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))),
				  BITS_PER_UNIT) == 0)
	{
	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
			TYPE_SIZE (TREE_TYPE (*tp)),
			wide_int_to_tree (bitsizetype,
					  mem_ref_offset (*tp)
					  << LOG2_BITS_PER_UNIT));
	}
    }
}
1425 
1426 /* For a tree REF return its base if it is the base of a MEM_REF
1427    that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */
1428 
static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  /* If REF is not an invariant reference chain, fall back to its base
     address; a decl base there blocks rewriting.  */
  if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref))))
    {
      base = get_base_address (ref);
      if (DECL_P (base))
	return base;
      return NULL_TREE;
    }

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if (! DECL_P (decl))
	return NULL_TREE;
      /* Non-register-typed, void-typed, or volatility-mismatching
	 accesses cannot be lowered at all.  */
      if (! is_gimple_reg_type (TREE_TYPE (base))
	  || VOID_TYPE_P (TREE_TYPE (base))
	  || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base))
	return decl;
      /* An in-bounds, element-aligned extract from a vector or complex
	 decl is rewritable (see maybe_rewrite_mem_ref_base).  */
      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
	  && useless_type_conversion_p (TREE_TYPE (base),
					TREE_TYPE (TREE_TYPE (decl)))
	  && known_ge (mem_ref_offset (base), 0)
	  && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
		       mem_ref_offset (base))
	  && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (base))))
	return NULL_TREE;
      /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR.  */
      if (integer_zerop (TREE_OPERAND (base, 1))
	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base)))
	return NULL_TREE;
      /* For integral typed extracts we can use a BIT_FIELD_REF.  */
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
	  && (known_subrange_p
	      (mem_ref_offset (base),
	       wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
	       0, wi::to_poly_offset (DECL_SIZE_UNIT (decl))))
	  /* ???  We can't handle bitfield precision extracts without
	     either using an alternate type for the BIT_FIELD_REF and
	     then doing a conversion or possibly adjusting the offset
	     according to endianness.  */
	  && (! INTEGRAL_TYPE_P (TREE_TYPE (base))
	      || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base)))
		  == TYPE_PRECISION (TREE_TYPE (base))))
	  /* ???  Likewise for extracts from bitfields, we'd have
	     to pun the base object to a size precision mode first.  */
	  && (! INTEGRAL_TYPE_P (TREE_TYPE (decl))
	      || type_has_mode_precision_p (TREE_TYPE (decl)))
	  && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))),
			     BITS_PER_UNIT) == 0)
	return NULL_TREE;
      /* Anything else keeps DECL out of SSA form.  */
      return decl;
    }

  return NULL_TREE;
}
1498 
/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */
1501 
static bool
non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
     a reasonably efficient manner... */
  if ((TREE_CODE (lhs) == REALPART_EXPR
       || TREE_CODE (lhs) == IMAGPART_EXPR)
      && DECL_P (TREE_OPERAND (lhs, 0)))
    return false;

  /* ???  The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);

      /* A decl that is wrapped inside a MEM-REF that covers
	 it in full is also rewritable.  */
      if (integer_zerop (TREE_OPERAND (lhs, 1))
	  && DECL_P (decl)
	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
	  /* If the dynamic type of the decl has larger precision than
	     the decl itself we can't use the decls type for SSA rewriting.  */
	  && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
	       || compare_tree_int (DECL_SIZE (decl),
				    TYPE_PRECISION (TREE_TYPE (decl))) == 0)
	      || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		  && (TYPE_PRECISION (TREE_TYPE (decl))
		      >= TYPE_PRECISION (TREE_TYPE (lhs)))))
	  /* Make sure we are not re-writing non-float copying into float
	     copying as that can incur normalization.  */
	  && (! FLOAT_TYPE_P (TREE_TYPE (decl))
	      || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
	  && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
	return false;

      /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
	 using a BIT_INSERT_EXPR.  */
      if (DECL_P (decl)
	  && VECTOR_TYPE_P (TREE_TYPE (decl))
	  && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
	  /* The access must lie within the decl, at an offset that is a
	     multiple of the inserted piece's size.  */
	  && known_ge (mem_ref_offset (lhs), 0)
	  && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
		       mem_ref_offset (lhs))
	  && multiple_of_p (sizetype, TREE_OPERAND (lhs, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (lhs)))
	  && known_ge (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (decl))),
		       wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (lhs)))))
	{
	  /* The inserted piece must span a whole number of elements.  */
	  poly_uint64 lhs_bits, nelts;
	  if (poly_int_tree_p (TYPE_SIZE (TREE_TYPE (lhs)), &lhs_bits)
	      && multiple_p (lhs_bits,
			     tree_to_uhwi
			       (TYPE_SIZE (TREE_TYPE (TREE_TYPE (decl)))),
			     &nelts)
	      && valid_vector_subparts_p (nelts))
	    {
	      if (known_eq (nelts, 1u))
		return false;
	      /* For sub-vector inserts the insert vector mode has to be
		 supported.  */
	      tree vtype = build_vector_type (TREE_TYPE (TREE_TYPE (decl)),
					      nelts);
	      if (TYPE_MODE (vtype) != BLKmode)
		return false;
	    }
	}
    }

  /* A vector-insert using a BIT_FIELD_REF is rewritable using
     BIT_INSERT_EXPR.  */
  if (TREE_CODE (lhs) == BIT_FIELD_REF
      && DECL_P (TREE_OPERAND (lhs, 0))
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
      && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
      && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
			  TYPE_SIZE_UNIT
			    (TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0)))), 0)
      && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
	  % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
    return false;

  return true;
}
1591 
/* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
   mark the variable VAR for conversion into SSA by recording it in
   SUITABLE_FOR_RENAMING.  */
1595 
static void
maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
		    bitmap suitable_for_renaming)
{
  /* Global Variables, result decls cannot be changed.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  if (TREE_ADDRESSABLE (var)
      /* Do not change TREE_ADDRESSABLE if we need to preserve var as
	 a non-register.  Otherwise we are confused and forget to
	 add virtual operands for it.  */
      && (!is_gimple_reg_type (TREE_TYPE (var))
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
    {
      /* The address is no longer taken anywhere in the function, so the
	 variable may become a gimple register.  */
      TREE_ADDRESSABLE (var) = 0;
      /* If we cleared TREE_ADDRESSABLE make sure DECL_GIMPLE_REG_P
         is unset if we cannot rewrite the var into SSA.  */
      if ((TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
	   || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
	  && bitmap_bit_p (not_reg_needs, DECL_UID (var)))
	DECL_GIMPLE_REG_P (var) = 0;
      if (is_gimple_reg (var))
	bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "No longer having address taken: ");
	  print_generic_expr (dump_file, var);
	  fprintf (dump_file, "\n");
	}
    }

  /* A non-volatile, non-hard-register complex or vector variable whose
     parts are never accessed as non-registers can be promoted to a
     gimple register and renamed into SSA.  */
  if (!DECL_GIMPLE_REG_P (var)
      && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (var)
      && (!VAR_P (var) || !DECL_HARD_REGISTER (var)))
    {
      DECL_GIMPLE_REG_P (var) = 1;
      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "Now a gimple register: ");
	  print_generic_expr (dump_file, var);
	  fprintf (dump_file, "\n");
	}
    }
}
1649 
1650 /* Return true when STMT is ASAN mark where second argument is an address
1651    of a local variable.  */
1652 
1653 static bool
is_asan_mark_p(gimple * stmt)1654 is_asan_mark_p (gimple *stmt)
1655 {
1656   if (!gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1657     return false;
1658 
1659   tree addr = get_base_address (gimple_call_arg (stmt, 1));
1660   if (TREE_CODE (addr) == ADDR_EXPR
1661       && VAR_P (TREE_OPERAND (addr, 0)))
1662     {
1663       tree var = TREE_OPERAND (addr, 0);
1664       if (lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1665 			    DECL_ATTRIBUTES (var)))
1666 	return false;
1667 
1668       unsigned addressable = TREE_ADDRESSABLE (var);
1669       TREE_ADDRESSABLE (var) = 0;
1670       bool r = is_gimple_reg (var);
1671       TREE_ADDRESSABLE (var) = addressable;
1672       return r;
1673     }
1674 
1675   return false;
1676 }
1677 
1678 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */
1679 
1680 void
execute_update_addresses_taken(void)1681 execute_update_addresses_taken (void)
1682 {
1683   basic_block bb;
1684   auto_bitmap addresses_taken;
1685   auto_bitmap not_reg_needs;
1686   auto_bitmap suitable_for_renaming;
1687   tree var;
1688   unsigned i;
1689 
1690   timevar_push (TV_ADDRESS_TAKEN);
1691 
1692   /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1693      the function body.  */
1694   FOR_EACH_BB_FN (bb, cfun)
1695     {
1696       for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1697 	   gsi_next (&gsi))
1698 	{
1699 	  gimple *stmt = gsi_stmt (gsi);
1700 	  enum gimple_code code = gimple_code (stmt);
1701 	  tree decl;
1702 
1703 	  if (code == GIMPLE_CALL)
1704 	    {
1705 	      if (optimize_atomic_compare_exchange_p (stmt))
1706 		{
1707 		  /* For __atomic_compare_exchange_N if the second argument
1708 		     is &var, don't mark var addressable;
1709 		     if it becomes non-addressable, we'll rewrite it into
1710 		     ATOMIC_COMPARE_EXCHANGE call.  */
1711 		  tree arg = gimple_call_arg (stmt, 1);
1712 		  gimple_call_set_arg (stmt, 1, null_pointer_node);
1713 		  gimple_ior_addresses_taken (addresses_taken, stmt);
1714 		  gimple_call_set_arg (stmt, 1, arg);
1715 		}
1716 	      else if (is_asan_mark_p (stmt)
1717 		       || gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
1718 		;
1719 	      else
1720 		gimple_ior_addresses_taken (addresses_taken, stmt);
1721 	    }
1722 	  else
1723 	    /* Note all addresses taken by the stmt.  */
1724 	    gimple_ior_addresses_taken (addresses_taken, stmt);
1725 
1726 	  /* If we have a call or an assignment, see if the lhs contains
1727 	     a local decl that requires not to be a gimple register.  */
1728 	  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1729 	    {
1730               tree lhs = gimple_get_lhs (stmt);
1731               if (lhs
1732 		  && TREE_CODE (lhs) != SSA_NAME
1733 		  && ((code == GIMPLE_CALL && ! DECL_P (lhs))
1734 		      || non_rewritable_lvalue_p (lhs)))
1735 		{
1736 		  decl = get_base_address (lhs);
1737 		  if (DECL_P (decl))
1738 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1739                 }
1740 	    }
1741 
1742 	  if (gimple_assign_single_p (stmt))
1743 	    {
1744 	      tree rhs = gimple_assign_rhs1 (stmt);
1745 	      if ((decl = non_rewritable_mem_ref_base (rhs)))
1746 		bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1747 	    }
1748 
1749 	  else if (code == GIMPLE_CALL)
1750 	    {
1751 	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
1752 		{
1753 		  tree arg = gimple_call_arg (stmt, i);
1754 		  if ((decl = non_rewritable_mem_ref_base (arg)))
1755 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1756 		}
1757 	    }
1758 
1759 	  else if (code == GIMPLE_ASM)
1760 	    {
1761 	      gasm *asm_stmt = as_a <gasm *> (stmt);
1762 	      for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1763 		{
1764 		  tree link = gimple_asm_output_op (asm_stmt, i);
1765 		  tree lhs = TREE_VALUE (link);
1766 		  if (TREE_CODE (lhs) != SSA_NAME)
1767 		    {
1768 		      decl = get_base_address (lhs);
1769 		      if (DECL_P (decl)
1770 			  && (non_rewritable_lvalue_p (lhs)
1771 			      /* We cannot move required conversions from
1772 				 the lhs to the rhs in asm statements, so
1773 				 require we do not need any.  */
1774 			      || !useless_type_conversion_p
1775 			            (TREE_TYPE (lhs), TREE_TYPE (decl))))
1776 			bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1777 		    }
1778 		}
1779 	      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1780 		{
1781 		  tree link = gimple_asm_input_op (asm_stmt, i);
1782 		  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
1783 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1784 		}
1785 	    }
1786 	}
1787 
1788       for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1789 	   gsi_next (&gsi))
1790 	{
1791 	  size_t i;
1792 	  gphi *phi = gsi.phi ();
1793 
1794 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
1795 	    {
1796 	      tree op = PHI_ARG_DEF (phi, i), var;
1797 	      if (TREE_CODE (op) == ADDR_EXPR
1798 		  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1799 		  && DECL_P (var))
1800 		bitmap_set_bit (addresses_taken, DECL_UID (var));
1801 	    }
1802 	}
1803     }
1804 
1805   /* We cannot iterate over all referenced vars because that can contain
1806      unused vars from BLOCK trees, which causes code generation differences
1807      for -g vs. -g0.  */
1808   for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1809     maybe_optimize_var (var, addresses_taken, not_reg_needs,
1810 			suitable_for_renaming);
1811 
1812   FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1813     maybe_optimize_var (var, addresses_taken, not_reg_needs,
1814 			suitable_for_renaming);
1815 
1816   /* Operand caches need to be recomputed for operands referencing the updated
1817      variables and operands need to be rewritten to expose bare symbols.  */
1818   if (!bitmap_empty_p (suitable_for_renaming))
1819     {
1820       FOR_EACH_BB_FN (bb, cfun)
1821 	for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1822 	  {
1823 	    gimple *stmt = gsi_stmt (gsi);
1824 
1825 	    /* Re-write TARGET_MEM_REFs of symbols we want to
1826 	       rewrite into SSA form.  */
1827 	    if (gimple_assign_single_p (stmt))
1828 	      {
1829 		tree lhs = gimple_assign_lhs (stmt);
1830 		tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
1831 		tree sym;
1832 
1833 		/* Rewrite LHS IMAG/REALPART_EXPR similar to
1834 		   gimplify_modify_expr_complex_part.  */
1835 		if ((TREE_CODE (lhs) == IMAGPART_EXPR
1836 		     || TREE_CODE (lhs) == REALPART_EXPR)
1837 		    && DECL_P (TREE_OPERAND (lhs, 0))
1838 		    && bitmap_bit_p (suitable_for_renaming,
1839 				     DECL_UID (TREE_OPERAND (lhs, 0))))
1840 		  {
1841 		    tree other = make_ssa_name (TREE_TYPE (lhs));
1842 		    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
1843 					? REALPART_EXPR : IMAGPART_EXPR,
1844 					TREE_TYPE (other),
1845 					TREE_OPERAND (lhs, 0));
1846 		    gimple *load = gimple_build_assign (other, lrhs);
1847 		    location_t loc = gimple_location (stmt);
1848 		    gimple_set_location (load, loc);
1849 		    gimple_set_vuse (load, gimple_vuse (stmt));
1850 		    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
1851 		    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
1852 		    gimple_assign_set_rhs_with_ops
1853 		      (&gsi, COMPLEX_EXPR,
1854 		       TREE_CODE (lhs) == IMAGPART_EXPR
1855 		       ? other : gimple_assign_rhs1 (stmt),
1856 		       TREE_CODE (lhs) == IMAGPART_EXPR
1857 		       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
1858 		    stmt = gsi_stmt (gsi);
1859 		    unlink_stmt_vdef (stmt);
1860 		    update_stmt (stmt);
1861 		    continue;
1862 		  }
1863 
1864 		/* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
1865 		   into a BIT_INSERT_EXPR.  */
1866 		if (TREE_CODE (lhs) == BIT_FIELD_REF
1867 		    && DECL_P (TREE_OPERAND (lhs, 0))
1868 		    && bitmap_bit_p (suitable_for_renaming,
1869 				     DECL_UID (TREE_OPERAND (lhs, 0)))
1870 		    && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
1871 		    && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
1872 		    && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
1873 					TYPE_SIZE_UNIT (TREE_TYPE
1874 					  (TREE_TYPE (TREE_OPERAND (lhs, 0)))),
1875 					0)
1876 		    && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
1877 			% tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
1878 		  {
1879 		    tree var = TREE_OPERAND (lhs, 0);
1880 		    tree val = gimple_assign_rhs1 (stmt);
1881 		    if (! types_compatible_p (TREE_TYPE (TREE_TYPE (var)),
1882 					      TREE_TYPE (val)))
1883 		      {
1884 			tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (var)));
1885 			gimple *pun
1886 			  = gimple_build_assign (tem,
1887 						 build1 (VIEW_CONVERT_EXPR,
1888 							 TREE_TYPE (tem), val));
1889 			gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
1890 			val = tem;
1891 		      }
1892 		    tree bitpos = TREE_OPERAND (lhs, 2);
1893 		    gimple_assign_set_lhs (stmt, var);
1894 		    gimple_assign_set_rhs_with_ops
1895 		      (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
1896 		    stmt = gsi_stmt (gsi);
1897 		    unlink_stmt_vdef (stmt);
1898 		    update_stmt (stmt);
1899 		    continue;
1900 		  }
1901 
1902 		/* Rewrite a vector insert using a MEM_REF on the LHS
1903 		   into a BIT_INSERT_EXPR.  */
1904 		if (TREE_CODE (lhs) == MEM_REF
1905 		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1906 		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1907 		    && DECL_P (sym)
1908 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
1909 		    && VECTOR_TYPE_P (TREE_TYPE (sym))
1910 		    && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
1911 		    /* If it is a full replacement we can do better below.  */
1912 		    && maybe_ne (wi::to_poly_offset
1913 				   (TYPE_SIZE_UNIT (TREE_TYPE (lhs))),
1914 				 wi::to_poly_offset
1915                                    (TYPE_SIZE_UNIT (TREE_TYPE (sym))))
1916 		    && known_ge (mem_ref_offset (lhs), 0)
1917 		    && known_gt (wi::to_poly_offset
1918 				   (TYPE_SIZE_UNIT (TREE_TYPE (sym))),
1919 				 mem_ref_offset (lhs))
1920 		    && multiple_of_p (sizetype,
1921 				      TREE_OPERAND (lhs, 1),
1922 				      TYPE_SIZE_UNIT (TREE_TYPE (lhs))))
1923 		  {
1924 		    tree val = gimple_assign_rhs1 (stmt);
1925 		    if (! types_compatible_p (TREE_TYPE (val),
1926 					      TREE_TYPE (TREE_TYPE (sym))))
1927 		      {
1928 			poly_uint64 lhs_bits, nelts;
1929 			tree temtype = TREE_TYPE (TREE_TYPE (sym));
1930 			if (poly_int_tree_p (TYPE_SIZE (TREE_TYPE (lhs)),
1931 					     &lhs_bits)
1932 			    && multiple_p (lhs_bits,
1933 					   tree_to_uhwi
1934 					     (TYPE_SIZE (TREE_TYPE
1935 							   (TREE_TYPE (sym)))),
1936 					   &nelts)
1937 			    && maybe_ne (nelts, 1u)
1938 			    && valid_vector_subparts_p (nelts))
1939 			  temtype = build_vector_type (temtype, nelts);
1940 			tree tem = make_ssa_name (temtype);
1941 			gimple *pun
1942 			  = gimple_build_assign (tem,
1943 						 build1 (VIEW_CONVERT_EXPR,
1944 							 TREE_TYPE (tem), val));
1945 			gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
1946 			val = tem;
1947 		      }
1948 		    tree bitpos
1949 		      = wide_int_to_tree (bitsizetype,
1950 					  mem_ref_offset (lhs) * BITS_PER_UNIT);
1951 		    gimple_assign_set_lhs (stmt, sym);
1952 		    gimple_assign_set_rhs_with_ops
1953 		      (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
1954 		    stmt = gsi_stmt (gsi);
1955 		    unlink_stmt_vdef (stmt);
1956 		    update_stmt (stmt);
1957 		    continue;
1958 		  }
1959 
1960 		/* We shouldn't have any fancy wrapping of
1961 		   component-refs on the LHS, but look through
1962 		   VIEW_CONVERT_EXPRs as that is easy.  */
1963 		while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1964 		  lhs = TREE_OPERAND (lhs, 0);
1965 		if (TREE_CODE (lhs) == MEM_REF
1966 		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1967 		    && integer_zerop (TREE_OPERAND (lhs, 1))
1968 		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1969 		    && DECL_P (sym)
1970 		    && !TREE_ADDRESSABLE (sym)
1971 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1972 		  lhs = sym;
1973 		else
1974 		  lhs = gimple_assign_lhs (stmt);
1975 
1976 		/* Rewrite the RHS and make sure the resulting assignment
1977 		   is validly typed.  */
1978 		maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
1979 		rhs = gimple_assign_rhs1 (stmt);
1980 		if (gimple_assign_lhs (stmt) != lhs
1981 		    && !useless_type_conversion_p (TREE_TYPE (lhs),
1982 						   TREE_TYPE (rhs)))
1983 		  {
1984 		    if (gimple_clobber_p (stmt))
1985 		      {
1986 			rhs = build_constructor (TREE_TYPE (lhs), NULL);
1987 			TREE_THIS_VOLATILE (rhs) = 1;
1988 		      }
1989 		    else
1990 		      rhs = fold_build1 (VIEW_CONVERT_EXPR,
1991 					 TREE_TYPE (lhs), rhs);
1992 		  }
1993 		if (gimple_assign_lhs (stmt) != lhs)
1994 		  gimple_assign_set_lhs (stmt, lhs);
1995 
1996 		if (gimple_assign_rhs1 (stmt) != rhs)
1997 		  {
1998 		    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1999 		    gimple_assign_set_rhs_from_tree (&gsi, rhs);
2000 		  }
2001 	      }
2002 
2003 	    else if (gimple_code (stmt) == GIMPLE_CALL)
2004 	      {
2005 		unsigned i;
2006 		if (optimize_atomic_compare_exchange_p (stmt))
2007 		  {
2008 		    tree expected = gimple_call_arg (stmt, 1);
2009 		    if (bitmap_bit_p (suitable_for_renaming,
2010 				      DECL_UID (TREE_OPERAND (expected, 0))))
2011 		      {
2012 			fold_builtin_atomic_compare_exchange (&gsi);
2013 			continue;
2014 		      }
2015 		  }
2016 		else if (is_asan_mark_p (stmt))
2017 		  {
2018 		    tree var = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
2019 		    if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
2020 		      {
2021 			unlink_stmt_vdef (stmt);
2022 			if (asan_mark_p (stmt, ASAN_MARK_POISON))
2023 			  {
2024 			    gcall *call
2025 			      = gimple_build_call_internal (IFN_ASAN_POISON, 0);
2026 			    gimple_call_set_lhs (call, var);
2027 			    gsi_replace (&gsi, call, GSI_SAME_STMT);
2028 			  }
2029 			else
2030 			  {
2031 			    /* In ASAN_MARK (UNPOISON, &b, ...) the variable
2032 			       is uninitialized.  Avoid dependencies on
2033 			       previous out of scope value.  */
2034 			    tree clobber = build_clobber (TREE_TYPE (var));
2035 			    gimple *g = gimple_build_assign (var, clobber);
2036 			    gsi_replace (&gsi, g, GSI_SAME_STMT);
2037 			  }
2038 			continue;
2039 		      }
2040 		  }
2041 		else if (gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
2042 		  for (i = 1; i < gimple_call_num_args (stmt); i++)
2043 		    {
2044 		      tree *argp = gimple_call_arg_ptr (stmt, i);
2045 		      if (*argp == null_pointer_node)
2046 			continue;
2047 		      gcc_assert (TREE_CODE (*argp) == ADDR_EXPR
2048 				  && VAR_P (TREE_OPERAND (*argp, 0)));
2049 		      tree var = TREE_OPERAND (*argp, 0);
2050 		      if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
2051 			*argp = null_pointer_node;
2052 		    }
2053 		for (i = 0; i < gimple_call_num_args (stmt); ++i)
2054 		  {
2055 		    tree *argp = gimple_call_arg_ptr (stmt, i);
2056 		    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
2057 		  }
2058 	      }
2059 
2060 	    else if (gimple_code (stmt) == GIMPLE_ASM)
2061 	      {
2062 		gasm *asm_stmt = as_a <gasm *> (stmt);
2063 		unsigned i;
2064 		for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
2065 		  {
2066 		    tree link = gimple_asm_output_op (asm_stmt, i);
2067 		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
2068 						suitable_for_renaming);
2069 		  }
2070 		for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
2071 		  {
2072 		    tree link = gimple_asm_input_op (asm_stmt, i);
2073 		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
2074 						suitable_for_renaming);
2075 		  }
2076 	      }
2077 
2078 	    else if (gimple_debug_bind_p (stmt)
2079 		     && gimple_debug_bind_has_value_p (stmt))
2080 	      {
2081 		tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
2082 		tree decl;
2083 		maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
2084 		decl = non_rewritable_mem_ref_base (*valuep);
2085 		if (decl
2086 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
2087 		  gimple_debug_bind_reset_value (stmt);
2088 	      }
2089 
2090 	    if (gimple_references_memory_p (stmt)
2091 		|| is_gimple_debug (stmt))
2092 	      update_stmt (stmt);
2093 
2094 	    gsi_next (&gsi);
2095 	  }
2096 
2097       /* Update SSA form here, we are called as non-pass as well.  */
2098       if (number_of_loops (cfun) > 1
2099 	  && loops_state_satisfies_p (LOOP_CLOSED_SSA))
2100 	rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
2101       else
2102 	update_ssa (TODO_update_ssa);
2103     }
2104 
2105   timevar_pop (TV_ADDRESS_TAKEN);
2106 }
2107 
2108 namespace {
2109 
/* Pass descriptor for the "addressables" pass.  Note that the pass
   itself has no execute method (see pass_update_address_taken below);
   all of the work is requested through TODO_update_address_taken in
   todo_flags_finish, which the pass manager processes after the pass
   "runs".  */

const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};
2122 
/* A gimple pass whose only effect comes from its todo_flags_finish
   (TODO_update_address_taken in pass_data_update_address_taken);
   presumably that flag causes the pass manager to invoke the address-
   taken updating machinery — confirm against the pass manager.  */

class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */
  /* Deliberately no gate or execute overrides — the defaults are
     used and the finish TODO flags do the work.  */

}; // class pass_update_address_taken
2133 
2134 } // anon namespace
2135 
2136 gimple_opt_pass *
make_pass_update_address_taken(gcc::context * ctxt)2137 make_pass_update_address_taken (gcc::context *ctxt)
2138 {
2139   return new pass_update_address_taken (ctxt);
2140 }
2141