1 /* Miscellaneous SSA utility functions.
2    Copyright (C) 2001-2020 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "cfghooks.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "diagnostic-core.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "gimple-fold.h"
34 #include "gimplify.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "tree-ssa-loop-manip.h"
38 #include "tree-into-ssa.h"
39 #include "tree-ssa.h"
40 #include "cfgloop.h"
41 #include "cfgexpand.h"
42 #include "tree-cfg.h"
43 #include "tree-dfa.h"
44 #include "stringpool.h"
45 #include "attribs.h"
46 #include "asan.h"
47 
48 /* Pointer map of variable mappings, keyed by edge.  */
49 static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
50 
51 
52 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */
53 
54 void
redirect_edge_var_map_add(edge e,tree result,tree def,location_t locus)55 redirect_edge_var_map_add (edge e, tree result, tree def, location_t locus)
56 {
57   edge_var_map new_node;
58 
59   if (edge_var_maps == NULL)
60     edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
61 
62   auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
63   new_node.def = def;
64   new_node.result = result;
65   new_node.locus = locus;
66 
67   slot.safe_push (new_node);
68 }
69 
70 
71 /* Clear the var mappings in edge E.  */
72 
73 void
redirect_edge_var_map_clear(edge e)74 redirect_edge_var_map_clear (edge e)
75 {
76   if (!edge_var_maps)
77     return;
78 
79   auto_vec<edge_var_map> *head = edge_var_maps->get (e);
80 
81   if (head)
82     head->release ();
83 }
84 
85 
86 /* Duplicate the redirected var mappings in OLDE in NEWE.
87 
88    This assumes a hash_map can have multiple edges mapping to the same
89    var_map (many to one mapping), since we don't remove the previous mappings.
90    */
91 
92 void
redirect_edge_var_map_dup(edge newe,edge olde)93 redirect_edge_var_map_dup (edge newe, edge olde)
94 {
95   if (!edge_var_maps)
96     return;
97 
98   auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
99   auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
100   if (!old_head)
101     return;
102 
103   new_head->safe_splice (*old_head);
104 }
105 
106 
107 /* Return the variable mappings for a given edge.  If there is none, return
108    NULL.  */
109 
110 vec<edge_var_map> *
redirect_edge_var_map_vector(edge e)111 redirect_edge_var_map_vector (edge e)
112 {
113   /* Hey, what kind of idiot would... you'd be surprised.  */
114   if (!edge_var_maps)
115     return NULL;
116 
117   auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
118   if (!slot)
119     return NULL;
120 
121   return slot;
122 }
123 
124 /* Clear the edge variable mappings.  */
125 
126 void
redirect_edge_var_map_empty(void)127 redirect_edge_var_map_empty (void)
128 {
129   if (edge_var_maps)
130     edge_var_maps->empty ();
131 }
132 
133 
134 /* Remove the corresponding arguments from the PHI nodes in E's
135    destination block and redirect it to DEST.  Return redirected edge.
136    The list of removed arguments is stored in a vector accessed
137    through edge_var_maps.  */
138 
139 edge
ssa_redirect_edge(edge e,basic_block dest)140 ssa_redirect_edge (edge e, basic_block dest)
141 {
142   gphi_iterator gsi;
143   gphi *phi;
144 
145   redirect_edge_var_map_clear (e);
146 
147   /* Remove the appropriate PHI arguments in E's destination block.
148      If we are redirecting a copied edge the destination has not
149      got PHI argument space reserved nor an interesting argument.  */
150   if (! (e->dest->flags & BB_DUPLICATED))
151     for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
152       {
153 	tree def;
154 	location_t locus;
155 
156 	phi = gsi.phi ();
157 	def = gimple_phi_arg_def (phi, e->dest_idx);
158 	locus = gimple_phi_arg_location (phi, e->dest_idx);
159 
160 	if (def == NULL_TREE)
161 	  continue;
162 
163 	redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
164       }
165 
166   e = redirect_edge_succ_nodup (e, dest);
167 
168   return e;
169 }
170 
171 
172 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
173    E->dest.  */
174 
175 void
flush_pending_stmts(edge e)176 flush_pending_stmts (edge e)
177 {
178   gphi *phi;
179   edge_var_map *vm;
180   int i;
181   gphi_iterator gsi;
182 
183   vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
184   if (!v)
185     return;
186 
187   for (gsi = gsi_start_phis (e->dest), i = 0;
188        !gsi_end_p (gsi) && v->iterate (i, &vm);
189        gsi_next (&gsi), i++)
190     {
191       tree def;
192 
193       phi = gsi.phi ();
194       def = redirect_edge_var_map_def (vm);
195       add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
196     }
197 
198   redirect_edge_var_map_clear (e);
199 }
200 
/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   an equivalent value.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have updated
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_BIND_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      /* STMT must really be the defining statement of its own LHS.  */
      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      /* Migrate debug binds of LHS to a debug temp now, before the
	 RHS is changed underneath them.  */
      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}
234 
235 
/* Given a tree for an expression for which we might want to emit
   locations or values in debug information (generally a variable, but
   we might deal with other kinds of trees in the future), return the
   tree that should be used as the variable of a DEBUG_BIND STMT or
   VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */

tree
target_for_debug_bind (tree var)
{
  if (!MAY_HAVE_DEBUG_BIND_STMTS)
    return NULL_TREE;

  /* For an SSA name, track the underlying symbol instead; anonymous
     SSA names have no symbol and are not tracked.  */
  if (TREE_CODE (var) == SSA_NAME)
    {
      var = SSA_NAME_VAR (var);
      if (var == NULL_TREE)
	return NULL_TREE;
    }

  /* Only user variables and parameters are tracked; virtual operands
     are not real program objects.  */
  if ((!VAR_P (var) || VAR_DECL_IS_VIRTUAL_OPERAND (var))
      && TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  /* A variable with a DECL_VALUE_EXPR is just a stand-in for that
     expression, so recurse on it.  */
  if (DECL_HAS_VALUE_EXPR_P (var))
    return target_for_debug_bind (DECL_VALUE_EXPR (var));

  /* Compiler-generated variables the user never sees.  */
  if (DECL_IGNORED_P (var))
    return NULL_TREE;

  /* var-tracking only tracks registers.  */
  if (!is_gimple_reg_type (TREE_TYPE (var)))
    return NULL_TREE;

  return var;
}
271 
272 /* Called via walk_tree, look for SSA_NAMEs that have already been
273    released.  */
274 
275 static tree
find_released_ssa_name(tree * tp,int * walk_subtrees,void * data_)276 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
277 {
278   struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
279 
280   if (wi && wi->is_lhs)
281     return NULL_TREE;
282 
283   if (TREE_CODE (*tp) == SSA_NAME)
284     {
285       if (SSA_NAME_IN_FREE_LIST (*tp))
286 	return *tp;
287 
288       *walk_subtrees = 0;
289     }
290   else if (IS_TYPE_OR_DECL_P (*tp))
291     *walk_subtrees = 0;
292 
293   return NULL_TREE;
294 }
295 
/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
   by other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  GSI, if non-NULL, both supplies the
   insertion point and identifies VAR's defining stmt (gsi_stmt of it);
   if NULL, SSA_NAME_DEF_STMT (VAR) is used.  */

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple *stmt;
  gimple *def_stmt = NULL;
  int usecount = 0;	/* Debug-bind use count; only 0, 1, >1 matter.  */
  tree value = NULL;	/* Replacement for VAR; NULL drops debug info.  */

  if (!MAY_HAVE_DEBUG_BIND_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
	continue;

      /* More than one debug use: no need to look further.  */
      if (usecount++)
	break;

      if (gimple_debug_bind_get_value (stmt) != var)
	{
	  /* Count this as an additional use, so as to make sure we
	     use a temp unless VAR's definition has a SINGLE_RHS that
	     can be shared.  */
	  usecount++;
	  break;
	}
    }

  /* No debug binds reference VAR; nothing to preserve.  */
  if (!usecount)
    return;

  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      /* A PHI can only be propagated if it is degenerate (all args
	 equal) and the single value survived SSA-name release.  */
      value = degenerate_phi_result (as_a <gphi *> (def_stmt));
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
	value = NULL;
      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
	 to.  */
      else if (value == error_mark_node)
	value = NULL;
    }
  else if (gimple_clobber_p (def_stmt))
    /* We can end up here when rewriting a decl into SSA and coming
       along a clobber for the original decl.  Turn that into
       # DEBUG decl => NULL  */
    value = NULL;
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
	{
	  struct walk_stmt_info wi;

	  memset (&wi, 0, sizeof (wi));

	  /* When removing blocks without following reverse dominance
	     order, we may sometimes encounter SSA_NAMEs that have
	     already been released, referenced in other SSA_DEFs that
	     we're about to release.  Consider:

	     <bb X>:
	     v_1 = foo;

	     <bb Y>:
	     w_2 = v_1 + bar;
	     # DEBUG w => w_2

	     If we deleted BB X first, propagating the value of w_2
	     won't do us any good.  It's too late to recover their
	     original definition of v_1: when it was deleted, it was
	     only referenced in other DEFs, it couldn't possibly know
	     it should have been retained, and propagating every
	     single DEF just in case it might have to be propagated
	     into a DEBUG STMT would probably be too wasteful.

	     When dominator information is not readily available, we
	     check for and accept some loss of debug information.  But
	     if it is available, there's no excuse for us to remove
	     blocks in the wrong order, so we don't even check for
	     dead SSA NAMEs.  SSA verification shall catch any
	     errors.  */
	  if ((!gsi && !gimple_bb (def_stmt))
	      || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
	    no_value = true;
	}

      if (!no_value)
	value = gimple_assign_rhs_to_tree (def_stmt);
    }

  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
	 expression (usecount would have been incremented again
	 otherwise), and the definition involves only constants and
	 SSA names, then we can propagate VALUE into this single use,
	 avoiding the temp.

	 We can also avoid using a temp if VALUE can be shared and
	 propagated into all uses, without generating expressions that
	 wouldn't be valid gimple RHSs.

	 Other cases that would require unsharing or non-gimple RHSs
	 are deferred to a debug temp, although we could avoid temps
	 at the expense of duplication of expressions.  */

      if (CONSTANT_CLASS_P (value)
	  || gimple_code (def_stmt) == GIMPLE_PHI
	  || (usecount == 1
	      && (!gimple_assign_single_p (def_stmt)
		  || is_gimple_min_invariant (value)))
	  || is_gimple_reg (value))
	;
      else
	{
	  /* Create a debug temp D.NNNN bound to VALUE, inserted right
	     before the definition, and propagate the temp instead.  */
	  gdebug *def_temp;
	  tree vexpr = make_node (DEBUG_EXPR_DECL);

	  def_temp = gimple_build_debug_bind (vexpr,
					      unshare_expr (value),
					      def_stmt);

	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (value);
	  if (DECL_P (value))
	    SET_DECL_MODE (vexpr, DECL_MODE (value));
	  else
	    SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (value)));

	  if (gsi)
	    gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
	  else
	    {
	      gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
	      gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
	    }

	  value = vexpr;
	}
    }

  /* Rewrite every debug bind that uses VAR: substitute VALUE, or
     reset the bind when no value could be recovered.  */
  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
	continue;

      if (value)
	{
	  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
	    /* unshare_expr is not needed here.  vexpr is either a
	       SINGLE_RHS, that can be safely shared, some other RHS
	       that was unshared when we found it had a single debug
	       use, or a DEBUG_EXPR_DECL, that can be safely
	       shared.  */
	    SET_USE (use_p, unshare_expr (value));
	  /* If we didn't replace uses with a debug decl fold the
	     resulting expression.  Otherwise we end up with invalid IL.  */
	  if (TREE_CODE (value) != DEBUG_EXPR_DECL)
	    {
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	      fold_stmt_inplace (&gsi);
	    }
	}
      else
	gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}
490 
491 
492 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
493    other DEBUG stmts, and replace uses of the DEF with the
494    newly-created debug temp.  */
495 
496 void
insert_debug_temps_for_defs(gimple_stmt_iterator * gsi)497 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
498 {
499   gimple *stmt;
500   ssa_op_iter op_iter;
501   def_operand_p def_p;
502 
503   if (!MAY_HAVE_DEBUG_BIND_STMTS)
504     return;
505 
506   stmt = gsi_stmt (*gsi);
507 
508   FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
509     {
510       tree var = DEF_FROM_PTR (def_p);
511 
512       if (TREE_CODE (var) != SSA_NAME)
513 	continue;
514 
515       insert_debug_temp_for_var_def (gsi, var);
516     }
517 }
518 
519 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT.  */
520 
521 void
reset_debug_uses(gimple * stmt)522 reset_debug_uses (gimple *stmt)
523 {
524   ssa_op_iter op_iter;
525   def_operand_p def_p;
526   imm_use_iterator imm_iter;
527   gimple *use_stmt;
528 
529   if (!MAY_HAVE_DEBUG_BIND_STMTS)
530     return;
531 
532   FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
533     {
534       tree var = DEF_FROM_PTR (def_p);
535 
536       if (TREE_CODE (var) != SSA_NAME)
537 	continue;
538 
539       FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
540 	{
541 	  if (!gimple_debug_bind_p (use_stmt))
542 	    continue;
543 
544 	  gimple_debug_bind_reset_value (use_stmt);
545 	  update_stmt (use_stmt);
546 	}
547     }
548 }
549 
/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
   dominated stmts before their dominators, so that release_ssa_defs
   stands a chance of propagating DEFs into debug bind stmts.  */

void
release_defs_bitset (bitmap toremove)
{
  unsigned j;
  bitmap_iterator bi;

  /* Performing a topological sort is probably overkill, this will
     most likely run in slightly superlinear time, rather than the
     pathological quadratic worst case.
     But iterate from max SSA name version to min one because
     that mimics allocation order during code generation behavior best.
     Use an array for this which we compact on-the-fly with a NULL
     marker moving towards the end of the vector.  */
  auto_vec<tree, 16> names;
  names.reserve (bitmap_count_bits (toremove) + 1);
  /* Slot 0 holds the NULL sentinel that terminates the downward scan
     in the loop below.  */
  names.quick_push (NULL_TREE);
  EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
    names.quick_push (ssa_name (j));

  /* Tree view makes the repeated bitmap_bit_p queries below cheap.  */
  bitmap_tree_view (toremove);
  /* Outer loop: repeat passes over the remaining names until all have
     been removed; deferred names are retried on the next pass.  */
  while (!bitmap_empty_p (toremove))
    {
      j = names.length () - 1;
      /* Scan from the highest version down until the NULL sentinel.  */
      for (unsigned i = names.length () - 1; names[i];)
	{
	  bool remove_now = true;
	  tree var = names[i];
	  gimple *stmt;
	  imm_use_iterator uit;

	  FOR_EACH_IMM_USE_STMT (stmt, uit, var)
	    {
	      ssa_op_iter dit;
	      def_operand_p def_p;

	      /* We can't propagate PHI nodes into debug stmts.  */
	      if (gimple_code (stmt) == GIMPLE_PHI
		  || is_gimple_debug (stmt))
		continue;

	      /* If we find another definition to remove that uses
		 the one we're looking at, defer the removal of this
		 one, so that it can be propagated into debug stmts
		 after the other is.  */
	      FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
		{
		  tree odef = DEF_FROM_PTR (def_p);

		  if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
		    {
		      remove_now = false;
		      break;
		    }
		}

	      if (!remove_now)
		BREAK_FROM_IMM_USE_STMT (uit);
	    }

	  if (remove_now)
	    {
	      gimple *def = SSA_NAME_DEF_STMT (var);
	      gimple_stmt_iterator gsi = gsi_for_stmt (def);

	      if (gimple_code (def) == GIMPLE_PHI)
		remove_phi_node (&gsi, true);
	      else
		{
		  gsi_remove (&gsi, true);
		  release_defs (def);
		}
	      bitmap_clear_bit (toremove, SSA_NAME_VERSION (var));
	    }
	  else
	    /* Deferred: keep VAR in the compacted array for the next
	       pass by re-examining slot I below.  */
	    --i;
	  /* Compact removed entries out of the array on the fly by
	     sliding the unprocessed tail element into slot I.  */
	  if (--j != i)
	    names[i] = names[j];
	}
    }
  bitmap_list_view (toremove);
}
635 
636 /* Disable warnings about missing quoting in GCC diagnostics for
637    the verification errors.  Their format strings don't follow GCC
638    diagnostic conventions and the calls are ultimately followed by
639    one to internal_error.  */
640 #if __GNUC__ >= 10
641 #  pragma GCC diagnostic push
642 #  pragma GCC diagnostic ignored "-Wformat-diag"
643 #endif
644 
645 /* Verify virtual SSA form.  */
646 
647 bool
verify_vssa(basic_block bb,tree current_vdef,sbitmap visited)648 verify_vssa (basic_block bb, tree current_vdef, sbitmap visited)
649 {
650   bool err = false;
651 
652   if (bitmap_bit_p (visited, bb->index))
653     return false;
654 
655   bitmap_set_bit (visited, bb->index);
656 
657   /* Pick up the single virtual PHI def.  */
658   gphi *phi = NULL;
659   for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
660        gsi_next (&si))
661     {
662       tree res = gimple_phi_result (si.phi ());
663       if (virtual_operand_p (res))
664 	{
665 	  if (phi)
666 	    {
667 	      error ("multiple virtual PHI nodes in BB %d", bb->index);
668 	      print_gimple_stmt (stderr, phi, 0);
669 	      print_gimple_stmt (stderr, si.phi (), 0);
670 	      err = true;
671 	    }
672 	  else
673 	    phi = si.phi ();
674 	}
675     }
676   if (phi)
677     {
678       current_vdef = gimple_phi_result (phi);
679       if (TREE_CODE (current_vdef) != SSA_NAME)
680 	{
681 	  error ("virtual definition is not an SSA name");
682 	  print_gimple_stmt (stderr, phi, 0);
683 	  err = true;
684 	}
685     }
686 
687   /* Verify stmts.  */
688   for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
689        gsi_next (&gsi))
690     {
691       gimple *stmt = gsi_stmt (gsi);
692       tree vuse = gimple_vuse (stmt);
693       if (vuse)
694 	{
695 	  if (vuse != current_vdef)
696 	    {
697 	      error ("stmt with wrong VUSE");
698 	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
699 	      fprintf (stderr, "expected ");
700 	      print_generic_expr (stderr, current_vdef);
701 	      fprintf (stderr, "\n");
702 	      err = true;
703 	    }
704 	  tree vdef = gimple_vdef (stmt);
705 	  if (vdef)
706 	    {
707 	      current_vdef = vdef;
708 	      if (TREE_CODE (current_vdef) != SSA_NAME)
709 		{
710 		  error ("virtual definition is not an SSA name");
711 		  print_gimple_stmt (stderr, phi, 0);
712 		  err = true;
713 		}
714 	    }
715 	}
716     }
717 
718   /* Verify destination PHI uses and recurse.  */
719   edge_iterator ei;
720   edge e;
721   FOR_EACH_EDGE (e, ei, bb->succs)
722     {
723       gphi *phi = get_virtual_phi (e->dest);
724       if (phi
725 	  && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef)
726 	{
727 	  error ("PHI node with wrong VUSE on edge from BB %d",
728 		 e->src->index);
729 	  print_gimple_stmt (stderr, phi, 0, TDF_VOPS);
730 	  fprintf (stderr, "expected ");
731 	  print_generic_expr (stderr, current_vdef);
732 	  fprintf (stderr, "\n");
733 	  err = true;
734 	}
735 
736       /* Recurse.  */
737       err |= verify_vssa (e->dest, current_vdef, visited);
738     }
739 
740   return err;
741 }
742 
/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
      operand.  Each check below reports its own diagnostic and bails
      out at the first failure.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  /* A name in the free list has been released and must not appear in
     the IL any more.  */
  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  /* A name with an associated symbol must share its type.  */
  if (SSA_NAME_VAR (ssa_name) != NULL_TREE
      && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  /* Virtual-operand context and operand kind must agree, both ways.  */
  if (is_virtual && !virtual_operand_p (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
    {
      error ("virtual SSA name for non-VOP decl");
      return true;
    }

  if (!is_virtual && virtual_operand_p (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  /* Default definitions are the only names allowed an empty (nop)
     defining statement.  */
  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}
797 
798 
/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
      it means that the block in that array slot contains the
      definition of SSA_NAME.  As a side effect this function records
      BB in that slot, so a second definition of the same name is
      diagnosed.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
	    gimple *stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  /* A DECL_BY_REFERENCE RESULT_DECL is addressable storage and must
     never be written through an SSA name.  */
  if (SSA_NAME_VAR (ssa_name)
      && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
      && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
    {
      error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
      goto err;
    }

  /* Each SSA name must be defined exactly once.  */
  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
	     definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  /* The back-link from the name to its defining stmt must be in sync
     with where we actually found the definition.  */
  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  /* Common diagnostic trailer: show the name and stmt being checked.  */
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}
854 
855 
/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
      is flowing through an abnormal edge (only used when checking PHI
      arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
     that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
	    gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  /* Check the immediate-use list only once per name; TREE_VISITED
     tracks which names have already been checked.  */
  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
	   && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      /* A use must be dominated by its definition.  */
      error ("definition in block %i does not dominate use in block %i",
	     def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
	   && names_defined_in_bb != NULL
	   && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      /* Within a block, the definition must precede the use.  */
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      /* The list head node has a NULL use pointer and stores the name
	 itself; ordinary nodes carry a real use operand.  */
      if (use_p->prev->use == NULL)
	listvar = use_p->prev->loc.ssa_name;
      else
	listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
        {
	  error ("wrong immediate use list");
	  err = true;
	}
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}
944 
945 
/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
      it means that the block in that array slot contains the
      definition of SSA_NAME.  */

static bool
verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  /* PHI arguments correspond one-to-one with incoming edges.  */
  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      /* Argument I flows in through the I-th predecessor edge.  */
      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
	{
	  error ("PHI argument is missing for edge %d->%d",
	         e->src->index,
		 e->dest->index);
	  err = true;
	  goto error;
	}

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
	{
	  error ("PHI argument is not SSA_NAME, or invariant");
	  err = true;
	}

      /* An SSA-name argument must itself be well-formed and its use
	 must be valid at the end of the incoming edge's source.  */
      if (TREE_CODE (op) == SSA_NAME)
	{
	  err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
	  err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
			     op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
	}

      /* Taking a decl's address in a PHI argument requires the decl to
	 be marked addressable.  */
      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  tree base = TREE_OPERAND (op, 0);
	  while (handled_component_p (base))
	    base = TREE_OPERAND (base, 0);
	  if ((VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	      && !TREE_ADDRESSABLE (base))
	    {
	      error ("address taken, but ADDRESSABLE bit not set");
	      err = true;
	    }
	}

      if (e->dest != bb)
	{
	  error ("wrong edge %d->%d for PHI argument",
	         e->src->index, e->dest->index);
	  err = true;
	}

      if (err)
	{
	  fprintf (stderr, "PHI argument\n");
	  print_generic_stmt (stderr, op, TDF_VOPS);
	  goto error;
	}
    }

error:
  /* Common diagnostic trailer: show the PHI being checked.  */
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }


  return err;
}
1037 
1038 
1039 /* Verify common invariants in the SSA web.
1040    TODO: verify the variable annotations.  */
1041 
DEBUG_FUNCTION void
verify_ssa(bool check_modified_stmt,bool check_ssa_operands)1043 verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
{
  basic_block bb;
  /* Indexed by SSA name version: the basic block containing the name's
     defining statement.  Filled by verify_def, consulted by verify_use.  */
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  /* Remember the prior dominance-info state so it can be restored before
     returning (we may compute it below just for verification).  */
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  auto_bitmap names_defined_in_bb;

  /* Def/use checking is meaningless while an SSA update is pending.  */
  gcc_assert (!need_ssa_update_p (cfun));

  timevar_push (TV_TREE_SSA_VERIFY);

    {
      /* Keep track of SSA names present in the IL.  */
      size_t i;
      tree name;
      /* Maps a ptr-info/range-info structure to the first SSA name seen
	 carrying it, so that erroneous sharing of the structure between
	 two names is detected.  */
      hash_map <void *, tree> ssa_info;

      FOR_EACH_SSA_NAME (i, name, cfun)
	{
	  gimple *stmt;
	  /* Reset the per-name visited flag used by the verification
	     helpers below.  */
	  TREE_VISITED (name) = 0;

	  verify_ssa_name (name, virtual_operand_p (name));

	  stmt = SSA_NAME_DEF_STMT (name);
	  if (!gimple_nop_p (stmt))
	    {
	      basic_block bb = gimple_bb (stmt);
	      if (verify_def (bb, definition_block,
			      name, stmt, virtual_operand_p (name)))
		goto err;
	    }

	  /* Pointer names carry points-to info, integral names carry
	     range info; either structure must be owned by one name only.  */
	  void *info = NULL;
	  if (POINTER_TYPE_P (TREE_TYPE (name)))
	    info = SSA_NAME_PTR_INFO (name);
	  else if (INTEGRAL_TYPE_P (TREE_TYPE (name)))
	    info = SSA_NAME_RANGE_INFO (name);
	  if (info)
	    {
	      bool existed;
	      tree &val = ssa_info.get_or_insert (info, &existed);
	      if (existed)
		{
		  error ("shared SSA name info");
		  print_generic_expr (stderr, val);
		  fprintf (stderr, " and ");
		  print_generic_expr (stderr, name);
		  fprintf (stderr, "\n");
		  goto err;
		}
	      else
		val = name;
	    }
	}
    }

  /* Use verification needs dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->aux)
	    {
	      error ("AUX pointer initialized for edge %d->%d", e->src->index,
		      e->dest->index);
	      goto err;
	    }
	}

      /* Verify the arguments for every PHI node in the block.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  if (verify_phi_args (phi, bb, definition_block))
	    goto err;

	  /* The PHI result counts as defined in this block for the
	     local-use checks below.  */
	  bitmap_set_bit (names_defined_in_bb,
			  SSA_NAME_VERSION (gimple_phi_result (phi)));
	}

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  use_operand_p use_p;

	  if (check_modified_stmt && gimple_modified_p (stmt))
	    {
	      error ("stmt (%p) marked modified after optimization pass: ",
		     (void *)stmt);
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
	    {
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  /* Debug binds without a value have nothing to verify here.  */
	  if (gimple_debug_bind_p (stmt)
	      && !gimple_debug_bind_has_value_p (stmt))
	    continue;

	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
	    {
	      op = USE_FROM_PTR (use_p);
	      if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
			      use_p, stmt, false, names_defined_in_bb))
		goto err;
	    }

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
	    {
	      /* Every name defined by STMT must point back at STMT.  */
	      if (SSA_NAME_DEF_STMT (op) != stmt)
		{
		  error ("SSA_NAME_DEF_STMT is wrong");
		  fprintf (stderr, "Expected definition statement:\n");
		  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
		  fprintf (stderr, "\nActual definition statement:\n");
		  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
				     4, TDF_VOPS);
		  goto err;
		}
	      bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
	    }
	}

      bitmap_clear (names_defined_in_bb);
    }

  free (definition_block);

  /* If the function has virtual operands, additionally verify the
     virtual use-def chain starting from the default definition.  */
  if (gimple_vop (cfun)
      && ssa_default_def (cfun, gimple_vop (cfun)))
    {
      auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
      bitmap_clear (visited);
      if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun),
		       ssa_default_def (cfun, gimple_vop (cfun)), visited))
	goto err;
    }

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  /* No cleanup on the failure path; internal_error does not return.  */
  internal_error ("verify_ssa failed");
}
1210 
1211 #if __GNUC__ >= 10
1212 #  pragma GCC diagnostic pop
1213 #endif
1214 
1215 /* Initialize global DFA and SSA structures.  */
1216 
void
init_tree_ssa(struct function * fn)1218 init_tree_ssa (struct function *fn)
{
  /* Allocate the per-function gimple dataflow structure in GC memory,
     zero-initialized.  */
  fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
  /* Hash table mapping symbols to their default-definition SSA names.  */
  fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
  /* Start with an empty points-to solution for escaped memory.  */
  pt_solution_reset (&fn->gimple_df->escaped);
  init_ssanames (fn, 0);
}
1225 
1226 /* Deallocate memory associated with SSA data structures for FNDECL.  */
1227 
1228 void
delete_tree_ssa(struct function * fn)1229 delete_tree_ssa (struct function *fn)
1230 {
1231   fini_ssanames (fn);
1232 
1233   /* We no longer maintain the SSA operand cache at this point.  */
1234   if (ssa_operands_active (fn))
1235     fini_ssa_operands (fn);
1236 
1237   fn->gimple_df->default_defs->empty ();
1238   fn->gimple_df->default_defs = NULL;
1239   pt_solution_reset (&fn->gimple_df->escaped);
1240   if (fn->gimple_df->decls_to_pointers != NULL)
1241     delete fn->gimple_df->decls_to_pointers;
1242   fn->gimple_df->decls_to_pointers = NULL;
1243   fn->gimple_df = NULL;
1244 
1245   /* We no longer need the edge variable maps.  */
1246   redirect_edge_var_map_empty ();
1247 }
1248 
1249 /* Return true if EXPR is a useless type conversion, otherwise return
1250    false.  */
1251 
1252 bool
tree_ssa_useless_type_conversion(tree expr)1253 tree_ssa_useless_type_conversion (tree expr)
1254 {
1255   /* If we have an assignment that merely uses a NOP_EXPR to change
1256      the top of the RHS to the type of the LHS and the type conversion
1257      is "safe", then strip away the type conversion so that we can
1258      enter LHS = RHS into the const_and_copies table.  */
1259   if (CONVERT_EXPR_P (expr)
1260       || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1261       || TREE_CODE (expr) == NON_LVALUE_EXPR)
1262     return useless_type_conversion_p
1263       (TREE_TYPE (expr),
1264        TREE_TYPE (TREE_OPERAND (expr, 0)));
1265 
1266   return false;
1267 }
1268 
1269 /* Strip conversions from EXP according to
1270    tree_ssa_useless_type_conversion and return the resulting
1271    expression.  */
1272 
1273 tree
tree_ssa_strip_useless_type_conversions(tree exp)1274 tree_ssa_strip_useless_type_conversions (tree exp)
1275 {
1276   while (tree_ssa_useless_type_conversion (exp))
1277     exp = TREE_OPERAND (exp, 0);
1278   return exp;
1279 }
1280 
1281 /* Return true if T, as SSA_NAME, has an implicit default defined value.  */
1282 
1283 bool
ssa_defined_default_def_p(tree t)1284 ssa_defined_default_def_p (tree t)
1285 {
1286   tree var = SSA_NAME_VAR (t);
1287 
1288   if (!var)
1289     ;
1290   /* Parameters get their initial value from the function entry.  */
1291   else if (TREE_CODE (var) == PARM_DECL)
1292     return true;
1293   /* When returning by reference the return address is actually a hidden
1294      parameter.  */
1295   else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1296     return true;
1297   /* Hard register variables get their initial value from the ether.  */
1298   else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1299     return true;
1300 
1301   return false;
1302 }
1303 
1304 
1305 /* Return true if T, an SSA_NAME, has an undefined value.  PARTIAL is what
1306    should be returned if the value is only partially undefined.  */
1307 
1308 bool
ssa_undefined_value_p(tree t,bool partial)1309 ssa_undefined_value_p (tree t, bool partial)
1310 {
1311   gimple *def_stmt;
1312 
1313   if (ssa_defined_default_def_p (t))
1314     return false;
1315 
1316   /* The value is undefined iff its definition statement is empty.  */
1317   def_stmt = SSA_NAME_DEF_STMT (t);
1318   if (gimple_nop_p (def_stmt))
1319     return true;
1320 
1321   /* Check if the complex was not only partially defined.  */
1322   if (partial && is_gimple_assign (def_stmt)
1323       && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
1324     {
1325       tree rhs1, rhs2;
1326 
1327       rhs1 = gimple_assign_rhs1 (def_stmt);
1328       rhs2 = gimple_assign_rhs2 (def_stmt);
1329       return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
1330 	     || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
1331     }
1332   return false;
1333 }
1334 
1335 
1336 /* Return TRUE iff STMT, a gimple statement, references an undefined
1337    SSA name.  */
1338 
1339 bool
gimple_uses_undefined_value_p(gimple * stmt)1340 gimple_uses_undefined_value_p (gimple *stmt)
1341 {
1342   ssa_op_iter iter;
1343   tree op;
1344 
1345   FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
1346     if (ssa_undefined_value_p (op))
1347       return true;
1348 
1349   return false;
1350 }
1351 
1352 
1353 
1354 /* If necessary, rewrite the base of the reference tree *TP from
1355    a MEM_REF to a plain or converted symbol.  */
1356 
static void
maybe_rewrite_mem_ref_base(tree * tp,bitmap suitable_for_renaming)1358 maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  /* Look through component references to the innermost base.  */
  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  /* Only rewrite MEM_REF (&SYM, off) where SYM is a non-addressable decl
     selected for renaming and the access has non-void register type.  */
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
      && is_gimple_reg_type (TREE_TYPE (*tp))
      && ! VOID_TYPE_P (TREE_TYPE (*tp)))
    {
      /* An element-aligned access of a vector decl becomes a
	 BIT_FIELD_REF; the byte offset is scaled into bits.  */
      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
	  && useless_type_conversion_p (TREE_TYPE (*tp),
					TREE_TYPE (TREE_TYPE (sym)))
	  && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
	{
	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
			TYPE_SIZE (TREE_TYPE (*tp)),
			int_const_binop (MULT_EXPR,
					 bitsize_int (BITS_PER_UNIT),
					 TREE_OPERAND (*tp, 1)));
	}
      /* A component access of a complex decl becomes REALPART_EXPR
	 (offset zero) or IMAGPART_EXPR (any other offset).  */
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
	       && useless_type_conversion_p (TREE_TYPE (*tp),
					     TREE_TYPE (TREE_TYPE (sym))))
	{
	  *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
			? REALPART_EXPR : IMAGPART_EXPR,
			TREE_TYPE (*tp), sym);
	}
      /* A full-size, zero-offset access becomes the bare symbol, wrapped
	 in a VIEW_CONVERT_EXPR when the types are not interchangeable.  */
      else if (integer_zerop (TREE_OPERAND (*tp, 1))
	       && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp)))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (*tp),
					  TREE_TYPE (sym)))
	    *tp = build1 (VIEW_CONVERT_EXPR,
			  TREE_TYPE (*tp), sym);
	  else
	    *tp = sym;
	}
      /* Otherwise an in-bounds, byte-aligned partial access of the decl
	 becomes a BIT_FIELD_REF extraction.  Bitfield-precision integral
	 accesses are excluded (the precision check).  */
      else if (DECL_SIZE (sym)
	       && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
	       && (known_subrange_p
		   (mem_ref_offset (*tp),
		    wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
		    0, wi::to_offset (DECL_SIZE_UNIT (sym))))
	       && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
		   || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
		       == TYPE_PRECISION (TREE_TYPE (*tp))))
	       && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))),
				  BITS_PER_UNIT) == 0)
	{
	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
			TYPE_SIZE (TREE_TYPE (*tp)),
			wide_int_to_tree (bitsizetype,
					  mem_ref_offset (*tp)
					  << LOG2_BITS_PER_UNIT));
	}
    }
}
1423 
1424 /* For a tree REF return its base if it is the base of a MEM_REF
1425    that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */
1426 
static tree
non_rewritable_mem_ref_base(tree ref)1428 non_rewritable_mem_ref_base (tree ref)
{
  tree base;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  /* If stripping invariant references yields nothing, fall back to the
     base address; a decl base must then stay addressable.  */
  if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref))))
    {
      base = get_base_address (ref);
      if (DECL_P (base))
	return base;
      return NULL_TREE;
    }

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if (! DECL_P (decl))
	return NULL_TREE;
      /* Non-register-typed, void, or volatile-mismatched accesses
	 cannot be rewritten at all.  */
      if (! is_gimple_reg_type (TREE_TYPE (base))
	  || VOID_TYPE_P (TREE_TYPE (base))
	  || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base))
	return decl;
      /* An in-bounds, element-aligned access of a vector or complex decl
	 is rewritable (cf. maybe_rewrite_mem_ref_base).  */
      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
	  && useless_type_conversion_p (TREE_TYPE (base),
					TREE_TYPE (TREE_TYPE (decl)))
	  && known_ge (mem_ref_offset (base), 0)
	  && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
		       mem_ref_offset (base))
	  && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (base))))
	return NULL_TREE;
      /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR.  */
      if (integer_zerop (TREE_OPERAND (base, 1))
	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base)))
	return NULL_TREE;
      /* For integral typed extracts we can use a BIT_FIELD_REF.  */
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
	  && (known_subrange_p
	      (mem_ref_offset (base),
	       wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
	       0, wi::to_poly_offset (DECL_SIZE_UNIT (decl))))
	  /* ???  We can't handle bitfield precision extracts without
	     either using an alternate type for the BIT_FIELD_REF and
	     then doing a conversion or possibly adjusting the offset
	     according to endianness.  */
	  && (! INTEGRAL_TYPE_P (TREE_TYPE (base))
	      || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base)))
		  == TYPE_PRECISION (TREE_TYPE (base))))
	  && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))),
			     BITS_PER_UNIT) == 0)
	return NULL_TREE;
      /* Everything else keeps DECL non-rewritable.  */
      return decl;
    }

  return NULL_TREE;
}
1492 
/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */
1495 
static bool
non_rewritable_lvalue_p(tree lhs)1497 non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
     a reasonably efficient manner... */
  if ((TREE_CODE (lhs) == REALPART_EXPR
       || TREE_CODE (lhs) == IMAGPART_EXPR)
      && DECL_P (TREE_OPERAND (lhs, 0)))
    return false;

  /* ???  The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);

      /* A decl that is wrapped inside a MEM-REF that covers
	 it in full is also rewritable.  */
      if (integer_zerop (TREE_OPERAND (lhs, 1))
	  && DECL_P (decl)
	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
	  /* If the dynamic type of the decl has larger precision than
	     the decl itself we can't use the decls type for SSA rewriting.  */
	  && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
	       || compare_tree_int (DECL_SIZE (decl),
				    TYPE_PRECISION (TREE_TYPE (decl))) == 0)
	      || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		  && (TYPE_PRECISION (TREE_TYPE (decl))
		      >= TYPE_PRECISION (TREE_TYPE (lhs)))))
	  /* Make sure we are not re-writing non-float copying into float
	     copying as that can incur normalization.  */
	  && (! FLOAT_TYPE_P (TREE_TYPE (decl))
	      || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
	  && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
	return false;

      /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
	 using a BIT_INSERT_EXPR.  The store must be in bounds of the
	 vector decl and aligned to the stored element size.  */
      if (DECL_P (decl)
	  && VECTOR_TYPE_P (TREE_TYPE (decl))
	  && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
	  && known_ge (mem_ref_offset (lhs), 0)
	  && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
		       mem_ref_offset (lhs))
	  && multiple_of_p (sizetype, TREE_OPERAND (lhs, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (lhs)))
	  && known_ge (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (decl))),
		       wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (lhs)))))
	{
	  poly_uint64 lhs_bits, nelts;
	  if (poly_int_tree_p (TYPE_SIZE (TREE_TYPE (lhs)), &lhs_bits)
	      && multiple_p (lhs_bits,
			     tree_to_uhwi
			       (TYPE_SIZE (TREE_TYPE (TREE_TYPE (decl)))),
			     &nelts)
	      && valid_vector_subparts_p (nelts))
	    {
	      /* A single-element insert is always representable.  */
	      if (known_eq (nelts, 1u))
		return false;
	      /* For sub-vector inserts the insert vector mode has to be
		 supported.  */
	      tree vtype = build_vector_type (TREE_TYPE (TREE_TYPE (decl)),
					      nelts);
	      if (TYPE_MODE (vtype) != BLKmode)
		return false;
	    }
	}
    }

  /* A vector-insert using a BIT_FIELD_REF is rewritable using
     BIT_INSERT_EXPR.  */
  if (TREE_CODE (lhs) == BIT_FIELD_REF
      && DECL_P (TREE_OPERAND (lhs, 0))
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
      && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
      && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
			  TYPE_SIZE_UNIT
			    (TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0)))), 0)
      && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
	  % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
    return false;

  /* Anything else cannot be rewritten into SSA form.  */
  return true;
}
1585 
/* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
   mark the variable VAR for conversion into SSA.  Variables that become
   renameable are recorded in SUITABLE_FOR_RENAMING.  */
1589 
static void
maybe_optimize_var(tree var,bitmap addresses_taken,bitmap not_reg_needs,bitmap suitable_for_renaming)1591 maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
		    bitmap suitable_for_renaming)
{
  /* Global Variables, result decls cannot be changed.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  if (TREE_ADDRESSABLE (var)
      /* Do not change TREE_ADDRESSABLE if we need to preserve var as
	 a non-register.  Otherwise we are confused and forget to
	 add virtual operands for it.  */
      && (!is_gimple_reg_type (TREE_TYPE (var))
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
    {
      TREE_ADDRESSABLE (var) = 0;
      /* If we cleared TREE_ADDRESSABLE make sure DECL_GIMPLE_REG_P
         is unset if we cannot rewrite the var into SSA.  */
      if ((TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
	   || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
	  && bitmap_bit_p (not_reg_needs, DECL_UID (var)))
	DECL_GIMPLE_REG_P (var) = 0;
      /* Only variables that now qualify as gimple registers become
	 candidates for SSA renaming.  */
      if (is_gimple_reg (var))
	bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "No longer having address taken: ");
	  print_generic_expr (dump_file, var);
	  fprintf (dump_file, "\n");
	}
    }

  /* Promote non-volatile, non-hard-register complex/vector variables
     with no "must stay in memory" uses to gimple registers.  */
  if (!DECL_GIMPLE_REG_P (var)
      && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (var)
      && (!VAR_P (var) || !DECL_HARD_REGISTER (var)))
    {
      DECL_GIMPLE_REG_P (var) = 1;
      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "Now a gimple register: ");
	  print_generic_expr (dump_file, var);
	  fprintf (dump_file, "\n");
	}
    }
}
1643 
1644 /* Return true when STMT is ASAN mark where second argument is an address
1645    of a local variable.  */
1646 
static bool
is_asan_mark_p(gimple * stmt)1648 is_asan_mark_p (gimple *stmt)
{
  if (!gimple_call_internal_p (stmt, IFN_ASAN_MARK))
    return false;

  /* The second argument of an ASAN_MARK call is the marked address.  */
  tree addr = get_base_address (gimple_call_arg (stmt, 1));
  if (TREE_CODE (addr) == ADDR_EXPR
      && VAR_P (TREE_OPERAND (addr, 0)))
    {
      tree var = TREE_OPERAND (addr, 0);
      /* Variables already carrying the use-after-scope attribute are
	 not interesting here.  */
      if (lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			    DECL_ATTRIBUTES (var)))
	return false;

      /* Temporarily clear TREE_ADDRESSABLE so that is_gimple_reg answers
	 as if the address were not taken, then restore the bit.  */
      unsigned addressable = TREE_ADDRESSABLE (var);
      TREE_ADDRESSABLE (var) = 0;
      bool r = is_gimple_reg (var);
      TREE_ADDRESSABLE (var) = addressable;
      return r;
    }

  return false;
}
1671 
1672 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */
1673 
1674 void
execute_update_addresses_taken(void)1675 execute_update_addresses_taken (void)
1676 {
1677   basic_block bb;
1678   auto_bitmap addresses_taken;
1679   auto_bitmap not_reg_needs;
1680   auto_bitmap suitable_for_renaming;
1681   tree var;
1682   unsigned i;
1683 
1684   timevar_push (TV_ADDRESS_TAKEN);
1685 
1686   /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1687      the function body.  */
1688   FOR_EACH_BB_FN (bb, cfun)
1689     {
1690       for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1691 	   gsi_next (&gsi))
1692 	{
1693 	  gimple *stmt = gsi_stmt (gsi);
1694 	  enum gimple_code code = gimple_code (stmt);
1695 	  tree decl;
1696 
1697 	  if (code == GIMPLE_CALL)
1698 	    {
1699 	      if (optimize_atomic_compare_exchange_p (stmt))
1700 		{
1701 		  /* For __atomic_compare_exchange_N if the second argument
1702 		     is &var, don't mark var addressable;
1703 		     if it becomes non-addressable, we'll rewrite it into
1704 		     ATOMIC_COMPARE_EXCHANGE call.  */
1705 		  tree arg = gimple_call_arg (stmt, 1);
1706 		  gimple_call_set_arg (stmt, 1, null_pointer_node);
1707 		  gimple_ior_addresses_taken (addresses_taken, stmt);
1708 		  gimple_call_set_arg (stmt, 1, arg);
1709 		}
1710 	      else if (is_asan_mark_p (stmt)
1711 		       || gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
1712 		;
1713 	      else
1714 		gimple_ior_addresses_taken (addresses_taken, stmt);
1715 	    }
1716 	  else
1717 	    /* Note all addresses taken by the stmt.  */
1718 	    gimple_ior_addresses_taken (addresses_taken, stmt);
1719 
1720 	  /* If we have a call or an assignment, see if the lhs contains
1721 	     a local decl that requires not to be a gimple register.  */
1722 	  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1723 	    {
1724               tree lhs = gimple_get_lhs (stmt);
1725               if (lhs
1726 		  && TREE_CODE (lhs) != SSA_NAME
1727 		  && ((code == GIMPLE_CALL && ! DECL_P (lhs))
1728 		      || non_rewritable_lvalue_p (lhs)))
1729 		{
1730 		  decl = get_base_address (lhs);
1731 		  if (DECL_P (decl))
1732 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1733                 }
1734 	    }
1735 
1736 	  if (gimple_assign_single_p (stmt))
1737 	    {
1738 	      tree rhs = gimple_assign_rhs1 (stmt);
1739 	      if ((decl = non_rewritable_mem_ref_base (rhs)))
1740 		bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1741 	    }
1742 
1743 	  else if (code == GIMPLE_CALL)
1744 	    {
1745 	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
1746 		{
1747 		  tree arg = gimple_call_arg (stmt, i);
1748 		  if ((decl = non_rewritable_mem_ref_base (arg)))
1749 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1750 		}
1751 	    }
1752 
1753 	  else if (code == GIMPLE_ASM)
1754 	    {
1755 	      gasm *asm_stmt = as_a <gasm *> (stmt);
1756 	      for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1757 		{
1758 		  tree link = gimple_asm_output_op (asm_stmt, i);
1759 		  tree lhs = TREE_VALUE (link);
1760 		  if (TREE_CODE (lhs) != SSA_NAME)
1761 		    {
1762 		      decl = get_base_address (lhs);
1763 		      if (DECL_P (decl)
1764 			  && (non_rewritable_lvalue_p (lhs)
1765 			      /* We cannot move required conversions from
1766 				 the lhs to the rhs in asm statements, so
1767 				 require we do not need any.  */
1768 			      || !useless_type_conversion_p
1769 			            (TREE_TYPE (lhs), TREE_TYPE (decl))))
1770 			bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1771 		    }
1772 		}
1773 	      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1774 		{
1775 		  tree link = gimple_asm_input_op (asm_stmt, i);
1776 		  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
1777 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1778 		}
1779 	    }
1780 	}
1781 
1782       for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1783 	   gsi_next (&gsi))
1784 	{
1785 	  size_t i;
1786 	  gphi *phi = gsi.phi ();
1787 
1788 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
1789 	    {
1790 	      tree op = PHI_ARG_DEF (phi, i), var;
1791 	      if (TREE_CODE (op) == ADDR_EXPR
1792 		  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1793 		  && DECL_P (var))
1794 		bitmap_set_bit (addresses_taken, DECL_UID (var));
1795 	    }
1796 	}
1797     }
1798 
1799   /* We cannot iterate over all referenced vars because that can contain
1800      unused vars from BLOCK trees, which causes code generation differences
1801      for -g vs. -g0.  */
1802   for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1803     maybe_optimize_var (var, addresses_taken, not_reg_needs,
1804 			suitable_for_renaming);
1805 
1806   FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1807     maybe_optimize_var (var, addresses_taken, not_reg_needs,
1808 			suitable_for_renaming);
1809 
1810   /* Operand caches need to be recomputed for operands referencing the updated
1811      variables and operands need to be rewritten to expose bare symbols.  */
1812   if (!bitmap_empty_p (suitable_for_renaming))
1813     {
1814       FOR_EACH_BB_FN (bb, cfun)
1815 	for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1816 	  {
1817 	    gimple *stmt = gsi_stmt (gsi);
1818 
1819 	    /* Re-write TARGET_MEM_REFs of symbols we want to
1820 	       rewrite into SSA form.  */
1821 	    if (gimple_assign_single_p (stmt))
1822 	      {
1823 		tree lhs = gimple_assign_lhs (stmt);
1824 		tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
1825 		tree sym;
1826 
1827 		/* Rewrite LHS IMAG/REALPART_EXPR similar to
1828 		   gimplify_modify_expr_complex_part.  */
1829 		if ((TREE_CODE (lhs) == IMAGPART_EXPR
1830 		     || TREE_CODE (lhs) == REALPART_EXPR)
1831 		    && DECL_P (TREE_OPERAND (lhs, 0))
1832 		    && bitmap_bit_p (suitable_for_renaming,
1833 				     DECL_UID (TREE_OPERAND (lhs, 0))))
1834 		  {
1835 		    tree other = make_ssa_name (TREE_TYPE (lhs));
1836 		    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
1837 					? REALPART_EXPR : IMAGPART_EXPR,
1838 					TREE_TYPE (other),
1839 					TREE_OPERAND (lhs, 0));
1840 		    gimple *load = gimple_build_assign (other, lrhs);
1841 		    location_t loc = gimple_location (stmt);
1842 		    gimple_set_location (load, loc);
1843 		    gimple_set_vuse (load, gimple_vuse (stmt));
1844 		    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
1845 		    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
1846 		    gimple_assign_set_rhs_with_ops
1847 		      (&gsi, COMPLEX_EXPR,
1848 		       TREE_CODE (lhs) == IMAGPART_EXPR
1849 		       ? other : gimple_assign_rhs1 (stmt),
1850 		       TREE_CODE (lhs) == IMAGPART_EXPR
1851 		       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
1852 		    stmt = gsi_stmt (gsi);
1853 		    unlink_stmt_vdef (stmt);
1854 		    update_stmt (stmt);
1855 		    continue;
1856 		  }
1857 
1858 		/* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
1859 		   into a BIT_INSERT_EXPR.  */
1860 		if (TREE_CODE (lhs) == BIT_FIELD_REF
1861 		    && DECL_P (TREE_OPERAND (lhs, 0))
1862 		    && bitmap_bit_p (suitable_for_renaming,
1863 				     DECL_UID (TREE_OPERAND (lhs, 0)))
1864 		    && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
1865 		    && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
1866 		    && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
1867 					TYPE_SIZE_UNIT (TREE_TYPE
1868 					  (TREE_TYPE (TREE_OPERAND (lhs, 0)))),
1869 					0)
1870 		    && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
1871 			% tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
1872 		  {
1873 		    tree var = TREE_OPERAND (lhs, 0);
1874 		    tree val = gimple_assign_rhs1 (stmt);
1875 		    if (! types_compatible_p (TREE_TYPE (TREE_TYPE (var)),
1876 					      TREE_TYPE (val)))
1877 		      {
1878 			tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (var)));
1879 			gimple *pun
1880 			  = gimple_build_assign (tem,
1881 						 build1 (VIEW_CONVERT_EXPR,
1882 							 TREE_TYPE (tem), val));
1883 			gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
1884 			val = tem;
1885 		      }
1886 		    tree bitpos = TREE_OPERAND (lhs, 2);
1887 		    gimple_assign_set_lhs (stmt, var);
1888 		    gimple_assign_set_rhs_with_ops
1889 		      (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
1890 		    stmt = gsi_stmt (gsi);
1891 		    unlink_stmt_vdef (stmt);
1892 		    update_stmt (stmt);
1893 		    continue;
1894 		  }
1895 
1896 		/* Rewrite a vector insert using a MEM_REF on the LHS
1897 		   into a BIT_INSERT_EXPR.  */
1898 		if (TREE_CODE (lhs) == MEM_REF
1899 		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1900 		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1901 		    && DECL_P (sym)
1902 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
1903 		    && VECTOR_TYPE_P (TREE_TYPE (sym))
1904 		    && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
1905 		    /* If it is a full replacement we can do better below.  */
1906 		    && maybe_ne (wi::to_poly_offset
1907 				   (TYPE_SIZE_UNIT (TREE_TYPE (lhs))),
1908 				 wi::to_poly_offset
1909                                    (TYPE_SIZE_UNIT (TREE_TYPE (sym))))
1910 		    && known_ge (mem_ref_offset (lhs), 0)
1911 		    && known_gt (wi::to_poly_offset
1912 				   (TYPE_SIZE_UNIT (TREE_TYPE (sym))),
1913 				 mem_ref_offset (lhs))
1914 		    && multiple_of_p (sizetype,
1915 				      TREE_OPERAND (lhs, 1),
1916 				      TYPE_SIZE_UNIT (TREE_TYPE (lhs))))
1917 		  {
1918 		    tree val = gimple_assign_rhs1 (stmt);
1919 		    if (! types_compatible_p (TREE_TYPE (val),
1920 					      TREE_TYPE (TREE_TYPE (sym))))
1921 		      {
1922 			poly_uint64 lhs_bits, nelts;
1923 			tree temtype = TREE_TYPE (TREE_TYPE (sym));
1924 			if (poly_int_tree_p (TYPE_SIZE (TREE_TYPE (lhs)),
1925 					     &lhs_bits)
1926 			    && multiple_p (lhs_bits,
1927 					   tree_to_uhwi
1928 					     (TYPE_SIZE (TREE_TYPE
1929 							   (TREE_TYPE (sym)))),
1930 					   &nelts)
1931 			    && maybe_ne (nelts, 1u)
1932 			    && valid_vector_subparts_p (nelts))
1933 			  temtype = build_vector_type (temtype, nelts);
1934 			tree tem = make_ssa_name (temtype);
1935 			gimple *pun
1936 			  = gimple_build_assign (tem,
1937 						 build1 (VIEW_CONVERT_EXPR,
1938 							 TREE_TYPE (tem), val));
1939 			gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
1940 			val = tem;
1941 		      }
1942 		    tree bitpos
1943 		      = wide_int_to_tree (bitsizetype,
1944 					  mem_ref_offset (lhs) * BITS_PER_UNIT);
1945 		    gimple_assign_set_lhs (stmt, sym);
1946 		    gimple_assign_set_rhs_with_ops
1947 		      (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
1948 		    stmt = gsi_stmt (gsi);
1949 		    unlink_stmt_vdef (stmt);
1950 		    update_stmt (stmt);
1951 		    continue;
1952 		  }
1953 
1954 		/* We shouldn't have any fancy wrapping of
1955 		   component-refs on the LHS, but look through
1956 		   VIEW_CONVERT_EXPRs as that is easy.  */
1957 		while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1958 		  lhs = TREE_OPERAND (lhs, 0);
1959 		if (TREE_CODE (lhs) == MEM_REF
1960 		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1961 		    && integer_zerop (TREE_OPERAND (lhs, 1))
1962 		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1963 		    && DECL_P (sym)
1964 		    && !TREE_ADDRESSABLE (sym)
1965 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1966 		  lhs = sym;
1967 		else
1968 		  lhs = gimple_assign_lhs (stmt);
1969 
1970 		/* Rewrite the RHS and make sure the resulting assignment
1971 		   is validly typed.  */
1972 		maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
1973 		rhs = gimple_assign_rhs1 (stmt);
1974 		if (gimple_assign_lhs (stmt) != lhs
1975 		    && !useless_type_conversion_p (TREE_TYPE (lhs),
1976 						   TREE_TYPE (rhs)))
1977 		  {
1978 		    if (gimple_clobber_p (stmt))
1979 		      {
1980 			rhs = build_constructor (TREE_TYPE (lhs), NULL);
1981 			TREE_THIS_VOLATILE (rhs) = 1;
1982 		      }
1983 		    else
1984 		      rhs = fold_build1 (VIEW_CONVERT_EXPR,
1985 					 TREE_TYPE (lhs), rhs);
1986 		  }
1987 		if (gimple_assign_lhs (stmt) != lhs)
1988 		  gimple_assign_set_lhs (stmt, lhs);
1989 
1990 		if (gimple_assign_rhs1 (stmt) != rhs)
1991 		  {
1992 		    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1993 		    gimple_assign_set_rhs_from_tree (&gsi, rhs);
1994 		  }
1995 	      }
1996 
1997 	    else if (gimple_code (stmt) == GIMPLE_CALL)
1998 	      {
1999 		unsigned i;
2000 		if (optimize_atomic_compare_exchange_p (stmt))
2001 		  {
2002 		    tree expected = gimple_call_arg (stmt, 1);
2003 		    if (bitmap_bit_p (suitable_for_renaming,
2004 				      DECL_UID (TREE_OPERAND (expected, 0))))
2005 		      {
2006 			fold_builtin_atomic_compare_exchange (&gsi);
2007 			continue;
2008 		      }
2009 		  }
2010 		else if (is_asan_mark_p (stmt))
2011 		  {
2012 		    tree var = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
2013 		    if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
2014 		      {
2015 			unlink_stmt_vdef (stmt);
2016 			if (asan_mark_p (stmt, ASAN_MARK_POISON))
2017 			  {
2018 			    gcall *call
2019 			      = gimple_build_call_internal (IFN_ASAN_POISON, 0);
2020 			    gimple_call_set_lhs (call, var);
2021 			    gsi_replace (&gsi, call, GSI_SAME_STMT);
2022 			  }
2023 			else
2024 			  {
2025 			    /* In ASAN_MARK (UNPOISON, &b, ...) the variable
2026 			       is uninitialized.  Avoid dependencies on
2027 			       previous out of scope value.  */
2028 			    tree clobber = build_clobber (TREE_TYPE (var));
2029 			    gimple *g = gimple_build_assign (var, clobber);
2030 			    gsi_replace (&gsi, g, GSI_SAME_STMT);
2031 			  }
2032 			continue;
2033 		      }
2034 		  }
2035 		else if (gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
2036 		  for (i = 1; i < gimple_call_num_args (stmt); i++)
2037 		    {
2038 		      tree *argp = gimple_call_arg_ptr (stmt, i);
2039 		      if (*argp == null_pointer_node)
2040 			continue;
2041 		      gcc_assert (TREE_CODE (*argp) == ADDR_EXPR
2042 				  && VAR_P (TREE_OPERAND (*argp, 0)));
2043 		      tree var = TREE_OPERAND (*argp, 0);
2044 		      if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
2045 			*argp = null_pointer_node;
2046 		    }
2047 		for (i = 0; i < gimple_call_num_args (stmt); ++i)
2048 		  {
2049 		    tree *argp = gimple_call_arg_ptr (stmt, i);
2050 		    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
2051 		  }
2052 	      }
2053 
2054 	    else if (gimple_code (stmt) == GIMPLE_ASM)
2055 	      {
2056 		gasm *asm_stmt = as_a <gasm *> (stmt);
2057 		unsigned i;
2058 		for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
2059 		  {
2060 		    tree link = gimple_asm_output_op (asm_stmt, i);
2061 		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
2062 						suitable_for_renaming);
2063 		  }
2064 		for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
2065 		  {
2066 		    tree link = gimple_asm_input_op (asm_stmt, i);
2067 		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
2068 						suitable_for_renaming);
2069 		  }
2070 	      }
2071 
2072 	    else if (gimple_debug_bind_p (stmt)
2073 		     && gimple_debug_bind_has_value_p (stmt))
2074 	      {
2075 		tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
2076 		tree decl;
2077 		maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
2078 		decl = non_rewritable_mem_ref_base (*valuep);
2079 		if (decl
2080 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
2081 		  gimple_debug_bind_reset_value (stmt);
2082 	      }
2083 
2084 	    if (gimple_references_memory_p (stmt)
2085 		|| is_gimple_debug (stmt))
2086 	      update_stmt (stmt);
2087 
2088 	    gsi_next (&gsi);
2089 	  }
2090 
2091       /* Update SSA form here, we are called as non-pass as well.  */
2092       if (number_of_loops (cfun) > 1
2093 	  && loops_state_satisfies_p (LOOP_CLOSED_SSA))
2094 	rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
2095       else
2096 	update_ssa (TODO_update_ssa);
2097     }
2098 
2099   timevar_pop (TV_ADDRESS_TAKEN);
2100 }
2101 
namespace {

/* Descriptor for the "addressables" pass.  The pass body is empty; all of
   the real work is requested via TODO_update_address_taken in
   todo_flags_finish, which makes the pass manager call
   execute_update_addresses_taken after this pass "runs".  */

const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

/* Pass that re-examines which variables need to be marked addressable
   and updates SSA form accordingly.  Note it overrides neither gate ()
   nor execute (): the TODO flag above does everything.  */

class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */

}; // class pass_update_address_taken

} // anon namespace
2129 
2130 gimple_opt_pass *
make_pass_update_address_taken(gcc::context * ctxt)2131 make_pass_update_address_taken (gcc::context *ctxt)
2132 {
2133   return new pass_update_address_taken (ctxt);
2134 }
2135