/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "stringpool.h"
#include "cgraph.h"
#include "gimplify.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "dbgcnt.h"
#include "debug.h"
#include "attribs.h"

/* Return true when NODE has an ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
		       void *)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}

/* Return true when NODE can be a target of an indirect call.  */

static bool
is_indirect_call_target_p (struct cgraph_node *node, void *)
{
  return node->indirect_call_target;
}

/* Look for all functions inlined to NODE and update their inlined_to pointers
   to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
        e->callee->global.inlined_to = inlined_to;
	update_inlined_to_pointer (e->callee, inlined_to);
      }
}

/* Add symtab NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by pointer to 1.
   We enqueue nodes on two occasions: when we find them reachable or when we
   find their bodies needed for further cloning.  In the second case we mark
   them by pointer to 2 after processing so they are re-queued once they
   become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
	      hash_set<symtab_node *> *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *)2 && !reachable->contains (node))
    return;
  node->aux = *first;
  *first = node;
}

/* Process references.  */

static void
process_references (symtab_node *snode,
		    symtab_node **first,
		    bool before_inlining_p,
		    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (((before_inlining_p
		    && (TREE_CODE (node->decl) != FUNCTION_DECL
			|| (TREE_CODE (node->decl) == FUNCTION_DECL
			    && opt_for_fn (body->decl, optimize))
		        || (symtab->state < IPA_SSA
		            && lookup_attribute
				 ("always_inline",
			          DECL_ATTRIBUTES (body->decl))))))
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (VAR_P (node->decl)
		      && flag_wpa
		      && ctor_for_folding (node->decl)
		         != error_mark_node))))
	{
	  /* Be sure that we will not optimize out alias target
	     body.  */
	  if (DECL_EXTERNAL (node->decl)
	      && node->alias
	      && before_inlining_p)
	    reachable->add (body);
	  reachable->add (node);
	}
      enqueue_node (node, first, reachable);
    }
}

/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to perform trivial devirtualization when no or only one target is
   possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       hash_set<symtab_node *> *reachable,
			       bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

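  /* hash_set::add returns true when CACHE_TOKEN was already in the set, so
     each distinct target list is walked at most once.  */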
  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		    (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
	    continue;

	  n->indirect_call_target = true;
	  symtab_node *body = n->function_symbol ();

	  /* Prior to inlining, keep alive bodies of possible targets for
	     devirtualization.  */
	  if (n->definition
	      && (before_inlining_p
		  && opt_for_fn (body->decl, optimize)
		  && opt_for_fn (body->decl, flag_devirtualize)))
	     {
		/* Be sure that we will not optimize out alias target
		   body.  */
		if (DECL_EXTERNAL (n->decl)
		    && n->alias
		    && before_inlining_p)
		  reachable->add (body);
	       reachable->add (n);
	     }
	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivations)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::get_create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (dump_enabled_p ())
            {
	      location_t locus;
	      if (edge->call_stmt)
		locus = gimple_location (edge->call_stmt);
	      else
		locus = UNKNOWN_LOCATION;
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->dump_name (),
			       target->dump_name ());
	    }
	  edge = edge->make_direct (target);
	  if (ipa_fn_summaries)
	    ipa_update_overall_fn_summary (node);
	  else if (edge->call_stmt)
	    {
	      edge->redirect_call_stmt_to_callee ();

	      /* Call to __builtin_unreachable shouldn't be instrumented.  */
	      if (!targets.length ())
		gimple_call_set_with_bounds (edge->call_stmt, false);
	    }
	}
    }
}

/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; the bodies need
     to stay in memory until inlining in hope that they will be inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

   - virtual functions are kept in the callgraph even if they seem unreachable
     in hope that calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't inline
     the call.  In theory early opts and IPA should work out all important
     cases.

   - virtual clones need bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is otherwise
     unreachable.  To avoid origins from sitting in the callgraph and being
     walked by IPA passes, we turn them into unanalyzed nodes with body
     defined.

     We maintain the set of function declarations whose bodies need to stay in
     body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of the origin, and cgraph_remove_node already knows how to
     reshape the callgraph and preserve the body when an offline copy of a
     function or an inline clone is being removed.

   - C++ virtual tables keyed to other units are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by them
     be removed when it is otherwise unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols that
   need to stay) and symbols that are in the boundary (i.e. external symbols
   referenced by reachable symbols or origins of clones).  The queue is
   represented as a linked list via AUX pointers terminated by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary we
   always turn the definition into a declaration, but we may keep the function
   body around based on body_needed_for_clonning.

   All symbols that enter the queue have a non-zero AUX pointer and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of the boundary and once
   as a real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set the AUX pointer of processed symbols in the boundary to constant 2.  */

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;
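  /* "Before inlining" lasts until IPA_SSA_AFTER_INLINING; when not optimizing
     (and outside of LTO) only early inlining of always_inline bodies runs,
     so anything at or past IPA_SSA already counts as after inlining.  */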
  bool before_inlining_p = symtab->state < (!optimize && !in_lto_p ? IPA_SSA
					    : IPA_SSA_AFTER_INLINING);

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
  if (flag_checking)
    {
      FOR_EACH_FUNCTION (node)
	gcc_assert (!node->aux);
      FOR_EACH_VARIABLE (vnode)
	gcc_assert (!vnode->aux);
    }
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      node->indirect_call_target = false;
      if (node->definition
	  && !node->global.inlined_to
	  && !node->in_other_partition
	  && !node->can_remove_if_no_direct_calls_and_refs_p ())
	{
	  gcc_assert (!node->global.inlined_to);
	  reachable.add (node);
	  enqueue_node (node, &first, &reachable);
	}
      else
	gcc_assert (!node->aux);
     }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p ()
	&& !vnode->in_other_partition)
      {
	reachable.add (vnode);
	enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *)first->aux;

      /* If we are processing a symbol in the boundary, mark its AUX pointer
	 for possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	{
	  node->aux = (void *)2;
	  if (node->alias && node->analyzed)
	    enqueue_node (node->get_alias_target (), &first, &reachable);
	}
      else
	{
	  if (TREE_CODE (node->decl) == FUNCTION_DECL
	      && DECL_ABSTRACT_ORIGIN (node->decl))
	    {
	      struct cgraph_node *origin_node
	      = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
	      if (origin_node && !origin_node->used_as_abstract_origin)
		{
	          origin_node->used_as_abstract_origin = true;
		  gcc_assert (!origin_node->prev_sibling_clone);
		  gcc_assert (!origin_node->next_sibling_clone);
		  for (cgraph_node *n = origin_node->clones; n;
		       n = n->next_sibling_clone)
		    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
		      n->used_as_abstract_origin = true;
		}
	    }
	  /* If any symbol in a comdat group is reachable, force
	     all externally visible symbols in the same comdat
	     group to be reachable as well.  Comdat-local symbols
	     can be discarded if all uses were inlined.  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ()
		    && !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (node, &first, before_inlining_p, &reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to extern
	     inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      /* Keep alive possible targets for devirtualization.  */
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  struct cgraph_edge *next;
		  for (e = cnode->indirect_calls; e; e = next)
		    {
		      next = e->next_callee;
		      if (e->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       e, &first, &reachable,
						       before_inlining_p);
		    }
		}
	      for (e = cnode->callees; e; e = e->next_callee)
		{
	          symtab_node *body = e->callee->function_symbol ();
		  if (e->callee->definition
		      && !e->callee->in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || e->callee->alias
			  || (before_inlining_p
			      && (opt_for_fn (body->decl, optimize)
		                  || (symtab->state < IPA_SSA
		                      && lookup_attribute
				          ("always_inline",
				           DECL_ATTRIBUTES (body->decl)))))))
		    {
		      /* Be sure that we will not optimize out alias target
			 body.  */
		      if (DECL_EXTERNAL (e->callee->decl)
			  && e->callee->alias
			  && before_inlining_p)
			reachable.add (body);
		      reachable.add (e->callee);
		    }
		  enqueue_node (e->callee, &first, &reachable);
		}

	      /* When an inline clone exists, mark the body to be preserved
		 so that we don't kill it when removing the offline copy of
		 the function.  */
	      if (cnode->global.inlined_to)
	        body_needed_for_clonning.add (cnode->decl);

	      /* For instrumentation clones we always need the original
		 function node for proper LTO privatization.  */
	      if (cnode->instrumentation_clone
		  && cnode->definition)
		{
		  gcc_assert (cnode->instrumented_version || in_lto_p);
		  if (cnode->instrumented_version)
		    {
		      enqueue_node (cnode->instrumented_version, &first,
				    &reachable);
		      reachable.add (cnode->instrumented_version);
		    }
		}

	      /* For non-inline clones, force their origins to the boundary
		 and ensure that the body is not removed.  */
	      while (cnode->clone_of)
		{
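		  /* Inline clones share their decl with the origin, so a
		     differing decl identifies a non-inline clone whose origin
		     body must survive for later materialization.  */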
		  bool noninline = cnode->clone_of->decl != cnode->decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      body_needed_for_clonning.add (cnode->decl);
		      enqueue_node (cnode, &first, &reachable);
		    }
		}

	    }
	  else if (cnode->thunk.thunk_p)
	    enqueue_node (cnode->callees->callee, &first, &reachable);

	  /* If any reachable function has simd clones, mark them as
	     reachable as well.  */
	  if (cnode->simd_clones)
	    {
	      cgraph_node *next;
	      for (next = cnode->simd_clones;
		   next;
		   next = next->simdclone->next_clone)
		if (in_boundary_p
		    || !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	}
      /* When we see a constructor of an external variable, keep referred
	 nodes in the boundary.  This will also hold initializers of the
	 external vars NODE refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
	  && DECL_EXTERNAL (node->decl)
	  && !vnode->alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref = NULL;
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    enqueue_node (ref->referred, &first, &reachable);
	}
    }

  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
	{
	  if (file)
	    fprintf (file, " %s", node->dump_name ());
	  node->remove ();
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
        {
	  /* We keep definitions of thunks and aliases in the boundary so
	     we can walk to the ultimate alias targets and function symbols
	     reliably.  */
	  if (node->alias || node->thunk.thunk_p)
	    ;
	  else if (!body_needed_for_clonning.contains (node->decl)
	      && !node->alias && !node->thunk.thunk_p)
	    node->release_body ();
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->decl));
	  if (node->definition && !node->alias && !node->thunk.thunk_p)
	    {
	      if (file)
		fprintf (file, " %s", node->dump_name ());
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->cpp_implicit_alias = false;
	      node->alias = false;
	      node->transparent_alias = false;
	      node->thunk.thunk_p = false;
	      node->weakref = false;
	      /* After early inlining we drop always_inline attributes on
		 bodies of functions that are still referenced (have their
		 address taken).  */
	      DECL_ATTRIBUTES (node->decl)
		= remove_attribute ("always_inline",
				    DECL_ATTRIBUTES (node->decl));
	      if (!node->in_other_partition)
		node->local.local = false;
	      node->remove_callees ();
	      node->remove_all_references ();
	      changed = true;
	      if (node->thunk.thunk_p
		  && node->thunk.add_pointer_bounds_args)
		{
		  node->thunk.thunk_p = false;
		  node->thunk.add_pointer_bounds_args = false;
		}
	    }
	}
      else
	gcc_assert (node->clone_of || !node->has_gimple_body_p ()
		    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into a normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition
	     or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
	{
	  struct ipa_ref *ref = NULL;

	  /* First remove the aliases, so varpool::remove can possibly lookup
	     the constructor and save it for future use.  */
	  while (vnode->iterate_direct_aliases (0, ref))
	    {
	      if (file)
		fprintf (file, " %s", ref->referred->dump_name ());
	      ref->referring->remove ();
	    }
	  if (file)
	    fprintf (file, " %s", vnode->dump_name ());
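	  /* Recompute VNEXT: removing the aliases above may have removed
	     the variable it pointed to.  */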
          vnext = next_variable (vnode);
	  /* Signal removal to the debug machinery.  */
	  if (! flag_wpa)
	    {
	      vnode->definition = false;
	      (*debug_hooks->late_global_decl) (vnode->decl);
	    }
	  vnode->remove ();
	  changed = true;
	}
      else if (!reachable.contains (vnode) && !vnode->alias)
        {
	  tree init;
	  if (vnode->definition)
	    {
	      if (file)
		fprintf (file, " %s", vnode->name ());
	      changed = true;
	    }
	  /* Keep body if it may be useful for constant folding.  */
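	  /* ctor_for_folding returns error_mark_node when the initializer
	     is not usable for folding; only then it is safe to drop.  */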
	  if ((init = ctor_for_folding (vnode->decl)) == error_mark_node
	      && !POINTER_BOUNDS_P (vnode->decl))
	    vnode->remove_initializer ();
	  else
	    DECL_INITIAL (vnode->decl) = init;
	  vnode->body_removed = true;
	  vnode->definition = false;
	  vnode->analyzed = false;
	  vnode->aux = NULL;

	  vnode->remove_from_same_comdat_group ();

	  vnode->remove_all_references ();
	}
      else
	vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
	&& !node->used_from_other_partition)
      {
	if (!node->call_for_symbol_and_aliases
	    (has_addr_references_p, NULL, true)
	    && (!node->instrumentation_clone
		|| !node->instrumented_version
		|| !node->instrumented_version->address_taken))
	  {
	    if (file)
	      fprintf (file, " %s", node->name ());
	    node->address_taken = false;
	    changed = true;
	    if (node->local_p ()
		/* Virtual functions may be kept in cgraph just because
		   of possible later devirtualization.  Do not mark them as
		   local too early so we won't optimize them out before
		   we are done with polymorphic call analysis.  */
		&& (!before_inlining_p
		    || !node->call_for_symbol_and_aliases
		       (is_indirect_call_target_p, NULL, true)))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

  symtab_node::checking_verify_symtab_nodes ();

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && (optimize || in_lto_p) && ipa_call_summaries)
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}

/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to the given variable
   do not need to be explicit.  */

void
process_references (varpool_node *vnode,
		    bool *written, bool *address_taken,
		    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

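  /* Stop scanning as soon as all three flags are set or EXPLICIT_REFS
     has been cleared.  */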
  for (i = 0; vnode->iterate_referring (i, ref)
	      && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
	*address_taken = true;
	break;
      case IPA_REF_LOAD:
	*read = true;
	break;
      case IPA_REF_STORE:
	*written = true;
	break;
      case IPA_REF_ALIAS:
	process_references (dyn_cast<varpool_node *> (ref->referring), written,
			    address_taken, read, explicit_refs);
	break;
      case IPA_REF_CHKP:
	gcc_unreachable ();
      }
}

/* Set TREE_READONLY bit.  */

bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}

/* Set writeonly bit and clear the initializer, since it will not be needed.  */

bool
set_writeonly_bit (varpool_node *vnode, void *data)
{
  vnode->writeonly = true;
  if (optimize || in_lto_p)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
	{
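	  /* Dropping the references may leave other symbols unreachable;
	     tell the caller (through DATA) that unreachable-node removal
	     should be re-run.  */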
	  if (vnode->num_references ())
	    *(bool *)data = true;
	  vnode->remove_all_references ();
	}
    }
  return false;
}

/* Clear addressable bit of VNODE.  */

bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}

/* Discover variables that no longer have their address taken or that are
   read only and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This cannot be done in between gimplify and omp_expand since
   the readonly flag plays a role in what is shared and what is not.  Currently
   we do this transformation as part of whole program visibility and re-do it
   at the ipa-reference pass (to take cloning into account), but it would
   make sense to do it before early optimizations.  */

bool
ipa_discover_readonly_nonaddressable_vars (void)
{
  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
	&& (TREE_ADDRESSABLE (vnode->decl)
	    || !vnode->writeonly
	    || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	bool read = false;
	bool explicit_refs = true;

	process_references (vnode, &written, &address_taken, &read,
			    &explicit_refs);
	if (!explicit_refs)
	  continue;
	if (!address_taken)
	  {
	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (non-addressable)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
					        true);
	  }
	if (!address_taken && !written
	    /* Making a variable in an explicit section readonly can cause
	       section type conflicts.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && vnode->get_section () == NULL)
	  {
	    if (!TREE_READONLY (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (read-only)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
	  }
	if (!vnode->writeonly && !read && !address_taken && written)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (write-only)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
					        true);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced. */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final,
			     tree optimization,
			     tree target)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
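  /* E.g. WHICH 'I', PRIORITY 151 and the first counter value produce the
     buffer "I_00151_0", which get_file_function_name turns into the full
     collect2-recognizable symbol name.  */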
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
  /* Produce a sane name, but one not recognizable by collect2, just in case
     we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl) = optimization;
  DECL_FUNCTION_SPECIFIC_TARGET (decl) = target;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'P':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("chkp ctor"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'B':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("bnd_legacy"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.  */
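/* A typical use might be cgraph_build_static_cdtor ('I', body,
   DEFAULT_INIT_PRIORITY) to emit a constructor run at default priority.  */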

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false, NULL, NULL);
}

/* When the target does not have ctors and dtors, we call all constructors
   and destructors from a special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node, vec<tree> *ctors, vec<tree> *dtors)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    ctors->safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    dtors->safe_push (node->decl);
  node = cgraph_node::get (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}

/* Define global constructors/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, const vec<tree> &cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      /* Find the batch [i, j) of constructors/destructors with the same
	 initialization priority.  */
      j = i;
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and the target supports them, do
	 nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Append a call to each cdtor in the batch to BODY and clear its
	 static cdtor flag.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should already have been removed; when not
	     optimizing, we want the user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true,
				   DECL_FUNCTION_SPECIFIC_OPTIMIZATION (cdtors[0]),
				   DECL_FUNCTION_SPECIFIC_TARGET (cdtors[0]));
    }
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in backwards
       order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (vec<tree> *ctors, vec<tree> *dtors)
{
  if (!ctors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      ctors->qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, *ctors);
    }

  if (!dtors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      dtors->qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, *dtors);
    }
}

/* Look for constructors and destructors and produce functions calling them.
   This is needed for targets not supporting ctors or dtors, but we perform
   the transformation also at link time to merge possibly numerous
   constructors/destructors into a single function to improve code locality
   and reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  /* A vector of FUNCTION_DECLs declared as static constructors.  */
  auto_vec<tree, 20> ctors;
  /* A vector of FUNCTION_DECLs declared as static destructors.  */
  auto_vec<tree, 20> dtors;
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
	|| DECL_STATIC_DESTRUCTOR (node->decl))
       record_cdtor_fn (node, &ctors, &dtors);
  build_cdtor_fns (&ctors, &dtors);
  return 0;
}

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into a single
     function.  */
  return !targetm.have_ctors_dtors || in_lto_p;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}

/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single user dataflow.
   Here we want to associate each variable with the single function that may
   access it.

   FUNCTION is the current single user of a variable; VAR is a variable whose
   single-user lattice value is met with it.  The lattice is stored in
   SINGLE_USER_MAP.

   We represent:
    - TOP by no entry in SINGLE_USER_MAP
    - BOTTOM by BOTTOM in AUX pointer (to save lookups)
    - known single user by cgraph pointer in SINGLE_USER_MAP.  */

cgraph_node *
meet (cgraph_node *function, varpool_node *var,
       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}

/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they all correspond to a single
   function FUNCTION.  SINGLE_USER_MAP represents the dataflow lattice.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
		       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
	{
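	  /* A use from an inline clone counts as a use from the function
	     it was inlined into.  */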
	  if (cnode->global.inlined_to)
	    cnode = cnode->global.inlined_to;
	  if (!function)
	    function = cnode;
	  else if (function != cnode)
	    function = BOTTOM;
	}
      else
	function = meet (function, dyn_cast <varpool_node *> (ref->referring),
			 single_user_map);
    }
  return function;
}

/* Pass setting the used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
	/* Enqueue symbol for dataflow.  */
        var->aux = first;
	first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *)first->aux;

      f = single_user_map.get (var);
      if (f)
	orig_user = *f;
      else
	orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If user differs, enqueue all references.  */
      if (user != orig_user)
	{
	  unsigned int i;
	  ipa_ref *ref;

	  single_user_map.put (var, user);

	  /* Enqueue all aliases for re-processing.  */
	  for (i = 0; var->iterate_direct_aliases (i, ref); i++)
	    if (!ref->referring->aux)
	      {
		ref->referring->aux = first;
		first = dyn_cast <varpool_node *> (ref->referring);
	      }
	  /* Enqueue all users for re-processing.  */
	  for (i = 0; var->iterate_reference (i, ref); i++)
	    if (!ref->referred->aux
	        && ref->referred->definition
		&& is_a <varpool_node *> (ref->referred))
	      {
		ref->referred->aux = first;
		first = dyn_cast <varpool_node *> (ref->referred);
	      }

	  /* If user is BOTTOM, just punt on this var.  */
	  if (user == BOTTOM)
	    var->aux = BOTTOM;
	  else
	    var->aux = NULL;
	}
      else
	var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
	{
	  /* Not having the single user known means that the VAR is
	     unreachable.  Either someone forgot to remove unreachable
	     variables or the reachability here is wrong.  */

	  gcc_checking_assert (single_user_map.get (var));

	  if (dump_file)
	    {
	      fprintf (dump_file, "Variable %s is used by single function\n",
		       var->dump_name ());
	    }
	  var->used_by_single_function = true;
	}
      var->aux = NULL;
    }
  return 0;
}

namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}

/* Materialize all clones.  */

namespace {

const pass_data pass_data_materialize_all_clones =
{
  SIMPLE_IPA_PASS, /* type */
  "materialize-all-clones", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_OPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_materialize_all_clones : public simple_ipa_opt_pass
{
public:
  pass_materialize_all_clones (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_materialize_all_clones, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      symtab->materialize_all_clones ();
      return 0;
    }

}; // class pass_materialize_all_clones

} // anon namespace

simple_ipa_opt_pass *
make_pass_materialize_all_clones (gcc::context *ctxt)
{
  return new pass_materialize_all_clones (ctxt);
}