/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "stringpool.h"
#include "cgraph.h"
#include "gimplify.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "dbgcnt.h"
#include "debug.h"
#include "attribs.h"

/* Return true when NODE has an ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
		       void *)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}
57 
58 /* Return true when NODE can be target of an indirect call.  */
59 
60 static bool
is_indirect_call_target_p(struct cgraph_node * node,void *)61 is_indirect_call_target_p (struct cgraph_node *node, void *)
62 {
63   return node->indirect_call_target;
64 }

/* Look for all functions inlined to NODE and update their inlined_to
   pointers to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node,
			   struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
	e->callee->global.inlined_to = inlined_to;
	update_inlined_to_pointer (e->callee, inlined_to);
      }
}

/* Add symtab NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by pointer to 1.
   We enqueue nodes on two occasions: when we find them reachable and when
   we find their bodies are needed for further cloning.  In the second case
   we mark them by pointer to 2 after processing, so they are re-queued
   once they become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
	      hash_set<symtab_node *> *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *) 2 && !reachable->contains (node))
    return;
  node->aux = *first;
  *first = node;
}
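
/* For illustration: after enqueue_node has been called on symbols A and
   then B (with *FIRST initially pointing to 1), the queue encoded in the
   AUX pointers looks like

     *first == B,  B->aux == A,  A->aux == (void *) 1

   and a consumer drains it the same way remove_unreachable_nodes below
   does:

     while (first != (symtab_node *) (void *) 1)
       {
	 symtab_node *node = first;
	 first = (symtab_node *) first->aux;
	 ...
       }  */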

/* Return true if NODE may get inlined later.
   This is used to keep DECL_EXTERNAL function bodies around long enough
   so the inliner can process them.  */

static bool
possible_inline_candidate_p (symtab_node *node)
{
  if (symtab->state >= IPA_SSA_AFTER_INLINING)
    return false;
  cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
  if (!cnode)
    return false;
  if (DECL_UNINLINABLE (cnode->decl))
    return false;
  if (opt_for_fn (cnode->decl, optimize))
    return true;
  if (symtab->state >= IPA_SSA)
    return false;
  return lookup_attribute ("always_inline", DECL_ATTRIBUTES (node->decl));
}
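
/* For example, a function declared "extern inline" and compiled with
   optimization gets no out-of-line body in this unit, yet its body must
   survive long enough for the inliner to use it.  Once symtab->state
   reaches IPA_SSA_AFTER_INLINING the body can no longer be inlined and
   this predicate returns false.  */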

/* Process references.  */

static void
process_references (symtab_node *snode,
		    symtab_node **first,
		    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (possible_inline_candidate_p (node)
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (VAR_P (node->decl)
		      && (flag_wpa
			  || flag_incremental_link == INCREMENTAL_LINK_LTO)
		      && dyn_cast <varpool_node *> (node)
			   ->ctor_useable_for_folding_p ()))))
	{
	  /* Be sure that we will not optimize out alias target
	     body.  */
	  if (DECL_EXTERNAL (node->decl)
	      && node->alias
	      && symtab->state < IPA_SSA_AFTER_INLINING)
	    reachable->add (body);
	  reachable->add (node);
	}
      enqueue_node (node, first, reachable);
    }
}

/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to perform trivial devirtualization when no target or only one
   target is possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       hash_set<symtab_node *> *reachable)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *> targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		    (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
	    continue;

	  n->indirect_call_target = true;
	  symtab_node *body = n->function_symbol ();

	  /* Prior to inlining, keep alive bodies of possible targets for
	     devirtualization.  */
	  if (n->definition
	      && (possible_inline_candidate_p (body)
		  && opt_for_fn (body->decl, flag_devirtualize)))
	    {
	      /* Be sure that we will not optimize out alias target
		 body.  */
	      if (DECL_EXTERNAL (n->decl)
		  && n->alias
		  && symtab->state < IPA_SSA_AFTER_INLINING)
		reachable->add (body);
	      reachable->add (n);
	    }
	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivations)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::get_create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (dump_enabled_p ())
	    {
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, edge->call_stmt,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->dump_name (),
			       target->dump_name ());
	    }
	  edge = edge->make_direct (target);
	  if (ipa_fn_summaries)
	    ipa_update_overall_fn_summary (node);
	  else if (edge->call_stmt)
	    edge->redirect_call_stmt_to_callee ();
	}
    }
}
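
/* Illustrative example (not code from this file): given the C++ source

     struct A { virtual int f (); };
     struct B final : A { int f () { return 1; } };
     int g (B *b) { return b->f (); }

   the type B is final, so B::f is the only possible target of the
   polymorphic call in g and the edge is made direct; when the target
   list is empty, the call is redirected to __builtin_unreachable
   instead.  */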

/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; their bodies
     need to stay in memory until inlining, in the hope that they will be
     inlined.  After inlining we release their bodies and turn them into
     unanalyzed nodes even when they are reachable.

   - virtual functions are kept in the callgraph even if they seem
     unreachable, in the hope that calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't
     inline the call.  In theory early opts and IPA should work out all
     important cases.

   - virtual clones need the bodies of their origins for later
     materialization; this means that we want to keep the body even if the
     origin is otherwise unreachable.  To avoid origins from sitting in the
     callgraph and being walked by IPA passes, we turn them into unanalyzed
     nodes with the body defined.

     We maintain the set of function declarations whose bodies need to stay
     in body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of the origin, and cgraph_remove_node already knows how to
     reshape the callgraph and preserve the body when an offline copy of a
     function or an inline clone is being removed.

   - C++ virtual tables keyed to other units are represented as
     DECL_EXTERNAL variables with DECL_INITIAL set.  We finalize these and
     keep reachable ones around for constant folding purposes.  After
     inlining, however, we stop walking their references to let everything
     static referenced by them be removed when it is otherwise unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols that
   need to stay) and symbols that are in the boundary (i.e. external symbols
   referenced by reachable symbols or origins of clones).  The queue is
   represented as a linked list via AUX pointers terminated by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary we
   always turn the definition into a declaration, but we may keep the
   function body around based on body_needed_for_clonning.

   All symbols that enter the queue have their AUX pointer non-zero and are
   in the boundary.  Pointer set REACHABLE is used to track reachable
   symbols.

   Every symbol can be visited twice: once as part of the boundary and once
   as a real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set the AUX pointer of processed symbols in the boundary to
   constant 2.  */

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
  if (flag_checking)
    {
      FOR_EACH_FUNCTION (node)
	gcc_assert (!node->aux);
      FOR_EACH_VARIABLE (vnode)
	gcc_assert (!vnode->aux);
    }
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with the master clone
     and thus cgraph_can_remove_if_no_direct_calls_and_refs_p should not be
     called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      node->indirect_call_target = false;
      if (node->definition
	  && !node->global.inlined_to
	  && !node->in_other_partition
	  && !node->can_remove_if_no_direct_calls_and_refs_p ())
	{
	  gcc_assert (!node->global.inlined_to);
	  reachable.add (node);
	  enqueue_node (node, &first, &reachable);
	}
      else
	gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p ()
	&& !vnode->in_other_partition)
      {
	reachable.add (vnode);
	enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *) first->aux;

      /* If we are processing a symbol in the boundary, mark its AUX pointer
	 for possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	{
	  node->aux = (void *) 2;
	  if (node->alias && node->analyzed)
	    enqueue_node (node->get_alias_target (), &first, &reachable);
	}
      else
	{
	  if (TREE_CODE (node->decl) == FUNCTION_DECL
	      && DECL_ABSTRACT_ORIGIN (node->decl))
	    {
	      struct cgraph_node *origin_node
		= cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
	      if (origin_node && !origin_node->used_as_abstract_origin)
		{
		  origin_node->used_as_abstract_origin = true;
		  gcc_assert (!origin_node->prev_sibling_clone);
		  gcc_assert (!origin_node->next_sibling_clone);
		  for (cgraph_node *n = origin_node->clones; n;
		       n = n->next_sibling_clone)
		    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
		      n->used_as_abstract_origin = true;
		}
	    }
	  /* If any symbol in a comdat group is reachable, force
	     all externally visible symbols in the same comdat
	     group to be reachable as well.  Comdat-local symbols
	     can be discarded if all uses were inlined.  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ()
		    && !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (node, &first, &reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to
	     extern inline functions we decided not to inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      /* Keep alive possible targets for devirtualization.  */
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  struct cgraph_edge *next;
		  for (e = cnode->indirect_calls; e; e = next)
		    {
		      next = e->next_callee;
		      if (e->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       e, &first, &reachable);
		    }
		}
	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  symtab_node *body = e->callee->function_symbol ();
		  if (e->callee->definition
		      && !e->callee->in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || e->callee->alias
			  || possible_inline_candidate_p (e->callee)))
		    {
		      /* Be sure that we will not optimize out alias target
			 body.  */
		      if (DECL_EXTERNAL (e->callee->decl)
			  && e->callee->alias
			  && symtab->state < IPA_SSA_AFTER_INLINING)
			reachable.add (body);
		      reachable.add (e->callee);
		    }
		  enqueue_node (e->callee, &first, &reachable);
		}

	      /* When an inline clone exists, mark the body to be preserved
		 so that when the offline copy of the function is removed we
		 don't kill it.  */
	      if (cnode->global.inlined_to)
		body_needed_for_clonning.add (cnode->decl);

	      /* For non-inline clones, force their origins to the boundary
		 and ensure that the body is not removed.  */
	      while (cnode->clone_of)
		{
		  bool noninline = cnode->clone_of->decl != cnode->decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      body_needed_for_clonning.add (cnode->decl);
		      enqueue_node (cnode, &first, &reachable);
		    }
		}

	    }
	  else if (cnode->thunk.thunk_p)
	    enqueue_node (cnode->callees->callee, &first, &reachable);

	  /* If any reachable function has simd clones, mark them as
	     reachable as well.  */
	  if (cnode->simd_clones)
	    {
	      cgraph_node *next;
	      for (next = cnode->simd_clones;
		   next;
		   next = next->simdclone->next_clone)
		if (in_boundary_p
		    || !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	}
      /* When we see the constructor of an external variable, keep the
	 referred nodes in the boundary.  This will also hold initializers
	 of the external vars NODE refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
	  && DECL_EXTERNAL (node->decl)
	  && !vnode->alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref = NULL;
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    enqueue_node (ref->referred, &first, &reachable);
	}
    }

  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
	{
	  if (file)
	    fprintf (file, " %s", node->dump_name ());
	  node->remove ();
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
	{
	  /* We keep definitions of thunks and aliases in the boundary so
	     we can walk to the ultimate alias targets and function symbols
	     reliably.  */
	  if (node->alias || node->thunk.thunk_p)
	    ;
	  else if (!body_needed_for_clonning.contains (node->decl)
		   && !node->alias && !node->thunk.thunk_p)
	    node->release_body ();
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->decl));
	  if (node->definition && !node->alias && !node->thunk.thunk_p)
	    {
	      if (file)
		fprintf (file, " %s", node->dump_name ());
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->cpp_implicit_alias = false;
	      node->alias = false;
	      node->transparent_alias = false;
	      node->thunk.thunk_p = false;
	      node->weakref = false;
	      /* After early inlining we drop always_inline attributes on
		 bodies of functions that are still referenced (have their
		 address taken).  */
	      DECL_ATTRIBUTES (node->decl)
		= remove_attribute ("always_inline",
				    DECL_ATTRIBUTES (node->decl));
	      if (!node->in_other_partition)
		node->local.local = false;
	      node->remove_callees ();
	      node->remove_all_references ();
	      changed = true;
	    }
	}
      else
	gcc_assert (node->clone_of || !node->has_gimple_body_p ()
		    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so that materializing them allows
     further cloning.  If the function the clone is inlined into is removed,
     we need to turn it into a normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables whether they are defined in another
	     partition or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
	{
	  struct ipa_ref *ref = NULL;

	  /* First remove the aliases, so varpool::remove can possibly look
	     up the constructor and save it for future use.  */
	  while (vnode->iterate_direct_aliases (0, ref))
	    {
	      if (file)
		fprintf (file, " %s", ref->referred->dump_name ());
	      ref->referring->remove ();
	    }
	  if (file)
	    fprintf (file, " %s", vnode->dump_name ());
	  vnext = next_variable (vnode);
	  /* Signal removal to the debug machinery.  */
	  if (! flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
	    {
	      vnode->definition = false;
	      (*debug_hooks->late_global_decl) (vnode->decl);
	    }
	  vnode->remove ();
	  changed = true;
	}
      else if (!reachable.contains (vnode) && !vnode->alias)
	{
	  tree init;
	  if (vnode->definition)
	    {
	      if (file)
		fprintf (file, " %s", vnode->name ());
	      changed = true;
	    }
	  /* Keep body if it may be useful for constant folding.  */
	  if ((flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
	      || ((init = ctor_for_folding (vnode->decl)) == error_mark_node))
	    vnode->remove_initializer ();
	  else
	    DECL_INITIAL (vnode->decl) = init;
	  vnode->body_removed = true;
	  vnode->definition = false;
	  vnode->analyzed = false;
	  vnode->aux = NULL;

	  vnode->remove_from_same_comdat_group ();

	  vnode->remove_all_references ();
	}
      else
	vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be
     local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
	&& !node->used_from_other_partition)
      {
	if (!node->call_for_symbol_and_aliases
	    (has_addr_references_p, NULL, true))
	  {
	    if (file)
	      fprintf (file, " %s", node->name ());
	    node->address_taken = false;
	    changed = true;
	    if (node->local_p ()
		/* Virtual functions may be kept in cgraph just because
		   of possible later devirtualization.  Do not mark them as
		   local too early so we won't optimize them out before
		   we are done with polymorphic call analysis.  */
		&& (symtab->state >= IPA_SSA_AFTER_INLINING
		    || !node->call_for_symbol_and_aliases
		       (is_indirect_call_target_p, NULL, true)))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

  symtab_node::checking_verify_symtab_nodes ();

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && (optimize || in_lto_p) && ipa_call_summaries)
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}

/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to the given
   variable do not need to be explicit.  */

void
process_references (varpool_node *vnode,
		    bool *written, bool *address_taken,
		    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
	      && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
	*address_taken = true;
	break;
      case IPA_REF_LOAD:
	*read = true;
	break;
      case IPA_REF_STORE:
	*written = true;
	break;
      case IPA_REF_ALIAS:
	process_references (dyn_cast<varpool_node *> (ref->referring), written,
			    address_taken, read, explicit_refs);
	break;
      }
}
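
/* For example, assuming file-scope variables

     static int x;
     static int *p = &x;

   processing the references to X sees the IPA_REF_ADDR reference coming
   from P's initializer and sets *ADDRESS_TAKEN, while a plain read would
   set *READ and a store *WRITTEN.  */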

/* Set TREE_READONLY bit.  */

bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}

/* Set the writeonly bit and clear the initializer, since it will not
   be needed.  */

bool
set_writeonly_bit (varpool_node *vnode, void *data)
{
  vnode->writeonly = true;
  if (optimize || in_lto_p)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
	{
	  if (vnode->num_references ())
	    *(bool *) data = true;
	  vnode->remove_all_references ();
	}
    }
  return false;
}

/* Clear the addressable bit of VNODE.  */

bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}

/* Discover variables whose address is no longer taken or that are
   read-only or write-only, and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This cannot be done in between gimplify and omp_expand since the
   readonly flag plays a role in what is shared and what is not.  Currently
   we do this transformation as part of whole program visibility and re-do
   it at the ipa-reference pass (to take cloning into account), but it
   would make sense to do it before early optimizations.  */

bool
ipa_discover_variable_flags (void)
{
  if (!flag_ipa_reference_addressable)
    return false;

  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
	&& (TREE_ADDRESSABLE (vnode->decl)
	    || !vnode->writeonly
	    || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	bool read = false;
	bool explicit_refs = true;

	process_references (vnode, &written, &address_taken, &read,
			    &explicit_refs);
	if (!explicit_refs)
	  continue;
	if (!address_taken)
	  {
	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (non-addressable)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
						true);
	  }
	if (!address_taken && !written
	    /* Making a variable in an explicit section readonly can cause
	       section type conflicts.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && vnode->get_section () == NULL)
	  {
	    if (!TREE_READONLY (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (read-only)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
	  }
	if (!vnode->writeonly && !read && !address_taken && written)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (write-only)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
						true);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}
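
/* For example, given

     static int counter;
     void tick (void) { counter++; }

   COUNTER is written but never read and never has its address taken, so
   it is marked write-only and (when optimizing) its references are
   removed, which is what makes unreachable symbol removal worthwhile
   afterwards; a variable that is only ever read would get TREE_READONLY
   set instead.  */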

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).
   BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced.  */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final,
			     tree optimization,
			     tree target)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
  /* Produce a sane name, but one not recognizable by collect2, in case we
     fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl) = optimization;
  DECL_FUNCTION_SPECIFIC_TARGET (decl) = target;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).
   BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false, NULL, NULL);
}
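
/* Usage sketch (hypothetical caller; FN stands for some FUNCTION_DECL):
   to run FN at program startup with the default priority, build a
   GENERIC body calling it and hand it to cgraph_build_static_cdtor:

     tree body = NULL_TREE;
     append_to_statement_list (build_call_expr (fn, 0), &body);
     cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);  */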

/* When the target does not have ctors and dtors, we call all constructors
   and destructors through a special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node, vec<tree> *ctors, vec<tree> *dtors)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    ctors->safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    dtors->safe_push (node->decl);
  node = cgraph_node::get (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}

/* Define global constructor/destructor functions for the CDTORS, which
   are sorted by initialization priority.  If CTOR_P is true, these are
   constructors; otherwise, they are destructors.  */

static void
build_cdtor (bool ctor_p, const vec<tree> &cdtors)
{
  size_t i, j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and the target supports them, do
	 nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      for (; i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true,
				   DECL_FUNCTION_SPECIFIC_OPTIMIZATION (cdtors[0]),
				   DECL_FUNCTION_SPECIFIC_TARGET (cdtors[0]));
    }
}
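
/* For example, three recorded constructors with priorities 100, 65535
   and 65535 (already sorted by compare_ctor below) are emitted as two
   wrapper functions: one for priority 100 and one calling the two
   default-priority constructors in order; collect2 then sequences the
   wrappers using the priority encoded in their names.  */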

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *) p1;
  f2 = *(const tree *) p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in backwards
       order so that LTO initializes libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *) p1;
  f2 = *(const tree *) p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (vec<tree> *ctors, vec<tree> *dtors)
{
  if (!ctors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      ctors->qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, *ctors);
    }

  if (!dtors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      dtors->qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, *dtors);
    }
}

/* Look for constructors and destructors and produce functions calling
   them.  This is needed for targets not supporting ctors or dtors, but we
   perform the transformation also at linktime to merge possibly numerous
   constructors/destructors into a single function to improve code locality
   and reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  /* A vector of FUNCTION_DECLs declared as static constructors.  */
  auto_vec<tree, 20> ctors;
  /* A vector of FUNCTION_DECLs declared as static destructors.  */
  auto_vec<tree, 20> dtors;
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
	|| DECL_STATIC_DESTRUCTOR (node->decl))
      record_cdtor_fn (node, &ctors, &dtors);
  build_cdtor_fns (&ctors, &dtors);
  return 0;
}

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into single
     function.  */
  return !targetm.have_ctors_dtors || in_lto_p;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}

/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single user dataflow.
   Here we want to associate a variable with the single function that may
   access it.

   FUNCTION is the current single user of a variable; VAR is a variable
   that uses it.  The lattice is stored in SINGLE_USER_MAP.

   We represent:
    - TOP by no entry in SINGLE_USER_MAP
    - BOTTOM by BOTTOM in AUX pointer (to save lookups)
    - known single user by cgraph pointer in SINGLE_USER_MAP.  */

cgraph_node *
meet (cgraph_node *function, varpool_node *var,
       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}
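
/* In other words, meet implements the usual three-level lattice, with
   TOP represented by a missing SINGLE_USER_MAP entry resp. a NULL
   FUNCTION:

     meet (TOP, x)    == x
     meet (BOTTOM, x) == BOTTOM
     meet (f, f)      == f
     meet (f, g)      == BOTTOM   when f != g  */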

/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see whether they all correspond to a single
   function FUNCTION.  SINGLE_USER_MAP represents the dataflow lattice.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
		       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
	{
	  if (cnode->global.inlined_to)
	    cnode = cnode->global.inlined_to;
	  if (!function)
	    function = cnode;
	  else if (function != cnode)
	    function = BOTTOM;
	}
      else
	function = meet (function, dyn_cast <varpool_node *> (ref->referring),
			 single_user_map);
    }
  return function;
}
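
/* For example, if static variable V is referenced only from function F
   (references from inline clones count as the function they are inlined
   into, via inlined_to), the dataflow below converges with F recorded as
   the single user of V; a reference from any second function drives the
   result to BOTTOM.  */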

/* Pass setting the used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
	/* Enqueue symbol for dataflow.  */
	var->aux = first;
	first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *) first->aux;

      f = single_user_map.get (var);
      if (f)
	orig_user = *f;
      else
	orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If the user differs, enqueue all references.  */
      if (user != orig_user)
	{
	  unsigned int i;
	  ipa_ref *ref;

	  single_user_map.put (var, user);

	  /* Enqueue all aliases for re-processing.  */
	  for (i = 0; var->iterate_direct_aliases (i, ref); i++)
	    if (!ref->referring->aux)
	      {
		ref->referring->aux = first;
		first = dyn_cast <varpool_node *> (ref->referring);
	      }
	  /* Enqueue all users for re-processing.  */
	  for (i = 0; var->iterate_reference (i, ref); i++)
	    if (!ref->referred->aux
		&& ref->referred->definition
		&& is_a <varpool_node *> (ref->referred))
	      {
		ref->referred->aux = first;
		first = dyn_cast <varpool_node *> (ref->referred);
	      }

	  /* If the user is BOTTOM, just punt on this var.  */
	  if (user == BOTTOM)
	    var->aux = BOTTOM;
	  else
	    var->aux = NULL;
	}
      else
	var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
	{
	  /* Not having the single user known means that the VAR is
	     unreachable.  Either someone forgot to remove unreachable
	     variables or the reachability here is wrong.  */

	  gcc_checking_assert (single_user_map.get (var));

	  if (dump_file)
	    {
	      fprintf (dump_file, "Variable %s is used by single function\n",
		       var->dump_name ());
	    }
	  var->used_by_single_function = true;
	}
      var->aux = NULL;
    }
  return 0;
}

namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}

/* Materialize all clones.  */

namespace {

const pass_data pass_data_materialize_all_clones =
{
  SIMPLE_IPA_PASS, /* type */
  "materialize-all-clones", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_OPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_materialize_all_clones : public simple_ipa_opt_pass
{
public:
  pass_materialize_all_clones (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_materialize_all_clones, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      symtab->materialize_all_clones ();
      return 0;
    }

}; // class pass_materialize_all_clones

} // anon namespace

simple_ipa_opt_pass *
make_pass_materialize_all_clones (gcc::context *ctxt)
{
  return new pass_materialize_all_clones (ctxt);
}