/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "stringpool.h"
#include "cgraph.h"
#include "gimplify.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "dbgcnt.h"
#include "debug.h"
#include "attribs.h"

/* Return true when NODE has an ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
                       void *)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}

/* Return true when NODE can be the target of an indirect call.  */

static bool
is_indirect_call_target_p (struct cgraph_node *node, void *)
{
  return node->indirect_call_target;
}

/* Look for all functions inlined to NODE and update their inlined_to
   pointers to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node,
                           struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
        e->callee->global.inlined_to = inlined_to;
        update_inlined_to_pointer (e->callee, inlined_to);
      }
}

/* Add symtab NODE to the queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by a pointer to 1.
   We enqueue nodes on two occasions: when we find them reachable and when
   we find their bodies needed for further cloning.  In the second case we
   mark them by a pointer to 2 after processing, so they are re-queued once
   they become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
              hash_set<symtab_node *> *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable; re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *) 2 && !reachable->contains (node))
    return;
  node->aux = *first;
  *first = node;
}
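
/* Editorial note: a small worked illustration of the AUX encoding used by
   the queue above (assumed symbols, not part of the original sources).
   For symbols A and B enqueued in that order with *FIRST initially
   (symtab_node *) 1:

     enqueue_node (A, &first, ...);   A->aux = 1;  first = A;
     enqueue_node (B, &first, ...);   B->aux = A;  first = B;

   The resulting list is B -> A -> 1.  A node with AUX == 2 has already
   been processed as part of the boundary and is re-enqueued only when it
   is also present in the REACHABLE set.  */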

/* Process references.  */

static void
process_references (symtab_node *snode,
                    symtab_node **first,
                    bool before_inlining_p,
                    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      if (node->definition && !node->in_other_partition
          && ((!DECL_EXTERNAL (node->decl) || node->alias)
              || (((before_inlining_p
                    && (TREE_CODE (node->decl) != FUNCTION_DECL
                        || (TREE_CODE (node->decl) == FUNCTION_DECL
                            && opt_for_fn (body->decl, optimize))
                        || (symtab->state < IPA_SSA
                            && lookup_attribute
                                 ("always_inline",
                                  DECL_ATTRIBUTES (body->decl))))))
                  /* We use variable constructors during late compilation for
                     constant folding.  Keep references alive so partitioning
                     knows about potential references.  */
                  || (VAR_P (node->decl)
                      && flag_wpa
                      && ctor_for_folding (node->decl)
                         != error_mark_node))))
        {
          /* Be sure that we will not optimize out the alias target
             body.  */
          if (DECL_EXTERNAL (node->decl)
              && node->alias
              && before_inlining_p)
            reachable->add (body);
          reachable->add (node);
        }
      enqueue_node (node, first, reachable);
    }
}
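
/* Editorial note: an illustrative summary of the reachability test above
   (not normative).  A defined, same-partition symbol referred to from a
   processed symbol is added to REACHABLE when, for example:

     - it is a non-external definition (or an alias), or
     - before inlining, it is a function the inliner may still want, e.g.
       one carrying the always_inline attribute while
       symtab->state < IPA_SSA, or
     - it is a variable whose constructor can be used for constant folding
       during WPA (ctor_for_folding does not return error_mark_node).

   Every referred symbol is enqueued regardless, so it at least survives
   in the boundary.  */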

/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try trivial devirtualization when no target or only one target is
   possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
                               struct cgraph_edge *edge,
                               symtab_node **first,
                               hash_set<symtab_node *> *reachable,
                               bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *> targets
    = possible_polymorphic_call_targets
        (edge, &final, &cache_token);

  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
        {
          struct cgraph_node *n = targets[i];

          /* Do not bother to mark virtual methods in an anonymous namespace;
             either we will find a use of the virtual table defining them,
             or they are unused.  */
          if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
              && type_in_anonymous_namespace_p
                    (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
            continue;

          n->indirect_call_target = true;
          symtab_node *body = n->function_symbol ();

          /* Prior to inlining, keep alive the bodies of possible targets
             for devirtualization.  */
          if (n->definition
              && (before_inlining_p
                  && opt_for_fn (body->decl, optimize)
                  && opt_for_fn (body->decl, flag_devirtualize)))
            {
              /* Be sure that we will not optimize out the alias target
                 body.  */
              if (DECL_EXTERNAL (n->decl)
                  && n->alias
                  && before_inlining_p)
                reachable->add (body);
              reachable->add (n);
            }
          /* Even after inlining we want to keep the possible targets in the
             boundary, so late passes can still produce a direct call even if
             the chance for inlining is lost.  */
          enqueue_node (n, first, reachable);
        }
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivations)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
        {
          cgraph_node *target, *node = edge->caller;
          if (targets.length () == 1)
            target = targets[0];
          else
            target = cgraph_node::get_create
                       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

          if (dump_enabled_p ())
            {
              location_t locus;
              if (edge->call_stmt)
                locus = gimple_location (edge->call_stmt);
              else
                locus = UNKNOWN_LOCATION;
              dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
                               "devirtualizing call in %s to %s\n",
                               edge->caller->dump_name (),
                               target->dump_name ());
            }
          edge = edge->make_direct (target);
          if (ipa_fn_summaries)
            ipa_update_overall_fn_summary (node);
          else if (edge->call_stmt)
            {
              edge->redirect_call_stmt_to_callee ();

              /* Call to __builtin_unreachable shouldn't be instrumented.  */
              if (!targets.length ())
                gimple_call_set_with_bounds (edge->call_stmt, false);
            }
        }
    }
}
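
/* Editorial note: a hypothetical C++ example of the trivial
   devirtualization performed above (identifiers are illustrative only):

     struct S final { virtual int f (); };
     int call (S *p) { return p->f (); }   // polymorphic call

   Because S is final, possible_polymorphic_call_targets reports a single
   target (S::f) with FINAL set, so the indirect edge is turned into a
   direct call to S::f.  With zero possible targets the call is redirected
   to __builtin_unreachable instead.  */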

/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; their bodies
     need to stay in memory until inlining, in the hope that they will be
     inlined.  After inlining we release their bodies and turn them into
     unanalyzed nodes even when they are reachable.

   - virtual functions are kept in the callgraph even if they seem
     unreachable, in the hope that calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't
     inline the call.  In theory early opts and IPA should work out all
     important cases.

   - virtual clones need the bodies of their origins for later
     materialization; this means that we want to keep the body even if the
     origin is otherwise unreachable.  To avoid the origins sitting in the
     callgraph and being walked by IPA passes, we turn them into unanalyzed
     nodes with their bodies defined.

     We maintain the set of function declarations whose bodies need to stay
     in body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of the origin, and cgraph_remove_node already knows how to
     reshape the callgraph and preserve the body when an offline copy of the
     function or an inline clone is being removed.

   - C++ virtual tables keyed to another unit are represented as
     DECL_EXTERNAL variables with DECL_INITIAL set.  We finalize these and
     keep the reachable ones around for constant folding purposes.  After
     inlining we however stop walking their references to let everything
     static referenced by them be removed when it is otherwise unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols that
   need to stay) and symbols that are in the boundary (i.e. external symbols
   referenced by reachable symbols or origins of clones).  The queue is
   represented as a linked list via AUX pointers, terminated by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary we
   always turn the definition into a declaration, but we may keep the
   function body around based on body_needed_for_clonning.

   All symbols that enter the queue have a non-zero AUX pointer and are in
   the boundary.  The pointer set REACHABLE is used to track reachable
   symbols.

   Every symbol can be visited twice: once as part of the boundary and once
   as a real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set the AUX pointer of processed symbols in the boundary to the
   constant 2.  */
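
/* Editorial note: a small worked example of the algorithm above (assumed
   symbols, not from the original sources).  Suppose main calls f, f has an
   inline clone f' inlined into main, and g is never referenced:

     1) main is externally visible, so it is put into REACHABLE and queued.
     2) Processing main walks its callees; f is added to REACHABLE and
        queued, while f' keeps the body of f alive via
        body_needed_for_clonning.
     3) g is never queued; after the walk its AUX is still NULL, so it is
        removed entirely.

   Symbols that were queued but never entered REACHABLE end up in the
   boundary: their definitions are turned into declarations.  */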

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;
  bool before_inlining_p = symtab->state < (!optimize && !in_lto_p ? IPA_SSA
                                            : IPA_SSA_AFTER_INLINING);

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
  if (flag_checking)
    {
      FOR_EACH_FUNCTION (node)
        gcc_assert (!node->aux);
      FOR_EACH_VARIABLE (vnode)
        gcc_assert (!vnode->aux);
    }
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with the master clone
     and thus cgraph_can_remove_if_no_direct_calls_and_refs_p should not be
     called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      node->indirect_call_target = false;
      if (node->definition
          && !node->global.inlined_to
          && !node->in_other_partition
          && !node->can_remove_if_no_direct_calls_and_refs_p ())
        {
          gcc_assert (!node->global.inlined_to);
          reachable.add (node);
          enqueue_node (node, &first, &reachable);
        }
      else
        gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p ()
        && !vnode->in_other_partition)
      {
        reachable.add (vnode);
        enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *) first->aux;

      /* If we are processing a symbol in the boundary, mark its AUX pointer
         for possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
        {
          node->aux = (void *) 2;
          if (node->alias && node->analyzed)
            enqueue_node (node->get_alias_target (), &first, &reachable);
        }
      else
        {
          if (TREE_CODE (node->decl) == FUNCTION_DECL
              && DECL_ABSTRACT_ORIGIN (node->decl))
            {
              struct cgraph_node *origin_node
                = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
              if (origin_node && !origin_node->used_as_abstract_origin)
                {
                  origin_node->used_as_abstract_origin = true;
                  gcc_assert (!origin_node->prev_sibling_clone);
                  gcc_assert (!origin_node->next_sibling_clone);
                  for (cgraph_node *n = origin_node->clones; n;
                       n = n->next_sibling_clone)
                    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
                      n->used_as_abstract_origin = true;
                }
            }
          /* If any symbol in a comdat group is reachable, force
             all externally visible symbols in the same comdat
             group to be reachable as well.  Comdat-local symbols
             can be discarded if all uses were inlined.  */
          if (node->same_comdat_group)
            {
              symtab_node *next;
              for (next = node->same_comdat_group;
                   next != node;
                   next = next->same_comdat_group)
                if (!next->comdat_local_p ()
                    && !reachable.add (next))
                  enqueue_node (next, &first, &reachable);
            }
          /* Mark references as reachable.  */
          process_references (node, &first, before_inlining_p, &reachable);
        }

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
        {
          /* Mark the callees reachable unless they are direct calls to
             extern inline functions we decided to not inline.  */
          if (!in_boundary_p)
            {
              struct cgraph_edge *e;
              /* Keep alive possible targets for devirtualization.  */
              if (opt_for_fn (cnode->decl, optimize)
                  && opt_for_fn (cnode->decl, flag_devirtualize))
                {
                  struct cgraph_edge *next;
                  for (e = cnode->indirect_calls; e; e = next)
                    {
                      next = e->next_callee;
                      if (e->indirect_info->polymorphic)
                        walk_polymorphic_call_targets (&reachable_call_targets,
                                                       e, &first, &reachable,
                                                       before_inlining_p);
                    }
                }
              for (e = cnode->callees; e; e = e->next_callee)
                {
                  symtab_node *body = e->callee->function_symbol ();
                  if (e->callee->definition
                      && !e->callee->in_other_partition
                      && (!e->inline_failed
                          || !DECL_EXTERNAL (e->callee->decl)
                          || e->callee->alias
                          || (before_inlining_p
                              && (opt_for_fn (body->decl, optimize)
                                  || (symtab->state < IPA_SSA
                                      && lookup_attribute
                                          ("always_inline",
                                           DECL_ATTRIBUTES (body->decl)))))))
                    {
                      /* Be sure that we will not optimize out the alias
                         target body.  */
                      if (DECL_EXTERNAL (e->callee->decl)
                          && e->callee->alias
                          && before_inlining_p)
                        reachable.add (body);
                      reachable.add (e->callee);
                    }
                  enqueue_node (e->callee, &first, &reachable);
                }

              /* When an inline clone exists, mark the body to be preserved
                 so that when the offline copy of the function is removed we
                 don't kill it.  */
              if (cnode->global.inlined_to)
                body_needed_for_clonning.add (cnode->decl);

              /* For instrumentation clones we always need the original
                 function node for proper LTO privatization.  */
              if (cnode->instrumentation_clone
                  && cnode->definition)
                {
                  gcc_assert (cnode->instrumented_version || in_lto_p);
                  if (cnode->instrumented_version)
                    {
                      enqueue_node (cnode->instrumented_version, &first,
                                    &reachable);
                      reachable.add (cnode->instrumented_version);
                    }
                }

              /* For non-inline clones, force their origins to the boundary
                 and ensure that their bodies are not removed.  */
              while (cnode->clone_of)
                {
                  bool noninline = cnode->clone_of->decl != cnode->decl;
                  cnode = cnode->clone_of;
                  if (noninline)
                    {
                      body_needed_for_clonning.add (cnode->decl);
                      enqueue_node (cnode, &first, &reachable);
                    }
                }

            }
          else if (cnode->thunk.thunk_p)
            enqueue_node (cnode->callees->callee, &first, &reachable);

          /* If any reachable function has simd clones, mark them as
             reachable as well.  */
          if (cnode->simd_clones)
            {
              cgraph_node *next;
              for (next = cnode->simd_clones;
                   next;
                   next = next->simdclone->next_clone)
                if (in_boundary_p
                    || !reachable.add (next))
                  enqueue_node (next, &first, &reachable);
            }
        }
      /* When we see the constructor of an external variable, keep the
         referred nodes in the boundary.  This will also hold the
         initializers of the external vars NODE refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
          && DECL_EXTERNAL (node->decl)
          && !vnode->alias
          && in_boundary_p)
        {
          struct ipa_ref *ref = NULL;
          for (int i = 0; node->iterate_reference (i, ref); i++)
            enqueue_node (ref->referred, &first, &reachable);
        }
    }

  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If the node is not needed at all, remove it.  */
      if (!node->aux)
        {
          if (file)
            fprintf (file, " %s", node->dump_name ());
          node->remove ();
          changed = true;
        }
      /* If the node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
        {
          /* We keep definitions of thunks and aliases in the boundary so
             we can walk to the ultimate alias targets and function symbols
             reliably.  */
          if (node->alias || node->thunk.thunk_p)
            ;
          else if (!body_needed_for_clonning.contains (node->decl)
                   && !node->alias && !node->thunk.thunk_p)
            node->release_body ();
          else if (!node->clone_of)
            gcc_assert (in_lto_p || DECL_RESULT (node->decl));
          if (node->definition && !node->alias && !node->thunk.thunk_p)
            {
              if (file)
                fprintf (file, " %s", node->dump_name ());
              node->body_removed = true;
              node->analyzed = false;
              node->definition = false;
              node->cpp_implicit_alias = false;
              node->alias = false;
              node->transparent_alias = false;
              node->thunk.thunk_p = false;
              node->weakref = false;
              /* After early inlining we drop always_inline attributes on
                 bodies of functions that are still referenced (have their
                 address taken).  */
              DECL_ATTRIBUTES (node->decl)
                = remove_attribute ("always_inline",
                                    DECL_ATTRIBUTES (node->decl));
              if (!node->in_other_partition)
                node->local.local = false;
              node->remove_callees ();
              node->remove_all_references ();
              changed = true;
              if (node->thunk.thunk_p
                  && node->thunk.add_pointer_bounds_args)
                {
                  node->thunk.thunk_p = false;
                  node->thunk.add_pointer_bounds_args = false;
                }
            }
        }
      else
        gcc_assert (node->clone_of || !node->has_gimple_body_p ()
                    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so that materializing them allows
     further cloning.  If the function a clone is inlined into is removed,
     we need to turn it into a normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
          && !node->callers)
        {
          gcc_assert (node->clones);
          node->global.inlined_to = NULL;
          update_inlined_to_pointer (node, node);
        }
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
          /* For can_refer_decl_in_current_unit_p we want to track for
             all external variables if they are defined in another partition
             or not.  */
          && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
        {
          struct ipa_ref *ref = NULL;

          /* First remove the aliases, so varpool::remove can possibly look
             up the constructor and save it for future use.  */
          while (vnode->iterate_direct_aliases (0, ref))
            {
              if (file)
                fprintf (file, " %s", ref->referred->dump_name ());
              ref->referring->remove ();
            }
          if (file)
            fprintf (file, " %s", vnode->dump_name ());
          vnext = next_variable (vnode);
          /* Signal removal to the debug machinery.  */
          if (!flag_wpa)
            {
              vnode->definition = false;
              (*debug_hooks->late_global_decl) (vnode->decl);
            }
          vnode->remove ();
          changed = true;
        }
      else if (!reachable.contains (vnode) && !vnode->alias)
        {
          tree init;
          if (vnode->definition)
            {
              if (file)
                fprintf (file, " %s", vnode->name ());
              changed = true;
            }
          /* Keep the body if it may be useful for constant folding.  */
          if ((init = ctor_for_folding (vnode->decl)) == error_mark_node
              && !POINTER_BOUNDS_P (vnode->decl))
            vnode->remove_initializer ();
          else
            DECL_INITIAL (vnode->decl) = init;
          vnode->body_removed = true;
          vnode->definition = false;
          vnode->analyzed = false;
          vnode->aux = NULL;

          vnode->remove_from_same_comdat_group ();

          vnode->remove_all_references ();
        }
      else
        vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be
     local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
        && !node->used_from_other_partition)
      {
        if (!node->call_for_symbol_and_aliases
            (has_addr_references_p, NULL, true)
            && (!node->instrumentation_clone
                || !node->instrumented_version
                || !node->instrumented_version->address_taken))
          {
            if (file)
              fprintf (file, " %s", node->name ());
            node->address_taken = false;
            changed = true;
            if (node->local_p ()
                /* Virtual functions may be kept in cgraph just because
                   of possible later devirtualization.  Do not mark them as
                   local too early so we won't optimize them out before
                   we are done with polymorphic call analysis.  */
                && (!before_inlining_p
                    || !node->call_for_symbol_and_aliases
                       (is_indirect_call_target_p, NULL, true)))
              {
                node->local.local = true;
                if (file)
                  fprintf (file, " (local)");
              }
          }
      }
  if (file)
    fprintf (file, "\n");

  symtab_node::checking_verify_symtab_nodes ();

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && (optimize || in_lto_p) && ipa_call_summaries)
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}

/* Process references to VNODE and set the flags WRITTEN, ADDRESS_TAKEN and
   READ as needed; also clear EXPLICIT_REFS if the references to the given
   variable do not need to be explicit.  */

void
process_references (varpool_node *vnode,
                    bool *written, bool *address_taken,
                    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
              && *explicit_refs && (!*written || !*address_taken || !*read);
       i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
        *address_taken = true;
        break;
      case IPA_REF_LOAD:
        *read = true;
        break;
      case IPA_REF_STORE:
        *written = true;
        break;
      case IPA_REF_ALIAS:
        process_references (dyn_cast<varpool_node *> (ref->referring), written,
                            address_taken, read, explicit_refs);
        break;
      case IPA_REF_CHKP:
        gcc_unreachable ();
      }
}
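
/* Editorial note: a sketch of how the overload above is typically driven
   (mirroring its use in ipa_discover_readonly_nonaddressable_vars below):

     bool written = false, address_taken = false, read = false;
     bool explicit_refs = true;
     process_references (vnode, &written, &address_taken, &read,
                         &explicit_refs);

   The loop stops early once all three flags are set or once EXPLICIT_REFS
   becomes false, since no stronger conclusion is possible after that.  */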

/* Set the TREE_READONLY bit.  */

bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}

/* Set the writeonly bit and clear the initializer, since it will not be
   needed.  */

bool
set_writeonly_bit (varpool_node *vnode, void *data)
{
  vnode->writeonly = true;
  if (optimize || in_lto_p)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
        {
          if (vnode->num_references ())
            *(bool *) data = true;
          vnode->remove_all_references ();
        }
    }
  return false;
}

/* Clear the addressable bit of VNODE.  */

bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}

/* Discover variables that no longer have their address taken, or that are
   read-only, and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This cannot be done between gimplify and omp_expand since the
   readonly flag plays a role in what is shared and what is not.  Currently
   we do this transformation as part of whole-program visibility and redo it
   at the ipa-reference pass (to take cloning into account), but it would
   make sense to do it before early optimizations.  */

bool
ipa_discover_readonly_nonaddressable_vars (void)
{
  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
        && (TREE_ADDRESSABLE (vnode->decl)
            || !vnode->writeonly
            || !TREE_READONLY (vnode->decl)))
      {
        bool written = false;
        bool address_taken = false;
        bool read = false;
        bool explicit_refs = true;

        process_references (vnode, &written, &address_taken, &read,
                            &explicit_refs);
        if (!explicit_refs)
          continue;
        if (!address_taken)
          {
            if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (non-addressable)", vnode->name ());
            vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
                                                true);
          }
        if (!address_taken && !written
            /* Making a variable in an explicit section read-only can cause
               section type conflicts.
               See e.g. gcc.c-torture/compile/pr23237.c.  */
            && vnode->get_section () == NULL)
          {
            if (!TREE_READONLY (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (read-only)", vnode->name ());
            vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
          }
        if (!vnode->writeonly && !read && !address_taken && written)
          {
            if (dump_file)
              fprintf (dump_file, " %s (write-only)", vnode->name ());
            vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
                                                true);
          }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}
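
/* Editorial note: an illustrative (assumed) input for the discovery pass
   above.  Given

     static int counter;
     void bump (void) { counter++; }

   COUNTER is written and read but never has its address taken, so its
   addressable bit is cleared; it is neither read-only (it is stored to)
   nor write-only (it is read), so no other flag changes.  A static
   variable that is only ever stored to would instead be marked write-only
   and lose its references.  */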

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for a chkp static vars constructor) or 'B' (for a chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced.  */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
    /* Produce a sane name, but one not recognizable by collect2, just in
       case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
                     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
                        RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'P':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("chkp ctor"),
                                          NULL,
                                          NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'B':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("bnd_legacy"),
                                          NULL,
                                          NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}
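
/* Editorial note: the generated names encode WHICH, PRIORITY and a
   counter.  For instance, a final constructor with priority 65 and counter
   0 is built from the buffer "I_00065_0", which get_file_function_name
   turns into a collect2-recognizable symbol (roughly _GLOBAL__I_00065_0);
   non-final variants use the "sub_" prefix so collect2 ignores them.  The
   exact mangling is up to get_file_function_name.  */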

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for a chkp static vars constructor) or 'B' (for a chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}

/* When the target does not have ctors and dtors, we call all constructors
   and destructors from a special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node, vec<tree> *ctors, vec<tree> *dtors)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    ctors->safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    dtors->safe_push (node->decl);
  node = cgraph_node::get (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}

/* Define global constructor/destructor functions for the vector CDTORS,
   which is sorted by initialization priority.  If CTOR_P is true, these
   are constructors; otherwise, they are destructors.  */

static void
build_cdtor (bool ctor_p, const vec<tree> &cdtors)
{
  size_t i, j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      do
        {
          priority_type p;
          fn = cdtors[j];
          p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
          if (j == i)
            priority = p;
          else if (p != priority)
            break;
          j++;
        }
      while (j < len);

      /* When there is only one cdtor and the target supports them, do
         nothing.  */
      if (j == i + 1
          && targetm.have_ctors_dtors)
        {
          i++;
          continue;
        }
      /* Output calls to the batch of constructors/destructors with the
         same initialization priority found above.  */
      for (; i < j; i++)
        {
          tree call;
          fn = cdtors[i];
          call = build_call_expr (fn, 0);
          if (ctor_p)
            DECL_STATIC_CONSTRUCTOR (fn) = 0;
          else
            DECL_STATIC_DESTRUCTOR (fn) = 0;
          /* We do not want to optimize away pure/const calls here.
             When optimizing, these should already be removed; when not
             optimizing, we want the user to be able to breakpoint in
             them.  */
          TREE_SIDE_EFFECTS (call) = 1;
          append_to_statement_list (call, &body);
        }
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
         priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}
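
/* Editorial note: an assumed example of the batching above.  For sorted
   constructors {a, b} with priority 100 and {c} with priority 200, the
   loop emits two wrapper functions:

     I_00100: a (); b ();
     I_00200: c ();

   each built via cgraph_build_static_cdtor_1 with FINAL set, while the
   original functions lose their DECL_STATIC_CONSTRUCTOR flag.  */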

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *) p1;
  f2 = *(const tree *) p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in backward
       order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *) p1;
  f2 = *(const tree *) p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (vec<tree> *ctors, vec<tree> *dtors)
{
  if (!ctors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      ctors->qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, *ctors);
    }

  if (!dtors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      dtors->qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, *dtors);
    }
}

/* Look for constructors and destructors and produce functions calling
   them.  This is needed for targets not supporting ctors or dtors, but we
   perform the transformation also at link time to merge possibly numerous
   constructors/destructors into a single function, to improve code
   locality and reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  /* A vector of FUNCTION_DECLs declared as static constructors.  */
  auto_vec<tree, 20> ctors;
  /* A vector of FUNCTION_DECLs declared as static destructors.  */
  auto_vec<tree, 20> dtors;
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
        || DECL_STATIC_DESTRUCTOR (node->decl))
      record_cdtor_fn (node, &ctors, &dtors);
  build_cdtor_fns (&ctors, &dtors);
  return 0;
}

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into a single
     function.  */
  return !targetm.have_ctors_dtors || in_lto_p;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}

/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single user dataflow.
   Here we want to associate variables with the single function that may
   access them.

   FUNCTION is the current single user of a variable; VAR is a variable
   that uses it.  The lattice is stored in SINGLE_USER_MAP.

   We represent:
    - TOP by no entry in SINGLE_USER_MAP
    - BOTTOM by BOTTOM in the AUX pointer (to save lookups)
    - a known single user by a cgraph pointer in SINGLE_USER_MAP.  */

cgraph_node *
meet (cgraph_node *function, varpool_node *var,
      hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}
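
/* Editorial note: the meet operation above behaves like a classic
   three-level lattice.  With distinct functions f and g:

     meet (NULL, x)   -> single user of x   (no current user yet)
     meet (f,    x)   -> f        if x's user is f or still unknown (TOP)
     meet (f,    x)   -> BOTTOM   if x's user is g != f
     meet (_, bottom-var)         -> BOTTOM

   BOTTOM therefore means "used by more than one function".  */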

/* Propagation step of the single-use dataflow.

   Check all uses of VNODE and see whether they all occur within the single
   function FUNCTION.  SINGLE_USER_MAP represents the dataflow lattice.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
                       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If the node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
        {
          if (cnode->global.inlined_to)
            cnode = cnode->global.inlined_to;
          if (!function)
            function = cnode;
          else if (function != cnode)
            function = BOTTOM;
        }
      else
        function = meet (function, dyn_cast <varpool_node *> (ref->referring),
                         single_user_map);
    }
  return function;
}

/* Pass setting the used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
        /* Enqueue symbol for dataflow.  */
        var->aux = first;
        first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *) first->aux;

      f = single_user_map.get (var);
      if (f)
        orig_user = *f;
      else
        orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If the user differs, enqueue all references.  */
      if (user != orig_user)
        {
          unsigned int i;
          ipa_ref *ref;

          single_user_map.put (var, user);

          /* Enqueue all aliases for re-processing.  */
          for (i = 0; var->iterate_direct_aliases (i, ref); i++)
            if (!ref->referring->aux)
              {
                ref->referring->aux = first;
                first = dyn_cast <varpool_node *> (ref->referring);
              }
          /* Enqueue all users for re-processing.  */
          for (i = 0; var->iterate_reference (i, ref); i++)
            if (!ref->referred->aux
                && ref->referred->definition
                && is_a <varpool_node *> (ref->referred))
              {
                ref->referred->aux = first;
                first = dyn_cast <varpool_node *> (ref->referred);
              }

          /* If the user is BOTTOM, just punt on this var.  */
          if (user == BOTTOM)
            var->aux = BOTTOM;
          else
            var->aux = NULL;
        }
      else
        var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
        {
          /* Not having the single user known means that the VAR is
             unreachable.  Either someone forgot to remove unreachable
             variables or the reachability here is wrong.  */

          gcc_checking_assert (single_user_map.get (var));

          if (dump_file)
            {
              fprintf (dump_file, "Variable %s is used by single function\n",
                       var->dump_name ());
            }
          var->used_by_single_function = true;
        }
      var->aux = NULL;
    }
  return 0;
}

namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}

/* Materialize all clones.  */

namespace {

const pass_data pass_data_materialize_all_clones =
{
  SIMPLE_IPA_PASS, /* type */
  "materialize-all-clones", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_OPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_materialize_all_clones : public simple_ipa_opt_pass
{
public:
  pass_materialize_all_clones (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_materialize_all_clones, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      symtab->materialize_all_clones ();
      return 0;
    }

}; // class pass_materialize_all_clones

} // anon namespace

simple_ipa_opt_pass *
make_pass_materialize_all_clones (gcc::context *ctxt)
{
  return new pass_materialize_all_clones (ctxt);
}