1 /* Basic IPA utilities for type inheritance graph construction and
2    devirtualization.
3    Copyright (C) 2013-2014 Free Software Foundation, Inc.
4    Contributed by Jan Hubicka
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
/* Brief vocabulary:
23      ODR = One Definition Rule
24         In short, the ODR states that:
25 	1 In any translation unit, a template, type, function, or object can
26 	  have no more than one definition. Some of these can have any number
27 	  of declarations. A definition provides an instance.
28         2 In the entire program, an object or non-inline function cannot have
29 	  more than one definition; if an object or function is used, it must
30 	  have exactly one definition. You can declare an object or function
31 	  that is never used, in which case you don't have to provide
32 	  a definition. In no event can there be more than one definition.
33         3 Some things, like types, templates, and extern inline functions, can
34 	  be defined in more than one translation unit. For a given entity,
35 	  each definition must be the same. Non-extern objects and functions
36 	  in different translation units are different entities, even if their
37 	  names and types are the same.
38 
39      OTR = OBJ_TYPE_REF
40        This is the Gimple representation of type information of a polymorphic call.
41        It contains two parameters:
42 	 otr_type is a type of class whose method is called.
43 	 otr_token is the index into virtual table where address is taken.
44 
45      BINFO
46        This is the type inheritance information attached to each tree
       RECORD_TYPE by the C++ frontend.  It provides information about base
48        types and virtual tables.
49 
50        BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51        BINFO also links to its type by BINFO_TYPE and to the virtual table by
52        BINFO_VTABLE.
53 
54        Base types of a given type are enumerated by BINFO_BASE_BINFO
55        vector.  Members of this vectors are not BINFOs associated
56        with a base type.  Rather they are new copies of BINFOs
57        (base BINFOs). Their virtual tables may differ from
58        virtual table of the base type.  Also BINFO_OFFSET specifies
59        offset of the base within the type.
60 
61        In the case of single inheritance, the virtual table is shared
62        and BINFO_VTABLE of base BINFO is NULL.  In the case of multiple
       inheritance the individual virtual tables are pointed to by
       BINFO_VTABLE of base binfos (which differs from BINFO_VTABLE of the
65        binfo associated to the base type).
66 
67        BINFO lookup for a given base type and offset can be done by
68        get_binfo_at_offset.  It returns proper BINFO whose virtual table
69        can be used for lookup of virtual methods associated with the
70        base type.
71 
72      token
73        This is an index of virtual method in virtual table associated
74        to the type defining it. Token can be looked up from OBJ_TYPE_REF
75        or from DECL_VINDEX of a given virtual table.
76 
77      polymorphic (indirect) call
       This is the callgraph representation of a virtual method call.  Every
79        polymorphic call contains otr_type and otr_token taken from
80        original OBJ_TYPE_REF at callgraph construction time.
81 
82    What we do here:
83 
84    build_type_inheritance_graph triggers a construction of the type inheritance
85    graph.
86 
87      We reconstruct it based on types of methods we see in the unit.
88      This means that the graph is not complete. Types with no methods are not
89      inserted into the graph.  Also types without virtual methods are not
90      represented at all, though it may be easy to add this.
91 
92      The inheritance graph is represented as follows:
93 
94        Vertices are structures odr_type.  Every odr_type may correspond
95        to one or more tree type nodes that are equivalent by ODR rule.
96        (the multiple type nodes appear only with linktime optimization)
97 
98        Edges are represented by odr_type->base and odr_type->derived_types.
99        At the moment we do not track offsets of types for multiple inheritance.
100        Adding this is easy.
101 
  possible_polymorphic_call_targets returns, given the parameters found in
  an indirect polymorphic edge, all possible polymorphic call targets of the call.
104 
105   pass_ipa_devirt performs simple speculative devirtualization.
106 */
107 
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "tree.h"
113 #include "print-tree.h"
114 #include "calls.h"
115 #include "cgraph.h"
116 #include "expr.h"
117 #include "tree-pass.h"
118 #include "pointer-set.h"
119 #include "target.h"
120 #include "hash-table.h"
121 #include "tree-pretty-print.h"
122 #include "ipa-utils.h"
123 #include "tree-ssa-alias.h"
124 #include "internal-fn.h"
125 #include "gimple-fold.h"
126 #include "gimple-expr.h"
127 #include "gimple.h"
128 #include "ipa-inline.h"
129 #include "diagnostic.h"
130 #include "tree-dfa.h"
131 #include "demangle.h"
132 
/* Set once any One Definition Rule violation has been diagnosed;
   consulted elsewhere to tone down assumptions about the inheritance
   graph being precise (e.g. when a BINFO lookup fails).  */
static bool odr_violation_reported = false;

/* Dummy polymorphic call context: no known outer type or offset;
   initializer is {offset, outer_type, maybe_in_construction,
   maybe_derived_type} -- NOTE(review): field order assumed from the
   struct declaration in ipa-utils.h; confirm there.  */

const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
   = {0, NULL, false, true};

/* Pointer set of all call targets appearing in the cache; used by
   devirt_node_removal_hook to decide when the cache must be flushed.  */
static pointer_set_t *cached_polymorphic_call_targets;
142 
/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type: the representative main variant of this ODR type.  */
  tree type;
  /* All polymorphic bases recorded for this type in the unit.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  Only populated during LTO,
     where tree merging may leave multiple tree nodes per ODR type.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  Lazily created by
     add_type_duplicate to detect new duplicates quickly.  */
  pointer_set_t * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
};
166 
167 
168 /* Return true if BINFO corresponds to a type with virtual methods.
169 
170    Every type has several BINFOs.  One is the BINFO associated by the type
171    while other represents bases of derived types.  The BINFOs representing
172    bases do not have BINFO_VTABLE pointer set when this is the single
173    inheritance (because vtables are shared).  Look up the BINFO of type
174    and check presence of its vtable.  */
175 
176 static inline bool
polymorphic_type_binfo_p(tree binfo)177 polymorphic_type_binfo_p (tree binfo)
178 {
179   /* See if BINFO's type has an virtual table associtated with it.  */
180   return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
181 }
182 
/* One Definition Rule hashtable helpers: describes to hash_table how to
   hash odr_type_d entries and compare them against tree type nodes.  */

struct odr_hasher
{
  /* Entries stored in the table are ODR type nodes...  */
  typedef odr_type_d value_type;
  /* ...looked up by tree type nodes.  */
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
193 
/* Produce hash based on type name.  T must be its own main variant
   (checked below).  */

hashval_t
hash_type_name (tree t)
{
  gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique, so pointer hash is safe for them too.  */
  if (type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* For polymorphic types, we can simply hash the virtual table.  */
  if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
    {
      tree v = BINFO_VTABLE (TYPE_BINFO (t));
      hashval_t hash = 0;

      /* The vtable reference may be an address plus constant offset;
	 fold the offset into the hash and strip down to the vtable
	 decl itself.  */
      if (TREE_CODE (v) == POINTER_PLUS_EXPR)
	{
	  hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
	  v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
	}

      /* Assembler name identifier nodes are shared, so their pointer
	 is a stable identity for the vtable across LTO units.  */
      v = DECL_ASSEMBLER_NAME (v);
      hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
      return hash;
    }

  /* Rest is not implemented yet.  */
  gcc_unreachable ();
}
230 
231 /* Return the computed hashcode for ODR_TYPE.  */
232 
233 inline hashval_t
hash(const value_type * odr_type)234 odr_hasher::hash (const value_type *odr_type)
235 {
236   return hash_type_name (odr_type->type);
237 }
238 
239 /* Compare types T1 and T2 and return true if they are
240    equivalent.  */
241 
242 inline bool
equal(const value_type * t1,const compare_type * ct2)243 odr_hasher::equal (const value_type *t1, const compare_type *ct2)
244 {
245   tree t2 = const_cast <tree> (ct2);
246 
247   gcc_checking_assert (TYPE_MAIN_VARIANT (ct2) == ct2);
248   if (t1->type == t2)
249     return true;
250   if (!in_lto_p)
251     return false;
252   return types_same_for_odr (t1->type, t2);
253 }
254 
255 /* Free ODR type V.  */
256 
257 inline void
remove(value_type * v)258 odr_hasher::remove (value_type *v)
259 {
260   v->bases.release ();
261   v->derived_types.release ();
262   if (v->types_set)
263     pointer_set_destroy (v->types_set);
264   ggc_free (v);
265 }
266 
/* ODR type hash used to look up the ODR type node for a tree type node.
   Existence of the table (odr_hash.is_created) also marks that the
   inheritance graph has been built.  */

typedef hash_table <odr_hasher> odr_hash_type;
static odr_hash_type odr_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)
278 
279 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
280    from VAL->type.  This may happen in LTO where tree merging did not merge
281    all variants of the same type.  It may or may not mean the ODR violation.
282    Add it to the list of duplicates and warn on some violations.  */
283 
284 static void
add_type_duplicate(odr_type val,tree type)285 add_type_duplicate (odr_type val, tree type)
286 {
287   if (!val->types_set)
288     val->types_set = pointer_set_create ();
289 
290   /* See if this duplicate is new.  */
291   if (!pointer_set_insert (val->types_set, type))
292     {
293       bool merge = true;
294       bool base_mismatch = false;
295       gcc_assert (in_lto_p);
296       vec_safe_push (val->types, type);
297       unsigned int i,j;
298 
299       /* First we compare memory layout.  */
300       if (!types_compatible_p (val->type, type))
301 	{
302 	  merge = false;
303 	  odr_violation_reported = true;
304 	  if (BINFO_VTABLE (TYPE_BINFO (val->type))
305 	      && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
306 			     "type %qD violates one definition rule  ",
307 			     type))
308 	    inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
309 		    "a type with the same name but different layout is "
310 		    "defined in another translation unit");
311 	  if (cgraph_dump_file)
312 	    {
313 	      fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
314 
315 	      print_node (cgraph_dump_file, "", val->type, 0);
316 	      putc ('\n',cgraph_dump_file);
317 	      print_node (cgraph_dump_file, "", type, 0);
318 	      putc ('\n',cgraph_dump_file);
319 	    }
320 	}
321 
322       /* Next sanity check that bases are the same.  If not, we will end
323 	 up producing wrong answers.  */
324       for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
325 	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
326 	  {
327 	    odr_type base = get_odr_type
328 			       (BINFO_TYPE
329 				  (BINFO_BASE_BINFO (TYPE_BINFO (type),
330 						     i)),
331 				true);
332 	    if (val->bases.length () <= j || val->bases[j] != base)
333 	      base_mismatch = true;
334 	    j++;
335 	  }
336       if (base_mismatch)
337 	{
338 	  merge = false;
339 	  odr_violation_reported = true;
340 
341 	  if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
342 			  "type %qD violates one definition rule  ",
343 			  type))
344 	    inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
345 		    "a type with the same name but different bases is "
346 		    "defined in another translation unit");
347 	  if (cgraph_dump_file)
348 	    {
349 	      fprintf (cgraph_dump_file, "ODR bse violation or merging bug?\n");
350 
351 	      print_node (cgraph_dump_file, "", val->type, 0);
352 	      putc ('\n',cgraph_dump_file);
353 	      print_node (cgraph_dump_file, "", type, 0);
354 	      putc ('\n',cgraph_dump_file);
355 	    }
356 	}
357 
358       /* Regularize things a little.  During LTO same types may come with
359 	 different BINFOs.  Either because their virtual table was
360 	 not merged by tree merging and only later at decl merging or
361 	 because one type comes with external vtable, while other
362 	 with internal.  We want to merge equivalent binfos to conserve
363 	 memory and streaming overhead.
364 
365 	 The external vtables are more harmful: they contain references
366 	 to external declarations of methods that may be defined in the
367 	 merged LTO unit.  For this reason we absolutely need to remove
368 	 them and replace by internal variants. Not doing so will lead
369          to incomplete answers from possible_polymorphic_call_targets.  */
370       if (!flag_ltrans && merge)
371 	{
372 	  tree master_binfo = TYPE_BINFO (val->type);
373 	  tree v1 = BINFO_VTABLE (master_binfo);
374 	  tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
375 
376 	  if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
377 	    {
378 	      gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
379 			  && operand_equal_p (TREE_OPERAND (v1, 1),
380 					      TREE_OPERAND (v2, 1), 0));
381 	      v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
382 	      v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
383 	    }
384 	  gcc_assert (DECL_ASSEMBLER_NAME (v1)
385 		      == DECL_ASSEMBLER_NAME (v2));
386 
387 	  if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
388 	    {
389 	      unsigned int i;
390 
391 	      TYPE_BINFO (val->type) = TYPE_BINFO (type);
392 	      for (i = 0; i < val->types->length (); i++)
393 		{
394 		  if (TYPE_BINFO ((*val->types)[i])
395 		      == master_binfo)
396 		    TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
397 		}
398 	    }
399 	  else
400 	    TYPE_BINFO (type) = master_binfo;
401 	}
402     }
403 }
404 
/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   possibly new entry.  Return NULL when INSERT is false and there is
   no existing entry.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot;
  odr_type val;
  hashval_t hash;

  /* The table is keyed on main variants only.  */
  type = TYPE_MAIN_VARIANT (type);
  gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
  hash = hash_type_name (type);
  slot = odr_hash.find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
  if (!slot)
    return NULL;

  /* See if we already have entry for type.  */
  if (*slot)
    {
      val = *slot;

      /* With LTO we need to support multiple tree representation of
	 the same ODR type.  */
      if (val->type != type)
        add_type_duplicate (val, type);
    }
  else
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      val = ggc_alloc_cleared_odr_type_d ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      /* Fill the slot before recursing into bases so cycles through the
	 hash table terminate.  */
      *slot = val;
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types.  Others are
	   pointless for devirtualization and we can not precisely
	   determine ODR equivalency of these during LTO.  */
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
	  {
	    odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
									i)),
					  true);
	    base->derived_types.safe_push (val);
	    val->bases.safe_push (base);
	  }
      /* First record bases, then add into array so ids are increasing.  */
      if (odr_types_ptr)
        val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}
462 
463 /* Dump ODR type T and all its derrived type.  INDENT specify indentation for
464    recusive printing.  */
465 
466 static void
467 dump_odr_type (FILE *f, odr_type t, int indent=0)
468 {
469   unsigned int i;
470   fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
471   print_generic_expr (f, t->type, TDF_SLIM);
472   fprintf (f, "%s\n", t->anonymous_namespace ? " (anonymous namespace)":"");
473   if (TYPE_NAME (t->type))
474     {
475       fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
476 	       DECL_SOURCE_FILE (TYPE_NAME (t->type)),
477 	       DECL_SOURCE_LINE (TYPE_NAME (t->type)));
478     }
479   if (t->bases.length ())
480     {
481       fprintf (f, "%*s base odr type ids: ", indent * 2, "");
482       for (i = 0; i < t->bases.length (); i++)
483 	fprintf (f, " %i", t->bases[i]->id);
484       fprintf (f, "\n");
485     }
486   if (t->derived_types.length ())
487     {
488       fprintf (f, "%*s derived types:\n", indent * 2, "");
489       for (i = 0; i < t->derived_types.length (); i++)
490         dump_odr_type (f, t->derived_types[i], indent + 1);
491     }
492   fprintf (f, "\n");
493 }
494 
495 /* Dump the type inheritance graph.  */
496 
497 static void
dump_type_inheritance_graph(FILE * f)498 dump_type_inheritance_graph (FILE *f)
499 {
500   unsigned int i;
501   if (!odr_types_ptr)
502     return;
503   fprintf (f, "\n\nType inheritance graph:\n");
504   for (i = 0; i < odr_types.length (); i++)
505     {
506       if (odr_types[i]->bases.length () == 0)
507 	dump_odr_type (f, odr_types[i]);
508     }
509   for (i = 0; i < odr_types.length (); i++)
510     {
511       if (odr_types[i]->types && odr_types[i]->types->length ())
512 	{
513 	  unsigned int j;
514 	  fprintf (f, "Duplicate tree types for odr type %i\n", i);
515 	  print_node (f, "", odr_types[i]->type, 0);
516 	  for (j = 0; j < odr_types[i]->types->length (); j++)
517 	    {
518 	      tree t;
519 	      fprintf (f, "duplicate #%i\n", j);
520 	      print_node (f, "", (*odr_types[i]->types)[j], 0);
521 	      t = (*odr_types[i]->types)[j];
522 	      while (TYPE_P (t) && TYPE_CONTEXT (t))
523 		{
524 		  t = TYPE_CONTEXT (t);
525 	          print_node (f, "", t, 0);
526 		}
527 	      putc ('\n',f);
528 	    }
529 	}
530     }
531 }
532 
533 /* Given method type T, return type of class it belongs to.
534    Lookup this pointer and get its type.    */
535 
536 tree
method_class_type(tree t)537 method_class_type (tree t)
538 {
539   tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
540   gcc_assert (TREE_CODE (t) == METHOD_TYPE);
541 
542   return TREE_TYPE (first_parm_type);
543 }
544 
/* Initialize IPA devirt and build inheritance tree graph.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  /* Existence of the hash doubles as the "already built" flag.  */
  if (odr_hash.is_created ())
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash.create (23);

  /* We reconstruct the graph starting from types of all virtual methods
     seen in the unit.  Note the if/else if below form a single statement
     of the FOR_EACH_SYMBOL body.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& symtab_real_symbol_p (n))
      get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variants - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (DECL_CONTEXT (n->decl), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}
600 
/* If TARGET has associated node, record it in the NODES array.
   CAN_REFER specifies if the program can refer to the target directly.
   If TARGET is unknown (NULL) or it can not be inserted (for example because
   its body was already removed and there is no way to refer to it), clear
   COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, pointer_set_t *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node;
  enum built_in_function fcode;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
          || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  /* __builtin_unreachable/__builtin_trap in a vtable slot mark
     impossible scenarios; skip them silently.  */
  if (!target
      /* Those are used to mark impossible scenarios.  */
      || (fcode = DECL_FUNCTION_CODE (target))
	  == BUILT_IN_UNREACHABLE
      || fcode == BUILT_IN_TRAP)
    return;

  target_node = cgraph_get_node (target);

  /* Record only real symbols that are either public or defined here.  */
  if (target_node != NULL
      && (TREE_PUBLIC (target)
	  || target_node->definition)
      && symtab_real_symbol_p (target_node))
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (symtab_real_symbol_p (target_node));
      /* INSERTED deduplicates targets found through multiple vtables.  */
      if (!pointer_set_insert (inserted, target))
	{
	  pointer_set_insert (cached_polymorphic_call_targets,
			      target_node);
	  nodes.safe_push (target_node);
	}
    }
  /* An unrecordable target outside an anonymous namespace means the
     enumeration is incomplete.  */
  else if (completep
	   && !type_in_anonymous_namespace_p
		 (method_class_type (TREE_TYPE (target))))
    *completep = false;
}
655 
/* See if BINFO's type matches OUTER_TYPE.  If so, look up the
   BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
   the method in the vtable and insert the method into the NODES array.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOs of types with defined
   virtual table seen on the way from the class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for virtual function in.  INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.

   Clear COMPLETEP when we hit unreferable target.
  */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  pointer_set_t *inserted,
			  pointer_set_t *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  /* Keep the stack of vtable-carrying BINFOs in sync with the walk;
     every push here is matched by a pop before returning.  */
  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Lookup BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      /* A missing inner BINFO is only expected after an ODR violation
	 has been diagnosed; otherwise it would be a bug.  */
      if (!inner_binfo)
	{
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive. If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_get_node (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      /* Look into each vtable at most once.  */
      if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  maybe_record_node (nodes, target, inserted, can_refer, completep);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
758 
759 /* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
760    of TYPE, insert them to NODES, recurse into derived nodes.
761    INSERTED is used to avoid duplicate insertions of methods into NODES.
762    MATCHED_VTABLES are used to avoid duplicate walking vtables.
763    Clear COMPLETEP if unreferable target is found.  */
764 
765 static void
possible_polymorphic_call_targets_1(vec<cgraph_node * > & nodes,pointer_set_t * inserted,pointer_set_t * matched_vtables,tree otr_type,odr_type type,HOST_WIDE_INT otr_token,tree outer_type,HOST_WIDE_INT offset,bool * completep)766 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
767 				     pointer_set_t *inserted,
768 				     pointer_set_t *matched_vtables,
769 				     tree otr_type,
770 				     odr_type type,
771 				     HOST_WIDE_INT otr_token,
772 				     tree outer_type,
773 				     HOST_WIDE_INT offset,
774 				     bool *completep)
775 {
776   tree binfo = TYPE_BINFO (type->type);
777   unsigned int i;
778   vec <tree> type_binfos = vNULL;
779 
780   record_target_from_binfo (nodes, binfo, otr_type, type_binfos, otr_token,
781 			    outer_type, offset,
782 			    inserted, matched_vtables,
783 			    type->anonymous_namespace, completep);
784   type_binfos.release ();
785   for (i = 0; i < type->derived_types.length (); i++)
786     possible_polymorphic_call_targets_1 (nodes, inserted,
787 					 matched_vtables,
788 					 otr_type,
789 					 type->derived_types[i],
790 					 otr_token, outer_type, offset, completep);
791 }
792 
/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

struct polymorphic_call_target_d
{
  /* Vtable slot index the query was made for.  */
  HOST_WIDE_INT otr_token;
  /* Call context (outer type, offset, construction/derived flags).  */
  ipa_polymorphic_call_context context;
  /* OTR type of the call.  */
  odr_type type;
  /* Memoized list of possible call targets.  */
  vec <cgraph_node *> targets;
  /* Count related to targets excluding construction-time ones --
     NOTE(review): exact semantics set by the cache-filling code outside
     this chunk; confirm there.  */
  int nonconstruction_targets;
  /* True when TARGETS is known to be the complete answer.  */
  bool complete;
};
808 
/* Polymorphic call target cache helpers: hash/compare cache entries by
   (type, token, context) query key.  */

struct polymorphic_call_target_hasher
{
  /* Entries are compared against other entries (same struct serves as
     both stored value and lookup key).  */
  typedef polymorphic_call_target_d value_type;
  typedef polymorphic_call_target_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
819 
/* Return the computed hashcode for ODR_QUERY.  */

inline hashval_t
polymorphic_call_target_hasher::hash (const value_type *odr_query)
{
  hashval_t hash;

  /* Combine the token with the ODR type id...  */
  hash = iterative_hash_host_wide_int
	  (odr_query->otr_token,
	   odr_query->type->id);
  /* ...then the outer type and offset of the context...  */
  hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
				   hash);
  hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
  /* ...and finally both context flags packed into two bits.  */
  return iterative_hash_hashval_t
	    (((int)odr_query->context.maybe_in_construction << 1)
	     | (int)odr_query->context.maybe_derived_type, hash);
}
837 
838 /* Compare cache entries T1 and T2.  */
839 
840 inline bool
equal(const value_type * t1,const compare_type * t2)841 polymorphic_call_target_hasher::equal (const value_type *t1,
842 				       const compare_type *t2)
843 {
844   return (t1->type == t2->type && t1->otr_token == t2->otr_token
845 	  && t1->context.offset == t2->context.offset
846 	  && t1->context.outer_type == t2->context.outer_type
847 	  && t1->context.maybe_in_construction
848 	      == t2->context.maybe_in_construction
849 	  && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
850 }
851 
/* Remove entry in polymorphic call target cache hash.  */

inline void
polymorphic_call_target_hasher::remove (value_type *v)
{
  /* Release the targets vector before freeing the entry itself.
     NOTE(review): entries are freed with free, so the allocation site
     (outside this chunk) presumably uses malloc-family allocation --
     confirm.  */
  v->targets.release ();
  free (v);
}
860 
/* Polymorphic call target query cache, keyed by
   polymorphic_call_target_hasher (type/token/context).  */

typedef hash_table <polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
static polymorphic_call_target_hash_type polymorphic_call_target_hash;
866 
867 /* Destroy polymorphic call target query cache.  */
868 
869 static void
free_polymorphic_call_targets_hash()870 free_polymorphic_call_targets_hash ()
871 {
872   if (cached_polymorphic_call_targets)
873     {
874       polymorphic_call_target_hash.dispose ();
875       pointer_set_destroy (cached_polymorphic_call_targets);
876       cached_polymorphic_call_targets = NULL;
877     }
878 }
879 
/* When virtual function N is removed, we may need to flush the cache.
   D is the unused hook user data.  */

static void
devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
{
  /* Only flush when N actually appears in some cached target list;
     removing an uncached node cannot invalidate anything.  */
  if (cached_polymorphic_call_targets
      && pointer_set_contains (cached_polymorphic_call_targets, n))
    free_polymorphic_call_targets_hash ();
}
889 
890 /* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
891    is contained at CONTEXT->OFFSET.  Walk the memory representation of
892    CONTEXT->OUTER_TYPE and find the outermost class type that match
893    EXPECTED_TYPE or contain EXPECTED_TYPE as a base.  Update CONTEXT
894    to represent it.
895 
896    For example when CONTEXT represents type
897    class A
898      {
899        int a;
900        class B b;
901      }
902    and we look for type at offset sizeof(int), we end up with B and offset 0.
903    If the same is produced by multiple inheritance, we end up with A and offset
904    sizeof(int).
905 
906    If we can not find corresponding class, give up by setting
907    CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to NULL.
908    Return true when lookup was sucesful.  */
909 
910 static bool
get_class_context(ipa_polymorphic_call_context * context,tree expected_type)911 get_class_context (ipa_polymorphic_call_context *context,
912 		   tree expected_type)
913 {
914   tree type = context->outer_type;
915   HOST_WIDE_INT offset = context->offset;
916 
917   /* Find the sub-object the constant actually refers to and mark whether it is
918      an artificial one (as opposed to a user-defined one).  */
919   while (true)
920     {
921       HOST_WIDE_INT pos, size;
922       tree fld;
923 
924       /* On a match, just return what we found.  */
925       if (TREE_CODE (type) == TREE_CODE (expected_type)
926 	  && types_same_for_odr (type, expected_type))
927 	{
928 	  /* Type can not contain itself on an non-zero offset.  In that case
929 	     just give up.  */
930 	  if (offset != 0)
931 	    goto give_up;
932 	  gcc_assert (offset == 0);
933 	  return true;
934 	}
935 
936       /* Walk fields and find corresponding on at OFFSET.  */
937       if (TREE_CODE (type) == RECORD_TYPE)
938 	{
939 	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
940 	    {
941 	      if (TREE_CODE (fld) != FIELD_DECL)
942 		continue;
943 
944 	      pos = int_bit_position (fld);
945 	      size = tree_to_uhwi (DECL_SIZE (fld));
946 	      if (pos <= offset && (pos + size) > offset)
947 		break;
948 	    }
949 
950 	  if (!fld)
951 	    goto give_up;
952 
953 	  type = TREE_TYPE (fld);
954 	  offset -= pos;
955 	  /* DECL_ARTIFICIAL represents a basetype.  */
956 	  if (!DECL_ARTIFICIAL (fld))
957 	    {
958 	      context->outer_type = type;
959 	      context->offset = offset;
960 	      /* As soon as we se an field containing the type,
961 		 we know we are not looking for derivations.  */
962 	      context->maybe_derived_type = false;
963 	    }
964 	}
965       else if (TREE_CODE (type) == ARRAY_TYPE)
966 	{
967 	  tree subtype = TREE_TYPE (type);
968 
969 	  /* Give up if we don't know array size.  */
970 	  if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
971 	      || !tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
972 	    goto give_up;
973 	  offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
974 	  type = subtype;
975 	  context->outer_type = type;
976 	  context->offset = offset;
977 	  context->maybe_derived_type = false;
978 	}
979       /* Give up on anything else.  */
980       else
981 	goto give_up;
982     }
983 
984   /* If we failed to find subtype we look for, give up and fall back to the
985      most generic query.  */
986 give_up:
987   context->outer_type = expected_type;
988   context->offset = 0;
989   context->maybe_derived_type = true;
990   context->maybe_in_construction = true;
991   /* POD can be changed to an instance of a polymorphic type by
992      placement new.  Here we play safe and assume that any
993      non-polymorphic type is POD.  */
994   if ((TREE_CODE (type) != RECORD_TYPE
995        || !TYPE_BINFO (type)
996        || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
997       && (!TYPE_SIZE (type)
998 	  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
999 	  || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
1000 	      tree_to_uhwi (TYPE_SIZE (type)))))
1001     return true;
1002   return false;
1003 }
1004 
1005 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.  */
1006 
1007 static bool
contains_type_p(tree outer_type,HOST_WIDE_INT offset,tree otr_type)1008 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
1009 		 tree otr_type)
1010 {
1011   ipa_polymorphic_call_context context = {offset, outer_type,
1012 					  false, true};
1013   return get_class_context (&context, otr_type);
1014 }
1015 
/* Lookup base of BINFO that has virtual table VTABLE with OFFSET.
   Walk BINFO and its bases depth-first and return the first (sub)binfo
   whose vtable pointer decodes to VTABLE at exactly OFFSET, or NULL
   when there is none.  */

static tree
subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
				tree vtable)
{
  tree v = BINFO_VTABLE (binfo);
  int i;
  tree base_binfo;
  unsigned HOST_WIDE_INT this_offset;

  if (v)
    {
      /* BINFO_VTABLE is an expression; decode it into the vtable decl
	 and offset.  Front-end produced binfos must decode.  */
      if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
	gcc_unreachable ();

      /* NOTE(review): comparison is by DECL_ASSEMBLER_NAME rather than
	 decl identity — presumably so equivalent vtable decls from
	 different units still match; confirm.  */
      if (offset == this_offset
	  && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
	return binfo;
    }

  /* Recurse into polymorphic bases only; others carry no vtable.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (polymorphic_type_binfo_p (base_binfo))
      {
	base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
	if (base_binfo)
	  return base_binfo;
      }
  return NULL;
}
1046 
/* T is known constant value of virtual table pointer.
   Store the virtual table decl to *V and the byte offset within it to
   *OFFSET.  Return false if T does not look like a virtual table
   reference.  */

bool
vtable_pointer_value_to_vtable (tree t, tree *v, unsigned HOST_WIDE_INT *offset)
{
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain object's BINFO from the context of the virtual table.
     This one contains pointer to virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer matches what
     we propagated through.

     In the case of virtual inheritance, the virtual tables may
     be nested, i.e. the offset may be different from 16 and we may
     need to dive into the type representation.  */
  if (TREE_CODE (t) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
	  == VAR_DECL)
      && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
					 (TREE_OPERAND (t, 0), 0), 0)))
    {
      /* Extract the vtable VAR_DECL and constant offset from the
	 &MEM[&vtable + cst] form.  */
      *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
      *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
      return true;
    }

  /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
     We need to handle it when T comes from static variable initializer or
     BINFO. */
  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
    {
      *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
      t = TREE_OPERAND (t, 0);
    }
  else
    *offset = 0;

  /* What remains must be the address of the vtable itself.  */
  if (TREE_CODE (t) != ADDR_EXPR)
    return false;
  *v = TREE_OPERAND (t, 0);
  return true;
}
1093 
/* T is known constant value of virtual table pointer.  Return BINFO of the
   instance type, or NULL_TREE when it cannot be determined.  */

tree
vtable_pointer_value_to_binfo (tree t)
{
  tree vtable;
  unsigned HOST_WIDE_INT offset;

  if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
    return NULL_TREE;

  /* FIXME: for stores of construction vtables we return NULL,
     because we do not have BINFO for those. Eventually we should fix
     our representation to allow this case to be handled, too.
     In the case we see store of BINFO we however may assume
     that standard folding will be able to cope with it.  */
  return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
					 offset, vtable);
}
1114 
1115 /* Proudce polymorphic call context for call method of instance
1116    that is located within BASE (that is assumed to be a decl) at OFFSET. */
1117 
1118 static void
get_polymorphic_call_info_for_decl(ipa_polymorphic_call_context * context,tree base,HOST_WIDE_INT offset)1119 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
1120 				    tree base, HOST_WIDE_INT offset)
1121 {
1122   gcc_assert (DECL_P (base));
1123 
1124   context->outer_type = TREE_TYPE (base);
1125   context->offset = offset;
1126   /* Make very conservative assumption that all objects
1127      may be in construction.
1128      TODO: ipa-prop already contains code to tell better.
1129      merge it later.  */
1130   context->maybe_in_construction = true;
1131   context->maybe_derived_type = false;
1132 }
1133 
/* CST is an invariant (address of decl), try to get meaningful
   polymorphic call context for polymorphic call of method
   of instance of OTR_TYPE that is located at OFFSET of this invariant.
   Store the result into CONTEXT.
   Return FALSE if nothing meaningful can be found.  */

bool
get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
				          tree cst,
				          tree otr_type,
				          HOST_WIDE_INT offset)
{
  HOST_WIDE_INT offset2, size, max_size;
  tree base;

  if (TREE_CODE (cst) != ADDR_EXPR)
    return false;

  cst = TREE_OPERAND (cst, 0);
  base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
  /* Require a decl base with a known, non-varying extent
     (max_size == size means the access size is exact).  */
  if (!DECL_P (base) || max_size == -1 || max_size != size)
    return false;

  /* Only type inconsistent programs can have otr_type that is
     not part of outer type.  */
  if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
    return false;

  get_polymorphic_call_info_for_decl (context, base, offset);
  return true;
}
1164 
/* Given REF call in FNDECL, determine class of the polymorphic
   call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
   REF is the OBJ_TYPE_REF of the call; FNDECL the function containing
   it.  Return pointer to the object described by the context, or NULL
   when the outer object was determined exactly from a decl.  */

tree
get_polymorphic_call_info (tree fndecl,
			   tree ref,
			   tree *otr_type,
			   HOST_WIDE_INT *otr_token,
			   ipa_polymorphic_call_context *context)
{
  tree base_pointer;
  *otr_type = obj_type_ref_class (ref);
  *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));

  /* Set up basic info in case we find nothing interesting in the analysis.  */
  context->outer_type = *otr_type;
  context->offset = 0;
  base_pointer = OBJ_TYPE_REF_OBJECT (ref);
  context->maybe_derived_type = true;
  context->maybe_in_construction = false;

  /* Walk SSA for outer object.  Each iteration strips one layer of
     copies, address-takings or pointer arithmetic, accumulating the
     bit offset into CONTEXT->OFFSET, until it hits something it cannot
     look through.  */
  do
    {
      /* Step through single-rhs SSA copies.  */
      if (TREE_CODE (base_pointer) == SSA_NAME
	  && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
	  && SSA_NAME_DEF_STMT (base_pointer)
	  && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
	{
	  base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
	  STRIP_NOPS (base_pointer);
	}
      else if (TREE_CODE (base_pointer) == ADDR_EXPR)
	{
	  HOST_WIDE_INT size, max_size;
	  HOST_WIDE_INT offset2;
	  tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
					       &offset2, &size, &max_size);

	  /* If this is a varying address, punt.  */
	  if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
	      && max_size != -1
	      && max_size == size)
	    {
	      /* We found dereference of a pointer.  Type of the pointer
		 and MEM_REF is meaningless, but we can look futher.  */
	      if (TREE_CODE (base) == MEM_REF)
		{
		  base_pointer = TREE_OPERAND (base, 0);
		  context->offset
		     += offset2 + mem_ref_offset (base).low * BITS_PER_UNIT;
		  /* Outer type is no longer known once we look through
		     a dereference.  */
		  context->outer_type = NULL;
		}
	      /* We found base object.  In this case the outer_type
		 is known.  */
	      else if (DECL_P (base))
		{
		  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));

		  /* Only type inconsistent programs can have otr_type that is
		     not part of outer type.  */
		  if (!contains_type_p (TREE_TYPE (base),
					context->offset + offset2, *otr_type))
		    {
		      /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
			 code sequences; we arrange the calls to be builtin_unreachable
			 later.  */
		      *otr_token = INT_MAX;
		      return base_pointer;
		    }
		  get_polymorphic_call_info_for_decl (context, base,
						      context->offset + offset2);
		  return NULL;
		}
	      else
		break;
	    }
	  else
	    break;
	}
      else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
	       && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
	{
	  /* NOTE(review): the guard uses tree_fits_uhwi_p but the value is
	     read with tree_to_shwi; a value fitting uhwi but not shwi would
	     trip checking in tree_to_shwi — confirm this is intended.  */
	  context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
		    * BITS_PER_UNIT;
	  base_pointer = TREE_OPERAND (base_pointer, 0);
	}
      else
	break;
    }
  while (true);

  /* Try to determine type of the outer object.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
    {
      /* See if parameter is THIS pointer of a method.  */
      if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
	  && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
	{
	  context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
	  gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);

	  /* Dynamic casting has possibly upcasted the type
	     in the hiearchy.  In this case outer type is less
	     informative than inner type and we should forget
	     about it.  */
	  if (!contains_type_p (context->outer_type, context->offset,
				*otr_type))
	    {
	      context->outer_type = NULL;
	      return base_pointer;
	    }

	  /* If the function is constructor or destructor, then
	     the type is possibly in construction, but we know
	     it is not derived type.  */
	  if (DECL_CXX_CONSTRUCTOR_P (fndecl)
	      || DECL_CXX_DESTRUCTOR_P (fndecl))
	    {
	      context->maybe_in_construction = true;
	      context->maybe_derived_type = false;
	    }
	  else
	    {
	      context->maybe_derived_type = true;
	      context->maybe_in_construction = false;
	    }
	  return base_pointer;
	}
      /* Non-PODs passed by value are really passed by invisible
	 reference.  In this case we also know the type of the
	 object.  */
      if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
	{
	  context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
	  gcc_assert (!POINTER_TYPE_P (context->outer_type));
	  /* Only type inconsistent programs can have otr_type that is
	     not part of outer type.  */
	  if (!contains_type_p (context->outer_type, context->offset,
			        *otr_type))
	    {
	      /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
		 code sequences; we arrange the calls to be builtin_unreachable
		 later.  */
	      *otr_token = INT_MAX;
	      return base_pointer;
	    }
	  context->maybe_derived_type = false;
	  context->maybe_in_construction = false;
          return base_pointer;
	}
    }
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to an constructor call prior our refernece.
     We do not make this type of flow sensitive analysis yet.  */
  return base_pointer;
}
1325 
1326 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
1327    Lookup their respecitve virtual methods for OTR_TOKEN and OTR_TYPE
1328    and insert them to NODES.
1329 
1330    MATCHED_VTABLES and INSERTED is used to avoid duplicated work.  */
1331 
1332 static void
record_targets_from_bases(tree otr_type,HOST_WIDE_INT otr_token,tree outer_type,HOST_WIDE_INT offset,vec<cgraph_node * > & nodes,pointer_set_t * inserted,pointer_set_t * matched_vtables,bool * completep)1333 record_targets_from_bases (tree otr_type,
1334 			   HOST_WIDE_INT otr_token,
1335 			   tree outer_type,
1336 			   HOST_WIDE_INT offset,
1337 			   vec <cgraph_node *> &nodes,
1338 			   pointer_set_t *inserted,
1339 			   pointer_set_t *matched_vtables,
1340 			   bool *completep)
1341 {
1342   while (true)
1343     {
1344       HOST_WIDE_INT pos, size;
1345       tree base_binfo;
1346       tree fld;
1347 
1348       if (types_same_for_odr (outer_type, otr_type))
1349 	return;
1350 
1351       for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
1352 	{
1353 	  if (TREE_CODE (fld) != FIELD_DECL)
1354 	    continue;
1355 
1356 	  pos = int_bit_position (fld);
1357 	  size = tree_to_shwi (DECL_SIZE (fld));
1358 	  if (pos <= offset && (pos + size) > offset
1359 	      /* Do not get confused by zero sized bases.  */
1360 	      && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
1361 	    break;
1362 	}
1363       /* Within a class type we should always find correcponding fields.  */
1364       gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
1365 
1366       /* Nonbasetypes should have been stripped by outer_class_type.  */
1367       gcc_assert (DECL_ARTIFICIAL (fld));
1368 
1369       outer_type = TREE_TYPE (fld);
1370       offset -= pos;
1371 
1372       base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
1373 					offset, otr_type);
1374       if (!base_binfo)
1375 	{
1376 	  gcc_assert (odr_violation_reported);
1377 	  return;
1378 	}
1379       gcc_assert (base_binfo);
1380       if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
1381 	{
1382 	  bool can_refer;
1383 	  tree target = gimple_get_virt_method_for_binfo (otr_token,
1384 							  base_binfo,
1385 							  &can_refer);
1386 	  maybe_record_node (nodes, target, inserted, can_refer, completep);
1387 	  pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
1388 	}
1389     }
1390 }
1391 
/* When virtual table N is removed, we may need to flush the cache.
   D is the unused hook user data.  */

static void
devirt_variable_node_removal_hook (varpool_node *n,
				   void *d ATTRIBUTE_UNUSED)
{
  /* Anonymous-namespace vtables can be removed when fully inlined;
     cached target lists built from them would become stale.  */
  if (cached_polymorphic_call_targets
      && DECL_VIRTUAL_P (n->decl)
      && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
    free_polymorphic_call_targets_hash ();
}
1403 
/* Return vector containing possible targets of polymorphic call of type
   OTR_TYPE caling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
   If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containig
   OTR_TYPE and include their virtual method.  This is useful for types
   possibly in construction or destruction where the virtual table may
   temporarily change to one of base types.  INCLUDE_DERIVER_TYPES make
   us to walk the inheritance graph for all derivations.

   OTR_TOKEN == INT_MAX is used to mark calls that are provably
   undefined and should be redirected to unreachable.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
   in the target cache.  If user needs to visit every target list
   just once, it can memoize them.

   NONCONSTRUCTION_TARGETS specify number of targets with asumption that
   the type is not in the construction.  Those targets appear first in the
   vector returned.

   Returned vector is placed into cache.  It is NOT caller's responsibility
   to free it.  The vector can be freed on cgraph_remove_node call if
   the particular node is a virtual function present in the cache.  */

vec <cgraph_node *>
possible_polymorphic_call_targets (tree otr_type,
			           HOST_WIDE_INT otr_token,
				   ipa_polymorphic_call_context context,
			           bool *completep,
			           void **cache_token,
				   int *nonconstruction_targetsp)
{
  static struct cgraph_node_hook_list *node_removal_hook_holder;
  pointer_set_t *inserted;
  pointer_set_t *matched_vtables;
  vec <cgraph_node *> nodes = vNULL;
  odr_type type, outer_type;
  polymorphic_call_target_d key;
  polymorphic_call_target_d **slot;
  unsigned int i;
  tree binfo, target;
  bool complete;
  bool can_refer;

  /* If ODR is not initialized, return empty incomplete list.  */
  if (!odr_hash.is_created ())
    {
      if (completep)
	*completep = false;
      if (cache_token)
	*cache_token = NULL;
      if (nonconstruction_targetsp)
	*nonconstruction_targetsp = 0;
      return nodes;
    }

  /* If we hit type inconsistency, just return empty list of targets.
     The list is "complete": such calls are provably undefined.  */
  if (otr_token == INT_MAX)
    {
      if (completep)
	*completep = true;
      if (cache_token)
	*cache_token = NULL;
      if (nonconstruction_targetsp)
	*nonconstruction_targetsp = 0;
      return nodes;
    }

  type = get_odr_type (otr_type, true);

  /* Lookup the outer class type we want to walk.  */
  if (context.outer_type
      && !get_class_context (&context, otr_type))
    {
      if (completep)
	*completep = false;
      if (cache_token)
	*cache_token = NULL;
      if (nonconstruction_targetsp)
	*nonconstruction_targetsp = 0;
      return nodes;
    }

  /* We canonicalize our query, so we do not need extra hashtable entries.  */

  /* Without outer type, we have no use for offset.  Just do the
     basic search from innter type  */
  if (!context.outer_type)
    {
      context.outer_type = otr_type;
      context.offset = 0;
    }
  /* We need to update our hiearchy if the type does not exist.  */
  outer_type = get_odr_type (context.outer_type, true);
  /* If outer and inner type match, there are no bases to see.  */
  if (type == outer_type)
    context.maybe_in_construction = false;
  /* If the type is complete, there are no derivations.  */
  if (TYPE_FINAL_P (outer_type->type))
    context.maybe_derived_type = false;

  /* Initialize query cache and the flush hooks on first use.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = pointer_set_create ();
      polymorphic_call_target_hash.create (23);
      if (!node_removal_hook_holder)
	{
	  node_removal_hook_holder =
	    cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
	  varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
					 NULL);
	}
    }

  /* Lookup cached answer.  */
  key.type = type;
  key.otr_token = otr_token;
  key.context = context;
  slot = polymorphic_call_target_hash.find_slot (&key, INSERT);
  if (cache_token)
   *cache_token = (void *)*slot;
  if (*slot)
    {
      if (completep)
	*completep = (*slot)->complete;
      if (nonconstruction_targetsp)
	*nonconstruction_targetsp = (*slot)->nonconstruction_targets;
      return (*slot)->targets;
    }

  complete = true;

  /* Do actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *)*slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;

  inserted = pointer_set_create ();
  matched_vtables = pointer_set_create ();

  /* First see virtual method of type itself.  */
  binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
			       context.offset, otr_type);
  if (binfo)
    target = gimple_get_virt_method_for_binfo (otr_token, binfo,
					       &can_refer);
  else
    {
      /* Only ODR violating programs reach here.
	 NOTE(review): on this path CAN_REFER is left uninitialized yet
	 passed to maybe_record_node below, and BINFO_VTABLE (binfo) is
	 evaluated with BINFO == NULL further down — confirm both.  */
      gcc_assert (odr_violation_reported);
      target = NULL;
    }

  maybe_record_node (nodes, target, inserted, can_refer, &complete);

  if (target)
    {
      /* In the case we get complete method, we don't need
	 to walk derivations.  */
      if (DECL_FINAL_P (target))
	context.maybe_derived_type = false;
    }
  else
    gcc_assert (!complete);

  pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));

  /* Next walk recursively all derived types.  */
  if (context.maybe_derived_type)
    {
      /* For anonymous namespace types we can attempt to build full type.
	 All derivations must be in this unit (unless we see partial unit).  */
      if (!type->anonymous_namespace || flag_ltrans)
	complete = false;
      for (i = 0; i < outer_type->derived_types.length(); i++)
	possible_polymorphic_call_targets_1 (nodes, inserted,
					     matched_vtables,
					     otr_type,
					     outer_type->derived_types[i],
					     otr_token, outer_type->type,
					     context.offset, &complete);
    }

  /* Finally walk bases, if asked to.  Targets recorded past this point
     are possible only during construction/destruction.  */
  (*slot)->nonconstruction_targets = nodes.length();
  if (context.maybe_in_construction)
    record_targets_from_bases (otr_type, otr_token, outer_type->type,
			       context.offset, nodes, inserted,
			       matched_vtables, &complete);

  (*slot)->targets = nodes;
  (*slot)->complete = complete;
  if (completep)
    *completep = complete;
  if (nonconstruction_targetsp)
    *nonconstruction_targetsp = (*slot)->nonconstruction_targets;

  pointer_set_destroy (inserted);
  pointer_set_destroy (matched_vtables);
  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}
1610 
1611 /* Dump all possible targets of a polymorphic call.  */
1612 
1613 void
dump_possible_polymorphic_call_targets(FILE * f,tree otr_type,HOST_WIDE_INT otr_token,const ipa_polymorphic_call_context & ctx)1614 dump_possible_polymorphic_call_targets (FILE *f,
1615 					tree otr_type,
1616 					HOST_WIDE_INT otr_token,
1617 					const ipa_polymorphic_call_context &ctx)
1618 {
1619   vec <cgraph_node *> targets;
1620   bool final;
1621   odr_type type = get_odr_type (otr_type, false);
1622   unsigned int i;
1623   int nonconstruction;
1624 
1625   if (!type)
1626     return;
1627   targets = possible_polymorphic_call_targets (otr_type, otr_token,
1628 					       ctx,
1629 					       &final, NULL, &nonconstruction);
1630   fprintf (f, "  Targets of polymorphic call of type %i:", type->id);
1631   print_generic_expr (f, type->type, TDF_SLIM);
1632   fprintf (f, " token %i\n", (int)otr_token);
1633   if (ctx.outer_type || ctx.offset)
1634     {
1635       fprintf (f, "    Contained in type:");
1636       print_generic_expr (f, ctx.outer_type, TDF_SLIM);
1637       fprintf (f, " at offset " HOST_WIDE_INT_PRINT_DEC "\n",
1638 	       ctx.offset);
1639     }
1640 
1641   fprintf (f, "    %s%s%s\n      ",
1642 	   final ? "This is a complete list." :
1643 	   "This is partial list; extra targets may be defined in other units.",
1644 	   ctx.maybe_in_construction ? " (base types included)" : "",
1645 	   ctx.maybe_derived_type ? " (derived types included)" : "");
1646   for (i = 0; i < targets.length (); i++)
1647     {
1648       char *name = NULL;
1649       if (i == (unsigned)nonconstruction)
1650 	fprintf (f, "\n     If the type is in construction,"
1651 		 " then additional tarets are:\n"
1652 		 "      ");
1653       if (in_lto_p)
1654 	name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
1655       fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
1656       if (in_lto_p)
1657 	free (name);
1658       if (!targets[i]->definition)
1659 	fprintf (f, " (no definition%s)",
1660 		 DECL_DECLARED_INLINE_P (targets[i]->decl)
1661 		 ? " inline" : "");
1662     }
1663   fprintf (f, "\n\n");
1664 }
1665 
1666 
/* Return true if N can be possibly target of a polymorphic call of
   OTR_TYPE/OTR_TOKEN within call context CTX.  */

bool
possible_polymorphic_call_target_p (tree otr_type,
				    HOST_WIDE_INT otr_token,
				    const ipa_polymorphic_call_context &ctx,
				    struct cgraph_node *n)
{
  vec <cgraph_node *> targets;
  unsigned int i;
  enum built_in_function fcode;
  bool final;

  /* __builtin_unreachable and __builtin_trap stand in for removed
     targets, so always accept them.
     NOTE(review): DECL_FUNCTION_CODE is read without first checking the
     decl is actually a builtin — confirm that is safe here.  */
  if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
      && ((fcode = DECL_FUNCTION_CODE (n->decl))
	  == BUILT_IN_UNREACHABLE
          || fcode == BUILT_IN_TRAP))
    return true;

  /* Without the ODR hash we cannot rule anything out.  */
  if (!odr_hash.is_created ())
    return true;
  targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
  for (i = 0; i < targets.length (); i++)
    if (symtab_semantically_equivalent_p (n, targets[i]))
      return true;

  /* At a moment we allow middle end to dig out new external declarations
     as a targets of polymorphic calls.  */
  if (!final && !n->definition)
    return true;
  return false;
}
1700 
1701 
/* After callgraph construction new external nodes may appear.
   Add them into the graph and flush the now possibly stale target
   cache.  */

void
update_type_inheritance_graph (void)
{
  struct cgraph_node *n;

  if (!odr_hash.is_created ())
    return;
  free_polymorphic_call_targets_hash ();
  timevar_push (TV_IPA_INHERITANCE);
  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  */
  FOR_EACH_FUNCTION (n)
    if (DECL_VIRTUAL_P (n->decl)
	&& !n->definition
	&& symtab_real_symbol_p (n))
      get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
  timevar_pop (TV_IPA_INHERITANCE);
}
1723 
1724 
/* Return true if N looks like a likely target of a polymorphic call.
   Rule out cxa_pure_virtual, noreturns, functions declared cold and
   other obvious cases.  */
1728 
1729 bool
likely_target_p(struct cgraph_node * n)1730 likely_target_p (struct cgraph_node *n)
1731 {
1732   int flags;
1733   /* cxa_pure_virtual and similar things are not likely.  */
1734   if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
1735     return false;
1736   flags = flags_from_decl_or_type (n->decl);
1737   if (flags & ECF_NORETURN)
1738     return false;
1739   if (lookup_attribute ("cold",
1740 			DECL_ATTRIBUTES (n->decl)))
1741     return false;
1742   if (n->frequency < NODE_FREQUENCY_NORMAL)
1743     return false;
1744   return true;
1745 }
1746 
/* The ipa-devirt pass.
   When a polymorphic call has only one likely target in the unit,
   turn it into a speculative call.  */
1750 
1751 static unsigned int
ipa_devirt(void)1752 ipa_devirt (void)
1753 {
1754   struct cgraph_node *n;
1755   struct pointer_set_t *bad_call_targets = pointer_set_create ();
1756   struct cgraph_edge *e;
1757 
1758   int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
1759   int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
1760   int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
1761 
1762   FOR_EACH_DEFINED_FUNCTION (n)
1763     {
1764       bool update = false;
1765       if (dump_file && n->indirect_calls)
1766 	fprintf (dump_file, "\n\nProcesing function %s/%i\n",
1767 		 n->name (), n->order);
1768       for (e = n->indirect_calls; e; e = e->next_callee)
1769 	if (e->indirect_info->polymorphic)
1770 	  {
1771 	    struct cgraph_node *likely_target = NULL;
1772 	    void *cache_token;
1773 	    bool final;
1774 	    int nonconstruction_targets;
1775 	    vec <cgraph_node *>targets
1776 	       = possible_polymorphic_call_targets
1777 		    (e, &final, &cache_token, &nonconstruction_targets);
1778 	    unsigned int i;
1779 
1780 	    if (dump_file)
1781 	      dump_possible_polymorphic_call_targets
1782 		(dump_file, e);
1783 
1784 	    npolymorphic++;
1785 
1786 	    if (!cgraph_maybe_hot_edge_p (e))
1787 	      {
1788 		if (dump_file)
1789 		  fprintf (dump_file, "Call is cold\n\n");
1790 		ncold++;
1791 		continue;
1792 	      }
1793 	    if (e->speculative)
1794 	      {
1795 		if (dump_file)
1796 		  fprintf (dump_file, "Call is aready speculated\n\n");
1797 		nspeculated++;
1798 
1799 		/* When dumping see if we agree with speculation.  */
1800 		if (!dump_file)
1801 		  continue;
1802 	      }
1803 	    if (pointer_set_contains (bad_call_targets,
1804 				      cache_token))
1805 	      {
1806 		if (dump_file)
1807 		  fprintf (dump_file, "Target list is known to be useless\n\n");
1808 		nmultiple++;
1809 		continue;
1810 	      }
1811 	    for (i = 0; i < targets.length (); i++)
1812 	      if (likely_target_p (targets[i]))
1813 		{
1814 		  if (likely_target)
1815 		    {
1816 		      if (i < (unsigned) nonconstruction_targets)
1817 			{
1818 			  likely_target = NULL;
1819 			  if (dump_file)
1820 			    fprintf (dump_file, "More than one likely target\n\n");
1821 			  nmultiple++;
1822 			}
1823 		      break;
1824 		    }
1825 		  likely_target = targets[i];
1826 		}
1827 	    if (!likely_target)
1828 	      {
1829 		pointer_set_insert (bad_call_targets, cache_token);
1830 	        continue;
1831 	      }
1832 	    /* This is reached only when dumping; check if we agree or disagree
1833  	       with the speculation.  */
1834 	    if (e->speculative)
1835 	      {
1836 		struct cgraph_edge *e2;
1837 		struct ipa_ref *ref;
1838 		cgraph_speculative_call_info (e, e2, e, ref);
1839 		if (cgraph_function_or_thunk_node (e2->callee, NULL)
1840 		    == cgraph_function_or_thunk_node (likely_target, NULL))
1841 		  {
1842 		    fprintf (dump_file, "We agree with speculation\n\n");
1843 		    nok++;
1844 		  }
1845 		else
1846 		  {
1847 		    fprintf (dump_file, "We disagree with speculation\n\n");
1848 		    nwrong++;
1849 		  }
1850 		continue;
1851 	      }
1852 	    if (!likely_target->definition)
1853 	      {
1854 		if (dump_file)
1855 		  fprintf (dump_file, "Target is not an definition\n\n");
1856 		nnotdefined++;
1857 		continue;
1858 	      }
1859 	    /* Do not introduce new references to external symbols.  While we
1860 	       can handle these just well, it is common for programs to
1861 	       incorrectly with headers defining methods they are linked
1862 	       with.  */
1863 	    if (DECL_EXTERNAL (likely_target->decl))
1864 	      {
1865 		if (dump_file)
1866 		  fprintf (dump_file, "Target is external\n\n");
1867 		nexternal++;
1868 		continue;
1869 	      }
1870 	    /* Don't use an implicitly-declared destructor (c++/58678).  */
1871 	    struct cgraph_node *non_thunk_target
1872 	      = cgraph_function_node (likely_target);
1873 	    if (DECL_ARTIFICIAL (non_thunk_target->decl))
1874 	      {
1875 		if (dump_file)
1876 		  fprintf (dump_file, "Target is artificial\n\n");
1877 		nartificial++;
1878 		continue;
1879 	      }
1880 	    if (cgraph_function_body_availability (likely_target)
1881 		<= AVAIL_OVERWRITABLE
1882 		&& symtab_can_be_discarded (likely_target))
1883 	      {
1884 		if (dump_file)
1885 		  fprintf (dump_file, "Target is overwritable\n\n");
1886 		noverwritable++;
1887 		continue;
1888 	      }
1889 	    else
1890 	      {
1891 		if (dump_file)
1892 		  fprintf (dump_file,
1893 			   "Speculatively devirtualizing call in %s/%i to %s/%i\n\n",
1894 			   n->name (), n->order,
1895 			   likely_target->name (),
1896 			   likely_target->order);
1897 		if (!symtab_can_be_discarded (likely_target))
1898 		  {
1899 		    cgraph_node *alias;
1900 		    alias = cgraph (symtab_nonoverwritable_alias
1901 				     (likely_target));
1902 		    if (alias)
1903 		      likely_target = alias;
1904 		  }
1905 		nconverted++;
1906 		update = true;
1907 		cgraph_turn_edge_to_speculative
1908 		  (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
1909 	      }
1910 	  }
1911       if (update)
1912 	inline_update_overall_summary (n);
1913     }
1914   pointer_set_destroy (bad_call_targets);
1915 
1916   if (dump_file)
1917     fprintf (dump_file,
1918 	     "%i polymorphic calls, %i devirtualized,"
1919 	     " %i speculatively devirtualized, %i cold\n"
1920 	     "%i have multiple targets, %i overwritable,"
1921 	     " %i already speculated (%i agree, %i disagree),"
1922 	     " %i external, %i not defined, %i artificial\n",
1923 	     npolymorphic, ndevirtualized, nconverted, ncold,
1924 	     nmultiple, noverwritable, nspeculated, nok, nwrong,
1925 	     nexternal, nnotdefined, nartificial);
1926   return ndevirtualized ? TODO_remove_functions : 0;
1927 }
1928 
1929 /* Gate for speculative IPA devirtualization optimization.  */
1930 
1931 static bool
gate_ipa_devirt(void)1932 gate_ipa_devirt (void)
1933 {
1934   return (flag_devirtualize
1935 	  && flag_devirtualize_speculatively
1936 	  && optimize);
1937 }
1938 
namespace {

/* Pass descriptor for the speculative devirtualization IPA pass.
   Field-by-field comments follow the pass_data layout convention.  */
const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};

/* Thin ipa_opt_pass_d wrapper around gate_ipa_devirt/ipa_devirt.
   All LTO summary hooks are NULL: this pass streams nothing and works
   purely from the in-memory type inheritance graph.  */
class pass_ipa_devirt : public ipa_opt_pass_d
{
public:
  pass_ipa_devirt (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_ipa_devirt (); }
  unsigned int execute () { return ipa_devirt (); }

}; // class pass_ipa_devirt

} // anon namespace
1979 
1980 ipa_opt_pass_d *
make_pass_ipa_devirt(gcc::context * ctxt)1981 make_pass_ipa_devirt (gcc::context *ctxt)
1982 {
1983   return new pass_ipa_devirt (ctxt);
1984 }
1985 
1986 #include "gt-ipa-devirt.h"
1987