1 /* Basic IPA utilities for type inheritance graph construction and
2    devirtualization.
3    Copyright (C) 2013-2018 Free Software Foundation, Inc.
4    Contributed by Jan Hubicka
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 /* Brief vocabulary:
23      ODR = One Definition Rule
24         In short, the ODR states that:
25 	1 In any translation unit, a template, type, function, or object can
26 	  have no more than one definition. Some of these can have any number
27 	  of declarations. A definition provides an instance.
28         2 In the entire program, an object or non-inline function cannot have
29 	  more than one definition; if an object or function is used, it must
30 	  have exactly one definition. You can declare an object or function
31 	  that is never used, in which case you don't have to provide
32 	  a definition. In no event can there be more than one definition.
33         3 Some things, like types, templates, and extern inline functions, can
34 	  be defined in more than one translation unit. For a given entity,
35 	  each definition must be the same. Non-extern objects and functions
36 	  in different translation units are different entities, even if their
37 	  names and types are the same.
38 
39      OTR = OBJ_TYPE_REF
40        This is the Gimple representation of type information of a polymorphic call.
41        It contains two parameters:
42 	 otr_type is a type of class whose method is called.
43 	 otr_token is the index into virtual table where address is taken.
44 
45      BINFO
46        This is the type inheritance information attached to each tree
47        RECORD_TYPE by the C++ frontend.  It provides information about base
48        types and virtual tables.
49 
50        BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51        BINFO also links to its type by BINFO_TYPE and to the virtual table by
52        BINFO_VTABLE.
53 
54        Base types of a given type are enumerated by BINFO_BASE_BINFO
       vector.  Members of this vector are not the BINFOs associated
       with the base types.  Rather they are new copies of BINFOs
57        (base BINFOs). Their virtual tables may differ from
58        virtual table of the base type.  Also BINFO_OFFSET specifies
59        offset of the base within the type.
60 
61        In the case of single inheritance, the virtual table is shared
62        and BINFO_VTABLE of base BINFO is NULL.  In the case of multiple
       inheritance the individual virtual tables are pointed to by
       BINFO_VTABLE of base binfos (which differs from BINFO_VTABLE of
       the binfo associated with the base type).
66 
67        BINFO lookup for a given base type and offset can be done by
68        get_binfo_at_offset.  It returns proper BINFO whose virtual table
69        can be used for lookup of virtual methods associated with the
70        base type.
71 
72      token
73        This is an index of virtual method in virtual table associated
74        to the type defining it. Token can be looked up from OBJ_TYPE_REF
75        or from DECL_VINDEX of a given virtual table.
76 
77      polymorphic (indirect) call
78        This is callgraph representation of virtual method call.  Every
79        polymorphic call contains otr_type and otr_token taken from
80        original OBJ_TYPE_REF at callgraph construction time.
81 
82    What we do here:
83 
84    build_type_inheritance_graph triggers a construction of the type inheritance
85    graph.
86 
87      We reconstruct it based on types of methods we see in the unit.
88      This means that the graph is not complete. Types with no methods are not
89      inserted into the graph.  Also types without virtual methods are not
90      represented at all, though it may be easy to add this.
91 
92      The inheritance graph is represented as follows:
93 
94        Vertices are structures odr_type.  Every odr_type may correspond
95        to one or more tree type nodes that are equivalent by ODR rule.
96        (the multiple type nodes appear only with linktime optimization)
97 
98        Edges are represented by odr_type->base and odr_type->derived_types.
99        At the moment we do not track offsets of types for multiple inheritance.
100        Adding this is easy.
101 
  possible_polymorphic_call_targets returns, given the parameters found on an
  indirect polymorphic edge, all possible polymorphic call targets of the call.
104 
105   pass_ipa_devirt performs simple speculative devirtualization.
106 */
107 
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "backend.h"
112 #include "rtl.h"
113 #include "tree.h"
114 #include "gimple.h"
115 #include "alloc-pool.h"
116 #include "tree-pass.h"
117 #include "cgraph.h"
118 #include "lto-streamer.h"
119 #include "fold-const.h"
120 #include "print-tree.h"
121 #include "calls.h"
122 #include "ipa-utils.h"
123 #include "gimple-fold.h"
124 #include "symbol-summary.h"
125 #include "tree-vrp.h"
126 #include "ipa-prop.h"
127 #include "ipa-fnsummary.h"
128 #include "demangle.h"
129 #include "dbgcnt.h"
130 #include "gimple-pretty-print.h"
131 #include "intl.h"
132 #include "stringpool.h"
133 #include "attribs.h"
134 
/* Pair of types used as a key in hash sets recording which pairs were
   already compared (see odr_subtypes_equivalent_p).  */
struct type_pair
{
  /* First member of the pair; NULL marks an empty hash slot
     (see default_hash_traits<type_pair>::is_empty below).  */
  tree first;
  /* Second member of the pair.  */
  tree second;
};
141 
142 template <>
143 struct default_hash_traits <type_pair>
144   : typed_noop_remove <type_pair>
145 {
146   GTY((skip)) typedef type_pair value_type;
147   GTY((skip)) typedef type_pair compare_type;
148   static hashval_t
149   hash (type_pair p)
150   {
151     return TYPE_UID (p.first) ^ TYPE_UID (p.second);
152   }
153   static bool
154   is_empty (type_pair p)
155   {
156     return p.first == NULL;
157   }
158   static bool
159   is_deleted (type_pair p ATTRIBUTE_UNUSED)
160     {
161       return false;
162     }
163   static bool
164   equal (const type_pair &a, const type_pair &b)
165     {
166       return a.first==b.first && a.second == b.second;
167     }
168   static void
169   mark_empty (type_pair &e)
170     {
171       e.first = NULL;
172     }
173 };
174 
/* Forward declaration; used by odr_subtypes_equivalent_p below for the
   recursive structural comparison of two types.  */
static bool odr_types_equivalent_p (tree, tree, bool, bool *,
				    hash_set<type_pair> *,
				    location_t, location_t);

/* Set to true once any One Definition Rule violation has been reported.  */
static bool odr_violation_reported = false;


/* Pointer set of all call targets appearing in the cache.  */
static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
184 
/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type, representing the whole ODR equivalence class.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type?  */
  bool all_derivations_known;
  /* Did we report ODR violation here?  */
  bool odr_violated;
  /* Set when a virtual table without RTTI prevailed over a table with it.  */
  bool rtti_broken;
};
215 
216 /* Return TRUE if all derived types of T are known and thus
217    we may consider the walk of derived type complete.
218 
219    This is typically true only for final anonymous namespace types and types
220    defined within functions (that may be COMDAT and thus shared across units,
221    but with the same set of derived types).  */
222 
223 bool
224 type_all_derivations_known_p (const_tree t)
225 {
226   if (TYPE_FINAL_P (t))
227     return true;
228   if (flag_ltrans)
229     return false;
230   /* Non-C++ types may have IDENTIFIER_NODE here, do not crash.  */
231   if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
232     return true;
233   if (type_in_anonymous_namespace_p (t))
234     return true;
235   return (decl_function_context (TYPE_NAME (t)) != NULL);
236 }
237 
238 /* Return TRUE if type's constructors are all visible.  */
239 
240 static bool
241 type_all_ctors_visible_p (tree t)
242 {
243   return !flag_ltrans
244 	 && symtab->state >= CONSTRUCTION
245 	 /* We can not always use type_all_derivations_known_p.
246 	    For function local types we must assume case where
247 	    the function is COMDAT and shared in between units.
248 
249 	    TODO: These cases are quite easy to get, but we need
250 	    to keep track of C++ privatizing via -Wno-weak
251 	    as well as the  IPA privatizing.  */
252 	 && type_in_anonymous_namespace_p (t);
253 }
254 
255 /* Return TRUE if type may have instance.  */
256 
257 static bool
258 type_possibly_instantiated_p (tree t)
259 {
260   tree vtable;
261   varpool_node *vnode;
262 
263   /* TODO: Add abstract types here.  */
264   if (!type_all_ctors_visible_p (t))
265     return true;
266 
267   vtable = BINFO_VTABLE (TYPE_BINFO (t));
268   if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
269     vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
270   vnode = varpool_node::get (vtable);
271   return vnode && vnode->definition;
272 }
273 
/* Hash used to unify ODR types based on their mangled name and for anonymous
   namespace types.  */

struct odr_name_hasher : pointer_hash <odr_type_d>
{
  /* Lookups are done with a tree type node as key.  */
  typedef union tree_node *compare_type;
  static inline hashval_t hash (const odr_type_d *);
  static inline bool equal (const odr_type_d *, const tree_node *);
  static inline void remove (odr_type_d *);
};
284 
/* Hash used to unify ODR types based on their associated virtual table.
   This hash is needed to keep -fno-lto-odr-type-merging to work and contains
   only polymorphic types.  Types with mangled names are inserted to both.  */

struct odr_vtable_hasher:odr_name_hasher
{
  static inline hashval_t hash (const odr_type_d *);
  static inline bool equal (const odr_type_d *, const tree_node *);
};
294 
295 /* Return type that was declared with T's name so that T is an
296    qualified variant of it.  */
297 
298 static inline tree
299 main_odr_variant (const_tree t)
300 {
301   if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
302     return TREE_TYPE (TYPE_NAME (t));
303   /* Unnamed types and non-C++ produced types can be compared by variants.  */
304   else
305     return TYPE_MAIN_VARIANT (t);
306 }
307 
308 static bool
309 can_be_name_hashed_p (tree t)
310 {
311   return (!in_lto_p || odr_type_p (t));
312 }
313 
314 /* Hash type by its ODR name.  */
315 
316 static hashval_t
317 hash_odr_name (const_tree t)
318 {
319   gcc_checking_assert (main_odr_variant (t) == t);
320 
321   /* If not in LTO, all main variants are unique, so we can do
322      pointer hash.  */
323   if (!in_lto_p)
324     return htab_hash_pointer (t);
325 
326   /* Anonymous types are unique.  */
327   if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
328     return htab_hash_pointer (t);
329 
330   gcc_checking_assert (TYPE_NAME (t)
331 		       && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)));
332   return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
333 }
334 
335 /* Return the computed hashcode for ODR_TYPE.  */
336 
337 inline hashval_t
338 odr_name_hasher::hash (const odr_type_d *odr_type)
339 {
340   return hash_odr_name (odr_type->type);
341 }
342 
343 static bool
344 can_be_vtable_hashed_p (tree t)
345 {
346   /* vtable hashing can distinguish only main variants.  */
347   if (TYPE_MAIN_VARIANT (t) != t)
348     return false;
349   /* Anonymous namespace types are always handled by name hash.  */
350   if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
351     return false;
352   return (TREE_CODE (t) == RECORD_TYPE
353 	  && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)));
354 }
355 
/* Hash type by assembler name of its vtable.  */

static hashval_t
hash_odr_vtable (const_tree t)
{
  tree v = BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (t)));
  inchash::hash hstate;

  gcc_checking_assert (in_lto_p);
  gcc_checking_assert (!type_in_anonymous_namespace_p (t));
  gcc_checking_assert (TREE_CODE (t) == RECORD_TYPE
		       && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)));
  gcc_checking_assert (main_odr_variant (t) == t);

  /* BINFO_VTABLE may have the "vtable decl + offset" form; mix the offset
     into the hash and strip down to the underlying vtable decl.  */
  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      add_expr (TREE_OPERAND (v, 1), hstate);
      v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
    }

  /* The assembler name of the vtable decl identifies the type.  */
  hstate.add_hwi (IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (v)));
  return hstate.end ();
}
379 
380 /* Return the computed hashcode for ODR_TYPE.  */
381 
382 inline hashval_t
383 odr_vtable_hasher::hash (const odr_type_d *odr_type)
384 {
385   return hash_odr_vtable (odr_type->type);
386 }
387 
/* For languages with One Definition Rule, work out if
   types are the same based on their name.

   This is non-trivial for LTO where minor differences in
   the type representation may have prevented type merging
   to merge two copies of otherwise equivalent type.

   Until we start streaming mangled type names, this function works
   only for polymorphic types.

   When STRICT is true, we compare types by their names for purposes of
   ODR violation warnings.  When strict is false, we consider variants
   equivalent, because it is all that matters for devirtualization machinery.
*/

bool
types_same_for_odr (const_tree type1, const_tree type2, bool strict)
{
  gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));

  type1 = main_odr_variant (type1);
  type2 = main_odr_variant (type2);
  if (!strict)
    {
      type1 = TYPE_MAIN_VARIANT (type1);
      type2 = TYPE_MAIN_VARIANT (type2);
    }

  if (type1 == type2)
    return true;

  /* Outside of LTO the canonicalized variants above are unique,
     so pointer inequality is final.  */
  if (!in_lto_p)
    return false;

  /* Check for anonymous namespaces. Those have !TREE_PUBLIC
     on the corresponding TYPE_STUB_DECL.  */
  if ((type_with_linkage_p (type1) && type_in_anonymous_namespace_p (type1))
      || (type_with_linkage_p (type2) && type_in_anonymous_namespace_p (type2)))
    return false;


  /* ODR name of the type is set in DECL_ASSEMBLER_NAME of its TYPE_NAME.

     Ideally we should never need types without ODR names here.  It can however
     happen in two cases:

       1) for builtin types that are not streamed but rebuilt in lto/lto-lang.c
          Here testing for equivalence is safe, since their MAIN_VARIANTs are
          unique.
       2) for units streamed with -fno-lto-odr-type-merging.  Here we can't
	  establish precise ODR equivalency, but for correctness we care only
	  about equivalency on complete polymorphic types.  For these we can
	  compare assembler names of their virtual tables.  */
  if ((!TYPE_NAME (type1) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type1)))
      || (!TYPE_NAME (type2) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type2))))
    {
      /* See if types are obviously different (i.e. different codes
	 or polymorphic wrt non-polymorphic).  This is not strictly correct
	 for ODR violating programs, but we can't do better without streaming
	 ODR names.  */
      if (TREE_CODE (type1) != TREE_CODE (type2))
	return false;
      if (TREE_CODE (type1) == RECORD_TYPE
	  && (TYPE_BINFO (type1) == NULL_TREE)
	      != (TYPE_BINFO (type2) == NULL_TREE))
	return false;
      if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
	  && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
	     != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
	return false;

      /* At the moment we have no way to establish ODR equivalence at LTO
	 other than comparing virtual table pointers of polymorphic types.
	 Eventually we should start saving mangled names in TYPE_NAME.
	 Then this condition will become non-trivial.  */

      if (TREE_CODE (type1) == RECORD_TYPE
	  && TYPE_BINFO (type1) && TYPE_BINFO (type2)
	  && BINFO_VTABLE (TYPE_BINFO (type1))
	  && BINFO_VTABLE (TYPE_BINFO (type2)))
	{
	  /* Types match when both the offsets and the assembler names of
	     the underlying vtable decls are equal.  */
	  tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
	  tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
	  gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
		      && TREE_CODE (v2) == POINTER_PLUS_EXPR);
	  return (operand_equal_p (TREE_OPERAND (v1, 1),
				   TREE_OPERAND (v2, 1), 0)
		  && DECL_ASSEMBLER_NAME
			 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
		     == DECL_ASSEMBLER_NAME
			 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
	}
      gcc_unreachable ();
    }
  /* Both types have ODR names; their equality decides.  */
  return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
	  == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
}
485 
/* Return true if we can decide on ODR equivalency.

   In non-LTO this can always be decided; in LTO however it depends on
   whether the types have ODR info attached.

   When STRICT is false, compare main variants.  */

bool
types_odr_comparable (tree t1, tree t2, bool strict)
{
  return (!in_lto_p
	  || (strict ? (main_odr_variant (t1) == main_odr_variant (t2)
			&& main_odr_variant (t1))
	      : TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
	  || (odr_type_p (t1) && odr_type_p (t2))
	  || (TREE_CODE (t1) == RECORD_TYPE && TREE_CODE (t2) == RECORD_TYPE
	      && TYPE_BINFO (t1) && TYPE_BINFO (t2)
	      && polymorphic_type_binfo_p (TYPE_BINFO (t1))
	      && polymorphic_type_binfo_p (TYPE_BINFO (t2))));
}
506 
507 /* Return true if T1 and T2 are ODR equivalent.  If ODR equivalency is not
508    known, be conservative and return false.  */
509 
510 bool
511 types_must_be_same_for_odr (tree t1, tree t2)
512 {
513   if (types_odr_comparable (t1, t2))
514     return types_same_for_odr (t1, t2);
515   else
516     return TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2);
517 }
518 
519 /* If T is compound type, return type it is based on.  */
520 
521 static tree
522 compound_type_base (const_tree t)
523 {
524   if (TREE_CODE (t) == ARRAY_TYPE
525       || POINTER_TYPE_P (t)
526       || TREE_CODE (t) == COMPLEX_TYPE
527       || VECTOR_TYPE_P (t))
528     return TREE_TYPE (t);
529   if (TREE_CODE (t) == METHOD_TYPE)
530     return TYPE_METHOD_BASETYPE (t);
531   if (TREE_CODE (t) == OFFSET_TYPE)
532     return TYPE_OFFSET_BASETYPE (t);
533   return NULL_TREE;
534 }
535 
/* Return true if T is either ODR type or compound type based from it.
   If the function return true, we know that T is a type originating from C++
   source even at link-time.  */

bool
odr_or_derived_type_p (const_tree t)
{
  /* Iteratively peel compound layers off T until an ODR type is found
     or nothing remains.  */
  do
    {
      if (odr_type_p (t))
	return true;
      /* Function type is a tricky one. Basically we can consider it
	 ODR derived if return type or any of the parameters is.
	 We need to check all parameters because LTO streaming merges
	 common types (such as void) and they are not considered ODR then.  */
      if (TREE_CODE (t) == FUNCTION_TYPE)
	{
	  if (TYPE_METHOD_BASETYPE (t))
	    t = TYPE_METHOD_BASETYPE (t);
	  else
	   {
	     /* Recurse into the return type, then each argument type.  */
	     if (TREE_TYPE (t) && odr_or_derived_type_p (TREE_TYPE (t)))
	       return true;
	     for (t = TYPE_ARG_TYPES (t); t; t = TREE_CHAIN (t))
	       if (odr_or_derived_type_p (TREE_VALUE (t)))
		 return true;
	     return false;
	   }
	}
      else
	t = compound_type_base (t);
    }
  while (t);
  /* T is NULL here, so this returns false.  */
  return t;
}
571 
/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_name_hasher::equal (const odr_type_d *o1, const tree_node *t2)
{
  tree t1 = o1->type;

  gcc_checking_assert (main_odr_variant (t2) == t2);
  gcc_checking_assert (main_odr_variant (t1) == t1);
  if (t1 == t2)
    return true;
  /* Outside of LTO pointer identity is the only equivalence.  */
  if (!in_lto_p)
    return false;
  /* Check for anonymous namespaces. Those have !TREE_PUBLIC
     on the corresponding TYPE_STUB_DECL.  */
  if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
      || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
    return false;
  gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1)));
  gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
  /* Distinct type nodes are ODR equivalent when their mangled names match.  */
  return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
	  == DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
}
596 
/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_vtable_hasher::equal (const odr_type_d *o1, const tree_node *t2)
{
  tree t1 = o1->type;

  gcc_checking_assert (main_odr_variant (t2) == t2);
  gcc_checking_assert (main_odr_variant (t1) == t1);
  gcc_checking_assert (in_lto_p);
  t1 = TYPE_MAIN_VARIANT (t1);
  t2 = TYPE_MAIN_VARIANT (t2);
  if (t1 == t2)
    return true;
  /* Compare the BINFO_VTABLEs, which are expected here in the
     "vtable decl + offset" form (see can_be_vtable_hashed_p for the
     precondition): both the offsets and the assembler names of the
     underlying vtable decls must match.  */
  tree v1 = BINFO_VTABLE (TYPE_BINFO (t1));
  tree v2 = BINFO_VTABLE (TYPE_BINFO (t2));
  return (operand_equal_p (TREE_OPERAND (v1, 1),
			   TREE_OPERAND (v2, 1), 0)
	  && DECL_ASSEMBLER_NAME
		 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
	     == DECL_ASSEMBLER_NAME
		 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
}
621 
622 /* Free ODR type V.  */
623 
624 inline void
625 odr_name_hasher::remove (odr_type_d *v)
626 {
627   v->bases.release ();
628   v->derived_types.release ();
629   if (v->types_set)
630     delete v->types_set;
631   ggc_free (v);
632 }
633 
/* ODR type hash used to look up ODR type based on tree type node.  */

typedef hash_table<odr_name_hasher> odr_hash_type;
static odr_hash_type *odr_hash;
/* Secondary hash keyed by virtual table; see odr_vtable_hasher above.  */
typedef hash_table<odr_vtable_hasher> odr_vtable_hash_type;
static odr_vtable_hash_type *odr_vtable_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)
647 
648 /* Set TYPE_BINFO of TYPE and its variants to BINFO.  */
649 void
650 set_type_binfo (tree type, tree binfo)
651 {
652   for (; type; type = TYPE_NEXT_VARIANT (type))
653     if (COMPLETE_TYPE_P (type))
654       TYPE_BINFO (type) = binfo;
655     else
656       gcc_assert (!TYPE_BINFO (type));
657 }
658 
/* Compare T1 and T2 based on name or structure.  */

static bool
odr_subtypes_equivalent_p (tree t1, tree t2,
			   hash_set<type_pair> *visited,
			   location_t loc1, location_t loc2)
{

  /* This can happen in incomplete types that should be handled earlier.  */
  gcc_assert (t1 && t2);

  t1 = main_odr_variant (t1);
  t2 = main_odr_variant (t2);
  if (t1 == t2)
    return true;

  /* Anonymous namespace types must match exactly.  */
  if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
      || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
    return false;

  /* For ODR types be sure to compare their names.
     To support -Wno-odr-type-merging we allow one type to be non-ODR
     and other ODR even though it is a violation.  */
  if (types_odr_comparable (t1, t2, true))
    {
      if (!types_same_for_odr (t1, t2, true))
        return false;
      /* Limit recursion: If subtypes are ODR types and we know
	 that they are same, be happy.  */
      if (!odr_type_p (t1) || !get_odr_type (t1, true)->odr_violated)
        return true;
    }

  /* Component types, builtins and possibly violating ODR types
     have to be compared structurally.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;
  if (AGGREGATE_TYPE_P (t1)
      && (TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
    return false;

  /* Canonicalize the pair by TYPE_UID so each unordered pair is recorded
     only once regardless of argument order.  */
  type_pair pair={t1,t2};
  if (TYPE_UID (t1) > TYPE_UID (t2))
    {
      pair.first = t2;
      pair.second = t1;
    }
  /* If this pair is already being (or has been) compared, assume
     equivalence to break recursion cycles.  */
  if (visited->add (pair))
    return true;
  return odr_types_equivalent_p (t1, t2, false, NULL, visited, loc1, loc2);
}
711 
712 /* Return true if DECL1 and DECL2 are identical methods.  Consider
713    name equivalent to name.localalias.xyz.  */
714 
715 static bool
716 methods_equal_p (tree decl1, tree decl2)
717 {
718   if (DECL_ASSEMBLER_NAME (decl1) == DECL_ASSEMBLER_NAME (decl2))
719     return true;
720   const char sep = symbol_table::symbol_suffix_separator ();
721 
722   const char *name1 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl1));
723   const char *ptr1 = strchr (name1, sep);
724   int len1 = ptr1 ? ptr1 - name1 : strlen (name1);
725 
726   const char *name2 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl2));
727   const char *ptr2 = strchr (name2, sep);
728   int len2 = ptr2 ? ptr2 - name2 : strlen (name2);
729 
730   if (len1 != len2)
731     return false;
732   return !strncmp (name1, name2, len1);
733 }
734 
735 /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
736    violation warnings.  */
737 
738 void
739 compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
740 {
741   int n1, n2;
742 
743   if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
744     {
745       odr_violation_reported = true;
746       if (DECL_VIRTUAL_P (prevailing->decl))
747 	{
748 	  varpool_node *tmp = prevailing;
749 	  prevailing = vtable;
750 	  vtable = tmp;
751 	}
752       if (warning_at (DECL_SOURCE_LOCATION
753 			(TYPE_NAME (DECL_CONTEXT (vtable->decl))),
754 		      OPT_Wodr,
755 		      "virtual table of type %qD violates one definition rule",
756 		      DECL_CONTEXT (vtable->decl)))
757 	inform (DECL_SOURCE_LOCATION (prevailing->decl),
758 		"variable of same assembler name as the virtual table is "
759 		"defined in another translation unit");
760       return;
761     }
762   if (!prevailing->definition || !vtable->definition)
763     return;
764 
765   /* If we do not stream ODR type info, do not bother to do useful compare.  */
766   if (!TYPE_BINFO (DECL_CONTEXT (vtable->decl))
767       || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable->decl))))
768     return;
769 
770   odr_type class_type = get_odr_type (DECL_CONTEXT (vtable->decl), true);
771 
772   if (class_type->odr_violated)
773     return;
774 
775   for (n1 = 0, n2 = 0; true; n1++, n2++)
776     {
777       struct ipa_ref *ref1, *ref2;
778       bool end1, end2;
779 
780       end1 = !prevailing->iterate_reference (n1, ref1);
781       end2 = !vtable->iterate_reference (n2, ref2);
782 
783       /* !DECL_VIRTUAL_P means RTTI entry;
784 	 We warn when RTTI is lost because non-RTTI previals; we silently
785 	 accept the other case.  */
786       while (!end2
787 	     && (end1
788 	         || (methods_equal_p (ref1->referred->decl,
789 				      ref2->referred->decl)
790 	             && TREE_CODE (ref1->referred->decl) == FUNCTION_DECL))
791 	     && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
792 	{
793 	  if (!class_type->rtti_broken
794 	      && warning_at (DECL_SOURCE_LOCATION
795 			      (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
796 			     OPT_Wodr,
797 			     "virtual table of type %qD contains RTTI "
798 			     "information",
799 			     DECL_CONTEXT (vtable->decl)))
800 	    {
801 	      inform (DECL_SOURCE_LOCATION
802 			(TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
803 		      "but is prevailed by one without from other translation "
804 		      "unit");
805 	      inform (DECL_SOURCE_LOCATION
806 			(TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
807 		      "RTTI will not work on this type");
808 	      class_type->rtti_broken = true;
809 	    }
810 	  n2++;
811           end2 = !vtable->iterate_reference (n2, ref2);
812 	}
813       while (!end1
814 	     && (end2
815 	         || (methods_equal_p (ref2->referred->decl, ref1->referred->decl)
816 	             && TREE_CODE (ref2->referred->decl) == FUNCTION_DECL))
817 	     && TREE_CODE (ref1->referred->decl) != FUNCTION_DECL)
818 	{
819 	  n1++;
820           end1 = !prevailing->iterate_reference (n1, ref1);
821 	}
822 
823       /* Finished?  */
824       if (end1 && end2)
825 	{
826 	  /* Extra paranoia; compare the sizes.  We do not have information
827 	     about virtual inheritance offsets, so just be sure that these
828 	     match.
829 	     Do this as very last check so the not very informative error
830 	     is not output too often.  */
831 	  if (DECL_SIZE (prevailing->decl) != DECL_SIZE (vtable->decl))
832 	    {
833 	      class_type->odr_violated = true;
834 	      if (warning_at (DECL_SOURCE_LOCATION
835 				(TYPE_NAME (DECL_CONTEXT (vtable->decl))),
836 			      OPT_Wodr,
837 			      "virtual table of type %qD violates "
838 			      "one definition rule  ",
839 			      DECL_CONTEXT (vtable->decl)))
840 		{
841 		  inform (DECL_SOURCE_LOCATION
842 			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
843 			  "the conflicting type defined in another translation "
844 			  "unit has virtual table of different size");
845 		}
846 	    }
847 	  return;
848 	}
849 
850       if (!end1 && !end2)
851 	{
852 	  if (methods_equal_p (ref1->referred->decl, ref2->referred->decl))
853 	    continue;
854 
855 	  class_type->odr_violated = true;
856 
857 	  /* If the loops above stopped on non-virtual pointer, we have
858 	     mismatch in RTTI information mangling.  */
859 	  if (TREE_CODE (ref1->referred->decl) != FUNCTION_DECL
860 	      && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
861 	    {
862 	      if (warning_at (DECL_SOURCE_LOCATION
863 				(TYPE_NAME (DECL_CONTEXT (vtable->decl))),
864 			      OPT_Wodr,
865 			      "virtual table of type %qD violates "
866 			      "one definition rule  ",
867 			      DECL_CONTEXT (vtable->decl)))
868 		{
869 		  inform (DECL_SOURCE_LOCATION
870 			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
871 			  "the conflicting type defined in another translation "
872 			  "unit with different RTTI information");
873 		}
874 	      return;
875 	    }
876 	  /* At this point both REF1 and REF2 points either to virtual table
877 	     or virtual method.  If one points to virtual table and other to
878 	     method we can complain the same way as if one table was shorter
879 	     than other pointing out the extra method.  */
880 	  if (TREE_CODE (ref1->referred->decl)
881 	      != TREE_CODE (ref2->referred->decl))
882 	    {
883 	      if (VAR_P (ref1->referred->decl))
884 		end1 = true;
885 	      else if (VAR_P (ref2->referred->decl))
886 		end2 = true;
887 	    }
888 	}
889 
890       class_type->odr_violated = true;
891 
892       /* Complain about size mismatch.  Either we have too many virutal
893  	 functions or too many virtual table pointers.  */
894       if (end1 || end2)
895 	{
896 	  if (end1)
897 	    {
898 	      varpool_node *tmp = prevailing;
899 	      prevailing = vtable;
900 	      vtable = tmp;
901 	      ref1 = ref2;
902 	    }
903 	  if (warning_at (DECL_SOURCE_LOCATION
904 			    (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
905 			  OPT_Wodr,
906 			  "virtual table of type %qD violates "
907 			  "one definition rule",
908 			  DECL_CONTEXT (vtable->decl)))
909 	    {
910 	      if (TREE_CODE (ref1->referring->decl) == FUNCTION_DECL)
911 		{
912 		  inform (DECL_SOURCE_LOCATION
913 			   (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
914 			  "the conflicting type defined in another translation "
915 			  "unit");
916 		  inform (DECL_SOURCE_LOCATION
917 			    (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
918 			  "contains additional virtual method %qD",
919 			  ref1->referred->decl);
920 		}
921 	      else
922 		{
923 		  inform (DECL_SOURCE_LOCATION
924 			   (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
925 			  "the conflicting type defined in another translation "
926 			  "unit has virtual table with more entries");
927 		}
928 	    }
929 	  return;
930 	}
931 
932       /* And in the last case we have either mistmatch in between two virtual
933 	 methods or two virtual table pointers.  */
934       if (warning_at (DECL_SOURCE_LOCATION
935 			(TYPE_NAME (DECL_CONTEXT (vtable->decl))), OPT_Wodr,
936 		      "virtual table of type %qD violates "
937 		      "one definition rule  ",
938 		      DECL_CONTEXT (vtable->decl)))
939 	{
940 	  if (TREE_CODE (ref1->referred->decl) == FUNCTION_DECL)
941 	    {
942 	      inform (DECL_SOURCE_LOCATION
943 			(TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
944 		      "the conflicting type defined in another translation "
945 		      "unit");
946 	      gcc_assert (TREE_CODE (ref2->referred->decl)
947 			  == FUNCTION_DECL);
948 	      inform (DECL_SOURCE_LOCATION
949 			 (ref1->referred->ultimate_alias_target ()->decl),
950 		      "virtual method %qD",
951 		      ref1->referred->ultimate_alias_target ()->decl);
952 	      inform (DECL_SOURCE_LOCATION
953 			 (ref2->referred->ultimate_alias_target ()->decl),
954 		      "ought to match virtual method %qD but does not",
955 		      ref2->referred->ultimate_alias_target ()->decl);
956 	    }
957 	  else
958 	    inform (DECL_SOURCE_LOCATION
959 		      (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
960 		    "the conflicting type defined in another translation "
961 		    "unit has virtual table with different contents");
962 	  return;
963 	}
964     }
965 }
966 
967 /* Output ODR violation warning about T1 and T2 with REASON.
968    Display location of ST1 and ST2 if REASON speaks about field or
969    method of the type.
970    If WARN is false, do nothing. Set WARNED if warning was indeed
971    output.  */
972 
973 void
974 warn_odr (tree t1, tree t2, tree st1, tree st2,
975 	  bool warn, bool *warned, const char *reason)
976 {
977   tree decl2 = TYPE_NAME (t2);
978   if (warned)
979     *warned = false;
980 
981   if (!warn || !TYPE_NAME(t1))
982     return;
983 
984   /* ODR warnings are output druing LTO streaming; we must apply location
985      cache for potential warnings to be output correctly.  */
986   if (lto_location_cache::current_cache)
987     lto_location_cache::current_cache->apply_location_cache ();
988 
989   if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
990 		   "type %qT violates the C++ One Definition Rule",
991 		   t1))
992     return;
993   if (!st1 && !st2)
994     ;
995   /* For FIELD_DECL support also case where one of fields is
996      NULL - this is used when the structures have mismatching number of
997      elements.  */
998   else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
999     {
1000       inform (DECL_SOURCE_LOCATION (decl2),
1001 	      "a different type is defined in another translation unit");
1002       if (!st1)
1003 	{
1004 	  st1 = st2;
1005 	  st2 = NULL;
1006 	}
1007       inform (DECL_SOURCE_LOCATION (st1),
1008 	      "the first difference of corresponding definitions is field %qD",
1009 	      st1);
1010       if (st2)
1011         decl2 = st2;
1012     }
1013   else if (TREE_CODE (st1) == FUNCTION_DECL)
1014     {
1015       inform (DECL_SOURCE_LOCATION (decl2),
1016 	      "a different type is defined in another translation unit");
1017       inform (DECL_SOURCE_LOCATION (st1),
1018 	      "the first difference of corresponding definitions is method %qD",
1019 	      st1);
1020       decl2 = st2;
1021     }
1022   else
1023     return;
1024   inform (DECL_SOURCE_LOCATION (decl2), reason);
1025 
1026   if (warned)
1027     *warned = true;
1028 }
1029 
/* Return true if T1 and T2 are incompatible and we want to recursively
   dive into them from warn_type_mismatch to give sensible answer.  */
1032 
1033 static bool
1034 type_mismatch_p (tree t1, tree t2)
1035 {
1036   if (odr_or_derived_type_p (t1) && odr_or_derived_type_p (t2)
1037       && !odr_types_equivalent_p (t1, t2))
1038     return true;
1039   return !types_compatible_p (t1, t2);
1040 }
1041 
1042 
/* Types T1 and T2 were found to be incompatible in a context they can't
   (either used to declare a symbol of same assembler name or unified by
   ODR rule).  We already output warning about this, but if possible, output
   extra information on how the types mismatch.

   This is hard to do in general.  We basically handle the common cases.

   If LOC1 and LOC2 are meaningful locations, use them in the case the types
   themselves do not have one.  */
1052 
1053 void
1054 warn_types_mismatch (tree t1, tree t2, location_t loc1, location_t loc2)
1055 {
1056   /* Location of type is known only if it has TYPE_NAME and the name is
1057      TYPE_DECL.  */
1058   location_t loc_t1 = TYPE_NAME (t1) && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1059 		      ? DECL_SOURCE_LOCATION (TYPE_NAME (t1))
1060 		      : UNKNOWN_LOCATION;
1061   location_t loc_t2 = TYPE_NAME (t2) && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
1062 		      ? DECL_SOURCE_LOCATION (TYPE_NAME (t2))
1063 		      : UNKNOWN_LOCATION;
1064   bool loc_t2_useful = false;
1065 
1066   /* With LTO it is a common case that the location of both types match.
1067      See if T2 has a location that is different from T1. If so, we will
1068      inform user about the location.
1069      Do not consider the location passed to us in LOC1/LOC2 as those are
1070      already output.  */
1071   if (loc_t2 > BUILTINS_LOCATION && loc_t2 != loc_t1)
1072     {
1073       if (loc_t1 <= BUILTINS_LOCATION)
1074 	loc_t2_useful = true;
1075       else
1076 	{
1077 	  expanded_location xloc1 = expand_location (loc_t1);
1078 	  expanded_location xloc2 = expand_location (loc_t2);
1079 
1080 	  if (strcmp (xloc1.file, xloc2.file)
1081 	      || xloc1.line != xloc2.line
1082 	      || xloc1.column != xloc2.column)
1083 	    loc_t2_useful = true;
1084 	}
1085     }
1086 
1087   if (loc_t1 <= BUILTINS_LOCATION)
1088     loc_t1 = loc1;
1089   if (loc_t2 <= BUILTINS_LOCATION)
1090     loc_t2 = loc2;
1091 
1092   location_t loc = loc_t1 <= BUILTINS_LOCATION ? loc_t2 : loc_t1;
1093 
1094   /* It is a quite common bug to reference anonymous namespace type in
1095      non-anonymous namespace class.  */
1096   if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
1097       || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
1098     {
1099       if (type_with_linkage_p (t1) && !type_in_anonymous_namespace_p (t1))
1100 	{
1101 	  std::swap (t1, t2);
1102 	  std::swap (loc_t1, loc_t2);
1103 	}
1104       gcc_assert (TYPE_NAME (t1) && TYPE_NAME (t2)
1105 		  && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1106 		  && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL);
1107       /* Most of the time, the type names will match, do not be unnecesarily
1108          verbose.  */
1109       if (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (t1)))
1110 	  != IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (t2))))
1111         inform (loc_t1,
1112 	        "type %qT defined in anonymous namespace can not match "
1113 	        "type %qT across the translation unit boundary",
1114 	        t1, t2);
1115       else
1116         inform (loc_t1,
1117 	        "type %qT defined in anonymous namespace can not match "
1118 	        "across the translation unit boundary",
1119 	        t1);
1120       if (loc_t2_useful)
1121         inform (loc_t2,
1122 	        "the incompatible type defined in another translation unit");
1123       return;
1124     }
1125   /* If types have mangled ODR names and they are different, it is most
1126      informative to output those.
1127      This also covers types defined in different namespaces.  */
1128   if (TYPE_NAME (t1) && TYPE_NAME (t2)
1129       && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1130       && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
1131       && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t1))
1132       && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t2))
1133       && DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
1134 	 != DECL_ASSEMBLER_NAME (TYPE_NAME (t2)))
1135     {
1136       char *name1 = xstrdup (cplus_demangle
1137 	 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))),
1138 	  DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES));
1139       char *name2 = cplus_demangle
1140 	 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t2))),
1141 	  DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES);
1142       if (name1 && name2 && strcmp (name1, name2))
1143 	{
1144 	  inform (loc_t1,
1145 		  "type name %qs should match type name %qs",
1146 		  name1, name2);
1147 	  if (loc_t2_useful)
1148 	    inform (loc_t2,
1149 		    "the incompatible type is defined here");
1150 	  free (name1);
1151 	  return;
1152 	}
1153       free (name1);
1154     }
1155   /* A tricky case are compound types.  Often they appear the same in source
1156      code and the mismatch is dragged in by type they are build from.
1157      Look for those differences in subtypes and try to be informative.  In other
1158      cases just output nothing because the source code is probably different
1159      and in this case we already output a all necessary info.  */
1160   if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
1161     {
1162       if (TREE_CODE (t1) == TREE_CODE (t2))
1163 	{
1164 	  if (TREE_CODE (t1) == ARRAY_TYPE
1165 	      && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1166 	    {
1167 	      tree i1 = TYPE_DOMAIN (t1);
1168 	      tree i2 = TYPE_DOMAIN (t2);
1169 
1170 	      if (i1 && i2
1171 		  && TYPE_MAX_VALUE (i1)
1172 		  && TYPE_MAX_VALUE (i2)
1173 		  && !operand_equal_p (TYPE_MAX_VALUE (i1),
1174 				       TYPE_MAX_VALUE (i2), 0))
1175 		{
1176 		  inform (loc,
1177 			  "array types have different bounds");
1178 		  return;
1179 		}
1180 	    }
1181 	  if ((POINTER_TYPE_P (t1) || TREE_CODE (t1) == ARRAY_TYPE)
1182 	      && type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1183 	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1, loc_t2);
1184 	  else if (TREE_CODE (t1) == METHOD_TYPE
1185 		   || TREE_CODE (t1) == FUNCTION_TYPE)
1186 	    {
1187 	      tree parms1 = NULL, parms2 = NULL;
1188 	      int count = 1;
1189 
1190 	      if (type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1191 		{
1192 		  inform (loc, "return value type mismatch");
1193 		  warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1,
1194 				       loc_t2);
1195 		  return;
1196 		}
1197 	      if (prototype_p (t1) && prototype_p (t2))
1198 		for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1199 		     parms1 && parms2;
1200 		     parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2),
1201 		     count++)
1202 		  {
1203 		    if (type_mismatch_p (TREE_VALUE (parms1), TREE_VALUE (parms2)))
1204 		      {
1205 			if (count == 1 && TREE_CODE (t1) == METHOD_TYPE)
1206 			  inform (loc,
1207 				  "implicit this pointer type mismatch");
1208 			else
1209 			  inform (loc,
1210 				  "type mismatch in parameter %i",
1211 				  count - (TREE_CODE (t1) == METHOD_TYPE));
1212 			warn_types_mismatch (TREE_VALUE (parms1),
1213 					     TREE_VALUE (parms2),
1214 					     loc_t1, loc_t2);
1215 			return;
1216 		      }
1217 		  }
1218 	      if (parms1 || parms2)
1219 		{
1220 		  inform (loc,
1221 			  "types have different parameter counts");
1222 		  return;
1223 		}
1224 	    }
1225 	}
1226       return;
1227     }
1228 
1229   if (types_odr_comparable (t1, t2, true)
1230       && types_same_for_odr (t1, t2, true))
1231     inform (loc_t1,
1232 	    "type %qT itself violates the C++ One Definition Rule", t1);
1233   /* Prevent pointless warnings like "struct aa" should match "struct aa".  */
1234   else if (TYPE_NAME (t1) == TYPE_NAME (t2)
1235 	   && TREE_CODE (t1) == TREE_CODE (t2) && !loc_t2_useful)
1236     return;
1237   else
1238     inform (loc_t1, "type %qT should match type %qT",
1239 	    t1, t2);
1240   if (loc_t2_useful)
1241     inform (loc_t2, "the incompatible type is defined here");
1242 }
1243 
/* Return true if T should be ignored in TYPE_FIELDS for ODR comparison.  */
1245 
1246 static bool
1247 skip_in_fields_list_p (tree t)
1248 {
1249   if (TREE_CODE (t) != FIELD_DECL)
1250     return true;
1251   /* C++ FE introduces zero sized fields depending on -std setting, see
1252      PR89358.  */
1253   if (DECL_SIZE (t)
1254       && integer_zerop (DECL_SIZE (t))
1255       && DECL_ARTIFICIAL (t)
1256       && DECL_IGNORED_P (t)
1257       && !DECL_NAME (t))
1258     return true;
1259   return false;
1260 }
1261 
/* Compare T1 and T2, report ODR violations if WARN is true and set
   WARNED to true if anything is reported.  Return true if types match.
   If true is returned, the types are also compatible in the sense of
   gimple_canonical_types_compatible_p.
   If LOC1 and LOC2 are not UNKNOWN_LOCATION they may be used to output a
   warning about the type if the type itself does not have a location.  */
1268 
static bool
odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
			hash_set<type_pair> *visited,
			location_t loc1, location_t loc2)
{
  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;
  /* Anonymous-namespace types must have been filtered out by the caller;
     see the explicit check further below.  */
  gcc_assert (!type_with_linkage_p (t1) || !type_in_anonymous_namespace_p (t1));
  gcc_assert (!type_with_linkage_p (t2) || !type_in_anonymous_namespace_p (t2));

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
	        G_("a different type is defined in another translation unit"));
      return false;
    }

  /* Qualifiers (const/volatile/restrict/address-space) are part of the
     type's identity.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
	        G_("a type with different qualifiers is defined in another "
		   "translation unit"));
      return false;
    }

  if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
      || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
    {
      /* We cannot trip this when comparing ODR types, only when trying to
	 match different ODR derivations from different declarations.
	 So WARN should be always false.  */
      gcc_assert (!warn);
      return false;
    }

  /* Mismatched type attributes count as a violation, too.  */
  if (comp_type_attributes (t1, t2) != 1)
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
	        G_("a type with different attributes "
		   "is defined in another translation unit"));
      return false;
    }

  /* For enums compare the enumerator chains: names must agree pairwise
     and so must the values (by tree identity or by DECL_INITIAL).  */
  if (TREE_CODE (t1) == ENUMERAL_TYPE
      && TYPE_VALUES (t1) && TYPE_VALUES (t2))
    {
      tree v1, v2;
      for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
	   v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
	{
	  if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("an enum with different value name"
			   " is defined in another translation unit"));
	      return false;
	    }
	  if (TREE_VALUE (v1) != TREE_VALUE (v2)
	      && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
				   DECL_INITIAL (TREE_VALUE (v2)), 0))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("an enum with different values is defined"
			   " in another translation unit"));
	      return false;
	    }
	}
      /* One chain ran out before the other.  */
      if (v1 || v2)
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("an enum with mismatching number of values "
		       "is defined in another translation unit"));
	  return false;
	}
    }

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a type with different precision is defined "
		       "in another translation unit"));
	  return false;
	}
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a type with different signedness is defined "
		       "in another translation unit"));
	  return false;
	}

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	{
	  /* E.g. "char" vs. "uint8_t": TYPE_STRING_FLAG distinguishes
	     character types from same-precision integer types.  */
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a different type is defined in another "
		       "translation unit"));
	  return false;
	}

      /* For canonical type comparisons we do not want to build SCCs
	 so we cannot compare pointed-to types.  But we can, for now,
	 require the same pointed-to type kind and match what
	 useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
	{
	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("it is defined as a pointer in different address "
			   "space in another translation unit"));
	      return false;
	    }

	  if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					  visited, loc1, loc2))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("it is defined as a pointer to different type "
			   "in another translation unit"));
	      if (warn && warned)
	        warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2),
				     loc1, loc2);
	      return false;
	    }
	}

      if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
	  && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					 visited, loc1, loc2))
	{
	  /* Probably specific enough.  */
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a different type is defined "
		       "in another translation unit"));
	  if (warn && warned)
	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
	  return false;
	}
    }
  /* Do type-specific comparisons.  */
  else switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      {
	/* Array types are the same if the element types are the same and
	   the number of elements are the same.  */
	if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					visited, loc1, loc2))
	  {
	    warn_odr (t1, t2, NULL, NULL, warn, warned,
		      G_("a different type is defined in another "
			 "translation unit"));
	    if (warn && warned)
	      warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
	    /* NOTE(review): unlike the other mismatch paths this branch
	       falls through without returning false — confirm whether the
	       missing return is intentional.  */
	  }
	gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
	gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
		    == TYPE_NONALIASED_COMPONENT (t2));

	tree i1 = TYPE_DOMAIN (t1);
	tree i2 = TYPE_DOMAIN (t2);

	/* For an incomplete external array, the type domain can be
	   NULL_TREE.  Check this condition also.  */
	if (i1 == NULL_TREE || i2 == NULL_TREE)
	  return true;

	tree min1 = TYPE_MIN_VALUE (i1);
	tree min2 = TYPE_MIN_VALUE (i2);
	tree max1 = TYPE_MAX_VALUE (i1);
	tree max2 = TYPE_MAX_VALUE (i2);

	/* In C++, minimums should always be 0.  */
	gcc_assert (min1 == min2);
	if (!operand_equal_p (max1, max2, 0))
	  {
	    warn_odr (t1, t2, NULL, NULL, warn, warned,
		      G_("an array of different size is defined "
			 "in another translation unit"));
	    return false;
	  }
      }
    break;

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
				      visited, loc1, loc2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("has different return value "
		       "in another translation unit"));
	  if (warn && warned)
	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
	  return false;
	}

      /* Unprototyped declarations match anything argument-wise.  */
      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
	  || !prototype_p (t1) || !prototype_p (t2))
	return true;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!odr_subtypes_equivalent_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2), visited,
		      loc1, loc2))
		{
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("has different parameters in another "
			       "translation unit"));
		  if (warn && warned)
		    warn_types_mismatch (TREE_VALUE (parms1),
					 TREE_VALUE (parms2), loc1, loc2);
		  return false;
		}
	    }

	  /* Differing parameter counts.  */
	  if (parms1 || parms2)
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("has different parameters "
			   "in another translation unit"));
	      return false;
	    }

	  return true;
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
	  {
	    /* A polymorphic type can never match a non-polymorphic one.  */
	    if (TYPE_BINFO (t1) && TYPE_BINFO (t2)
	        && polymorphic_type_binfo_p (TYPE_BINFO (t1))
		   != polymorphic_type_binfo_p (TYPE_BINFO (t2)))
	      {
		if (polymorphic_type_binfo_p (TYPE_BINFO (t1)))
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type defined in another translation unit "
			       "is not polymorphic"));
		else
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type defined in another translation unit "
			       "is polymorphic"));
		return false;
	      }
	    /* Walk both field chains in lock-step, comparing name, type,
	       layout and bitfieldness of each pair.  */
	    for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
		 f1 || f2;
		 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	      {
		/* Skip non-fields.  */
		while (f1 && skip_in_fields_list_p (f1))
		  f1 = TREE_CHAIN (f1);
		while (f2 && skip_in_fields_list_p (f2))
		  f2 = TREE_CHAIN (f2);
		if (!f1 || !f2)
		  break;
		if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
		  {
		    warn_odr (t1, t2, NULL, NULL, warn, warned,
			      G_("a type with different virtual table pointers"
			         " is defined in another translation unit"));
		    return false;
		  }
		if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
		  {
		    /* Artificial fields here represent base classes.  */
		    warn_odr (t1, t2, NULL, NULL, warn, warned,
			      G_("a type with different bases is defined "
				 "in another translation unit"));
		    return false;
		  }
		if (DECL_NAME (f1) != DECL_NAME (f2)
		    && !DECL_ARTIFICIAL (f1))
		  {
		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("a field with different name is defined "
				 "in another translation unit"));
		    return false;
		  }
		if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
						TREE_TYPE (f2), visited,
						loc1, loc2))
		  {
		    /* Do not warn about artificial fields and just go into
		       generic field mismatch warning.  */
		    if (DECL_ARTIFICIAL (f1))
		      break;

		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("a field of same name but different type "
				 "is defined in another translation unit"));
		    if (warn && warned)
		      warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2), loc1, loc2);
		    return false;
		  }
		if (!gimple_compare_field_offset (f1, f2))
		  {
		    /* Do not warn about artificial fields and just go into
		       generic field mismatch warning.  */
		    if (DECL_ARTIFICIAL (f1))
		      break;
		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("fields have different layout "
				 "in another translation unit"));
		    return false;
		  }
		if (DECL_BIT_FIELD (f1) != DECL_BIT_FIELD (f2))
		  {
		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("one field is bitfield while other is not"));
		    return false;
		  }
		else
		  gcc_assert (DECL_NONADDRESSABLE_P (f1)
			      == DECL_NONADDRESSABLE_P (f2));
	      }

	    /* If one aggregate has more fields than the other, they
	       are not the same.  */
	    if (f1 || f2)
	      {
		if ((f1 && DECL_VIRTUAL_P (f1)) || (f2 && DECL_VIRTUAL_P (f2)))
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type with different virtual table pointers"
			       " is defined in another translation unit"));
		else if ((f1 && DECL_ARTIFICIAL (f1))
		         || (f2 && DECL_ARTIFICIAL (f2)))
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type with different bases is defined "
			       "in another translation unit"));
		else
		  warn_odr (t1, t2, f1, f2, warn, warned,
			    G_("a type with different number of fields "
			       "is defined in another translation unit"));

		return false;
	      }
	  }
	break;
      }
    case VOID_TYPE:
    case NULLPTR_TYPE:
      /* Nothing beyond the shared checks above to compare.  */
      break;

    default:
      /* Every type code the ODR machinery can meet must be listed above.  */
      debug_tree (t1);
      gcc_unreachable ();
    }

  /* Those are better to come last as they are utterly uninformative.  */
  if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
      && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("a type with different size "
		   "is defined in another translation unit"));
      return false;
    }
  if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
      && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("a type with different alignment "
		   "is defined in another translation unit"));
      return false;
    }
  /* Matching TYPE_SIZE implies matching TYPE_SIZE_UNIT when both exist.  */
  gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
	      || operand_equal_p (TYPE_SIZE_UNIT (t1),
				  TYPE_SIZE_UNIT (t2), 0));
  return true;
}
1666 
1667 /* Return true if TYPE1 and TYPE2 are equivalent for One Definition Rule.  */
1668 
1669 bool
1670 odr_types_equivalent_p (tree type1, tree type2)
1671 {
1672   gcc_checking_assert (odr_or_derived_type_p (type1)
1673 		       && odr_or_derived_type_p (type2));
1674 
1675   hash_set<type_pair> visited;
1676   return odr_types_equivalent_p (type1, type2, false, NULL,
1677 			         &visited, UNKNOWN_LOCATION, UNKNOWN_LOCATION);
1678 }
1679 
1680 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1681    from VAL->type.  This may happen in LTO where tree merging did not merge
1682    all variants of the same type or due to ODR violation.
1683 
1684    Analyze and report ODR violations and add type to duplicate list.
1685    If TYPE is more specified than VAL->type, prevail VAL->type.  Also if
1686    this is first time we see definition of a class return true so the
1687    base types are analyzed.  */
1688 
1689 static bool
1690 add_type_duplicate (odr_type val, tree type)
1691 {
1692   bool build_bases = false;
1693   bool prevail = false;
1694   bool odr_must_violate = false;
1695 
1696   if (!val->types_set)
1697     val->types_set = new hash_set<tree>;
1698 
1699   /* Chose polymorphic type as leader (this happens only in case of ODR
1700      violations.  */
1701   if ((TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1702        && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1703       && (TREE_CODE (val->type) != RECORD_TYPE || !TYPE_BINFO (val->type)
1704           || !polymorphic_type_binfo_p (TYPE_BINFO (val->type))))
1705     {
1706       prevail = true;
1707       build_bases = true;
1708     }
1709   /* Always prefer complete type to be the leader.  */
1710   else if (!COMPLETE_TYPE_P (val->type) && COMPLETE_TYPE_P (type))
1711     {
1712       prevail = true;
1713       build_bases = TYPE_BINFO (type);
1714     }
1715   else if (COMPLETE_TYPE_P (val->type) && !COMPLETE_TYPE_P (type))
1716     ;
1717   else if (TREE_CODE (val->type) == ENUMERAL_TYPE
1718 	   && TREE_CODE (type) == ENUMERAL_TYPE
1719 	   && !TYPE_VALUES (val->type) && TYPE_VALUES (type))
1720     prevail = true;
1721   else if (TREE_CODE (val->type) == RECORD_TYPE
1722 	   && TREE_CODE (type) == RECORD_TYPE
1723 	   && TYPE_BINFO (type) && !TYPE_BINFO (val->type))
1724     {
1725       gcc_assert (!val->bases.length ());
1726       build_bases = true;
1727       prevail = true;
1728     }
1729 
1730   if (prevail)
1731     std::swap (val->type, type);
1732 
1733   val->types_set->add (type);
1734 
1735   /* If we now have a mangled name, be sure to record it to val->type
1736      so ODR hash can work.  */
1737 
1738   if (can_be_name_hashed_p (type) && !can_be_name_hashed_p (val->type))
1739     SET_DECL_ASSEMBLER_NAME (TYPE_NAME (val->type),
1740 			     DECL_ASSEMBLER_NAME (TYPE_NAME (type)));
1741 
1742   bool merge = true;
1743   bool base_mismatch = false;
1744   unsigned int i;
1745   bool warned = false;
1746   hash_set<type_pair> visited;
1747 
1748   gcc_assert (in_lto_p);
1749   vec_safe_push (val->types, type);
1750 
1751   /* If both are class types, compare the bases.  */
1752   if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1753       && TREE_CODE (val->type) == RECORD_TYPE
1754       && TREE_CODE (type) == RECORD_TYPE
1755       && TYPE_BINFO (val->type) && TYPE_BINFO (type))
1756     {
1757       if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1758 	  != BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1759 	{
1760 	  if (!flag_ltrans && !warned && !val->odr_violated)
1761 	    {
1762 	      tree extra_base;
1763 	      warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1764 			"a type with the same name but different "
1765 			"number of polymorphic bases is "
1766 			"defined in another translation unit");
1767 	      if (warned)
1768 		{
1769 		  if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1770 		      > BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1771 		    extra_base = BINFO_BASE_BINFO
1772 				 (TYPE_BINFO (type),
1773 				  BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)));
1774 		  else
1775 		    extra_base = BINFO_BASE_BINFO
1776 				 (TYPE_BINFO (val->type),
1777 				  BINFO_N_BASE_BINFOS (TYPE_BINFO (type)));
1778 		  tree extra_base_type = BINFO_TYPE (extra_base);
1779 		  inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type)),
1780 			  "the extra base is defined here");
1781 		}
1782 	    }
1783 	  base_mismatch = true;
1784 	}
1785       else
1786 	for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1787 	  {
1788 	    tree base1 = BINFO_BASE_BINFO (TYPE_BINFO (type), i);
1789 	    tree base2 = BINFO_BASE_BINFO (TYPE_BINFO (val->type), i);
1790 	    tree type1 = BINFO_TYPE (base1);
1791 	    tree type2 = BINFO_TYPE (base2);
1792 
1793 	    if (types_odr_comparable (type1, type2))
1794 	      {
1795 		if (!types_same_for_odr (type1, type2))
1796 		  base_mismatch = true;
1797 	      }
1798 	    else
1799 	      if (!odr_types_equivalent_p (type1, type2))
1800 		base_mismatch = true;
1801 	    if (base_mismatch)
1802 	      {
1803 		if (!warned && !val->odr_violated)
1804 		  {
1805 		    warn_odr (type, val->type, NULL, NULL,
1806 			      !warned, &warned,
1807 			      "a type with the same name but different base "
1808 			      "type is defined in another translation unit");
1809 		    if (warned)
1810 		      warn_types_mismatch (type1, type2,
1811 					    UNKNOWN_LOCATION, UNKNOWN_LOCATION);
1812 		  }
1813 		break;
1814 	      }
1815 	    if (BINFO_OFFSET (base1) != BINFO_OFFSET (base2))
1816 	      {
1817 		base_mismatch = true;
1818 		if (!warned && !val->odr_violated)
1819 		  warn_odr (type, val->type, NULL, NULL,
1820 			    !warned, &warned,
1821 			    "a type with the same name but different base "
1822 			    "layout is defined in another translation unit");
1823 		break;
1824 	      }
1825 	    /* One of bases is not of complete type.  */
1826 	    if (!TYPE_BINFO (type1) != !TYPE_BINFO (type2))
1827 	      {
1828 		/* If we have a polymorphic type info specified for TYPE1
1829 		   but not for TYPE2 we possibly missed a base when recording
1830 		   VAL->type earlier.
1831 		   Be sure this does not happen.  */
1832 		if (TYPE_BINFO (type1)
1833 		    && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1834 		    && !build_bases)
1835 		  odr_must_violate = true;
1836 	        break;
1837 	      }
1838 	    /* One base is polymorphic and the other not.
1839 	       This ought to be diagnosed earlier, but do not ICE in the
1840 	       checking bellow.  */
1841 	    else if (TYPE_BINFO (type1)
1842 		     && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1843 		        != polymorphic_type_binfo_p (TYPE_BINFO (type2)))
1844 	      {
1845 		if (!warned && !val->odr_violated)
1846 		  warn_odr (type, val->type, NULL, NULL,
1847 			    !warned, &warned,
1848 			    "a base of the type is polymorphic only in one "
1849 			    "translation unit");
1850 		base_mismatch = true;
1851 		break;
1852 	      }
1853 	  }
1854       if (base_mismatch)
1855 	{
1856 	  merge = false;
1857 	  odr_violation_reported = true;
1858 	  val->odr_violated = true;
1859 
1860 	  if (symtab->dump_file)
1861 	    {
1862 	      fprintf (symtab->dump_file, "ODR base violation\n");
1863 
1864 	      print_node (symtab->dump_file, "", val->type, 0);
1865 	      putc ('\n',symtab->dump_file);
1866 	      print_node (symtab->dump_file, "", type, 0);
1867 	      putc ('\n',symtab->dump_file);
1868 	    }
1869 	}
1870     }
1871 
1872   /* Next compare memory layout.
1873      The DECL_SOURCE_LOCATIONs in this invocation came from LTO streaming.
1874      We must apply the location cache to ensure that they are valid
1875      before we can pass them to odr_types_equivalent_p (PR lto/83121).  */
1876   if (lto_location_cache::current_cache)
1877     lto_location_cache::current_cache->apply_location_cache ();
1878   if (!odr_types_equivalent_p (val->type, type,
1879 			       !flag_ltrans && !val->odr_violated && !warned,
1880 			       &warned, &visited,
1881 			       DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
1882 			       DECL_SOURCE_LOCATION (TYPE_NAME (type))))
1883     {
1884       merge = false;
1885       odr_violation_reported = true;
1886       val->odr_violated = true;
1887     }
1888   gcc_assert (val->odr_violated || !odr_must_violate);
1889   /* Sanity check that all bases will be build same way again.  */
1890   if (flag_checking
1891       && COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1892       && TREE_CODE (val->type) == RECORD_TYPE
1893       && TREE_CODE (type) == RECORD_TYPE
1894       && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1895       && !val->odr_violated
1896       && !base_mismatch && val->bases.length ())
1897     {
1898       unsigned int num_poly_bases = 0;
1899       unsigned int j;
1900 
1901       for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1902 	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1903 					 (TYPE_BINFO (type), i)))
1904 	  num_poly_bases++;
1905       gcc_assert (num_poly_bases == val->bases.length ());
1906       for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type));
1907 	   i++)
1908 	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1909 				       (TYPE_BINFO (type), i)))
1910 	  {
1911 	    odr_type base = get_odr_type
1912 			       (BINFO_TYPE
1913 				  (BINFO_BASE_BINFO (TYPE_BINFO (type),
1914 						     i)),
1915 				true);
1916 	    gcc_assert (val->bases[j] == base);
1917 	    j++;
1918 	  }
1919     }
1920 
1921 
1922   /* Regularize things a little.  During LTO same types may come with
1923      different BINFOs.  Either because their virtual table was
1924      not merged by tree merging and only later at decl merging or
1925      because one type comes with external vtable, while other
1926      with internal.  We want to merge equivalent binfos to conserve
1927      memory and streaming overhead.
1928 
1929      The external vtables are more harmful: they contain references
1930      to external declarations of methods that may be defined in the
1931      merged LTO unit.  For this reason we absolutely need to remove
1932      them and replace by internal variants. Not doing so will lead
1933      to incomplete answers from possible_polymorphic_call_targets.
1934 
1935      FIXME: disable for now; because ODR types are now build during
1936      streaming in, the variants do not need to be linked to the type,
1937      yet.  We need to do the merging in cleanup pass to be implemented
1938      soon.  */
1939   if (!flag_ltrans && merge
1940       && 0
1941       && TREE_CODE (val->type) == RECORD_TYPE
1942       && TREE_CODE (type) == RECORD_TYPE
1943       && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1944       && TYPE_MAIN_VARIANT (type) == type
1945       && TYPE_MAIN_VARIANT (val->type) == val->type
1946       && BINFO_VTABLE (TYPE_BINFO (val->type))
1947       && BINFO_VTABLE (TYPE_BINFO (type)))
1948     {
1949       tree master_binfo = TYPE_BINFO (val->type);
1950       tree v1 = BINFO_VTABLE (master_binfo);
1951       tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
1952 
1953       if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
1954 	{
1955 	  gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
1956 		      && operand_equal_p (TREE_OPERAND (v1, 1),
1957 					  TREE_OPERAND (v2, 1), 0));
1958 	  v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
1959 	  v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
1960 	}
1961       gcc_assert (DECL_ASSEMBLER_NAME (v1)
1962 		  == DECL_ASSEMBLER_NAME (v2));
1963 
1964       if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
1965 	{
1966 	  unsigned int i;
1967 
1968 	  set_type_binfo (val->type, TYPE_BINFO (type));
1969 	  for (i = 0; i < val->types->length (); i++)
1970 	    {
1971 	      if (TYPE_BINFO ((*val->types)[i])
1972 		  == master_binfo)
1973 		set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
1974 	    }
1975 	  BINFO_TYPE (TYPE_BINFO (type)) = val->type;
1976 	}
1977       else
1978 	set_type_binfo (type, master_binfo);
1979     }
1980   return build_bases;
1981 }
1982 
1983 /* Get ODR type hash entry for TYPE.  If INSERT is true, create
1984    possibly new entry.  */
1985 
odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot = NULL;
  odr_type_d **vtable_slot = NULL;
  odr_type val = NULL;
  hashval_t hash;
  bool build_bases = false;
  bool insert_to_odr_array = false;
  int base_id = -1;

  type = main_odr_variant (type);

  /* Each type must be reachable by at least one of the two hashing
     schemes: by mangled name, or (during LTO) by its vtable.  */
  gcc_checking_assert (can_be_name_hashed_p (type)
		       || can_be_vtable_hashed_p (type));

  /* Lookup entry, first try name hash, fallback to vtable hash.  */
  if (can_be_name_hashed_p (type))
    {
      hash = hash_odr_name (type);
      slot = odr_hash->find_slot_with_hash (type, hash,
					    insert ? INSERT : NO_INSERT);
    }
  if ((!slot || !*slot) && in_lto_p && can_be_vtable_hashed_p (type))
    {
      hash = hash_odr_vtable (type);
      vtable_slot = odr_vtable_hash->find_slot_with_hash (type, hash,
					           insert ? INSERT : NO_INSERT);
    }

  /* With NO_INSERT both lookups may fail; there is no entry for TYPE.  */
  if (!slot && !vtable_slot)
    return NULL;

  /* See if we already have entry for type.  */
  if ((slot && *slot) || (vtable_slot && *vtable_slot))
    {
      if (slot && *slot)
	{
	  val = *slot;
	  /* When checking, verify that the vtable hash agrees with the
	     entry found via the name hash.  */
	  if (flag_checking
	      && in_lto_p && can_be_vtable_hashed_p (type))
	    {
	      hash = hash_odr_vtable (type);
	      vtable_slot = odr_vtable_hash->find_slot_with_hash (type, hash,
						                  NO_INSERT);
	      gcc_assert (!vtable_slot || *vtable_slot == *slot);
	      vtable_slot = NULL;
	    }
	}
      else if (*vtable_slot)
	val = *vtable_slot;

      /* TYPE is a duplicate tree of VAL->type that we have not recorded
	 yet; register it and compare it against the existing leader.  */
      if (val->type != type
	  && (!val->types_set || !val->types_set->add (type)))
	{
	  gcc_assert (insert);
	  /* We have type duplicate, but it may introduce vtable name or
	     mangled name; be sure to keep hashes in sync.  */
	  if (in_lto_p && can_be_vtable_hashed_p (type)
	      && (!vtable_slot || !*vtable_slot))
	    {
	      if (!vtable_slot)
		{
		  hash = hash_odr_vtable (type);
		  vtable_slot = odr_vtable_hash->find_slot_with_hash
			     (type, hash, INSERT);
		  gcc_checking_assert (!*vtable_slot || *vtable_slot == val);
		}
	      *vtable_slot = val;
	    }
	  if (slot && !*slot)
	    *slot = val;
	  /* add_type_duplicate returns true when bases still need to be
	     recorded (the leader just became complete).  */
	  build_bases = add_type_duplicate (val, type);
	}
    }
  else
    {
      /* First time we see TYPE: allocate a fresh ODR type entry.  */
      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      if (type_with_linkage_p (type))
        val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      else
	val->anonymous_namespace = 0;
      /* Bases can only be recorded once the type is complete.  */
      build_bases = COMPLETE_TYPE_P (val->type);
      insert_to_odr_array = true;
      if (slot)
        *slot = val;
      if (vtable_slot)
	*vtable_slot = val;
    }

  if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
      && type_with_linkage_p (type)
      && type == TYPE_MAIN_VARIANT (type))
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);
  
      val->all_derivations_known = type_all_derivations_known_p (type);
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types; others are pointless for
	   devirtualization and we cannot precisely determine
	   ODR equivalency of these during LTO.  */
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
	  {
	    tree base_type= BINFO_TYPE (BINFO_BASE_BINFO (binfo, i));
	    odr_type base = get_odr_type (base_type, true);
	    gcc_assert (TYPE_MAIN_VARIANT (base_type) == base_type);
	    base->derived_types.safe_push (val);
	    val->bases.safe_push (base);
	    /* Track the largest base id so we can check the invariant that
	       a type's id is greater than ids of all its bases.  */
	    if (base->id > base_id)
	      base_id = base->id;
	  }
      }
  /* Ensure that type always appears after bases.  */
  if (insert_to_odr_array)
    {
      if (odr_types_ptr)
        val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  else if (base_id > val->id)
    {
      /* A base was registered after VAL; renumber VAL by moving it to the
	 end of the array and clearing its old slot.  */
      odr_types[val->id] = 0;
      /* Be sure we did not record any derived types; these may need
	 renumbering too.  */
      gcc_assert (val->derived_types.length() == 0);
      val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}
2122 
/* Add TYPE to the ODR type hash.  */
2124 
2125 void
2126 register_odr_type (tree type)
2127 {
2128   if (!odr_hash)
2129     {
2130       odr_hash = new odr_hash_type (23);
2131       if (in_lto_p)
2132         odr_vtable_hash = new odr_vtable_hash_type (23);
2133     }
2134   /* Arrange things to be nicer and insert main variants first.
2135      ??? fundamental prerecorded types do not have mangled names; this
2136      makes it possible that non-ODR type is main_odr_variant of ODR type.
2137      Things may get smoother if LTO FE set mangled name of those types same
2138      way as C++ FE does.  */
2139   if (odr_type_p (main_odr_variant (TYPE_MAIN_VARIANT (type)))
2140       && odr_type_p (TYPE_MAIN_VARIANT (type)))
2141     get_odr_type (TYPE_MAIN_VARIANT (type), true);
2142   if (TYPE_MAIN_VARIANT (type) != type && odr_type_p (main_odr_variant (type)))
2143     get_odr_type (type, true);
2144 }
2145 
2146 /* Return true if type is known to have no derivations.  */
2147 
2148 bool
2149 type_known_to_have_no_derivations_p (tree t)
2150 {
2151   return (type_all_derivations_known_p (t)
2152 	  && (TYPE_FINAL_P (t)
2153 	      || (odr_hash
2154 		  && !get_odr_type (t, true)->derived_types.length())));
2155 }
2156 
2157 /* Dump ODR type T and all its derived types.  INDENT specifies indentation for
2158    recursive printing.  */
2159 
2160 static void
2161 dump_odr_type (FILE *f, odr_type t, int indent=0)
2162 {
2163   unsigned int i;
2164   fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
2165   print_generic_expr (f, t->type, TDF_SLIM);
2166   fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
2167   fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
2168   if (TYPE_NAME (t->type))
2169     {
2170       /*fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
2171 	       DECL_SOURCE_FILE (TYPE_NAME (t->type)),
2172 	       DECL_SOURCE_LINE (TYPE_NAME (t->type)));*/
2173       if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t->type)))
2174         fprintf (f, "%*s mangled name: %s\n", indent * 2, "",
2175 		 IDENTIFIER_POINTER
2176 		   (DECL_ASSEMBLER_NAME (TYPE_NAME (t->type))));
2177     }
2178   if (t->bases.length ())
2179     {
2180       fprintf (f, "%*s base odr type ids: ", indent * 2, "");
2181       for (i = 0; i < t->bases.length (); i++)
2182 	fprintf (f, " %i", t->bases[i]->id);
2183       fprintf (f, "\n");
2184     }
2185   if (t->derived_types.length ())
2186     {
2187       fprintf (f, "%*s derived types:\n", indent * 2, "");
2188       for (i = 0; i < t->derived_types.length (); i++)
2189         dump_odr_type (f, t->derived_types[i], indent + 1);
2190     }
2191   fprintf (f, "\n");
2192 }
2193 
2194 /* Dump the type inheritance graph.  */
2195 
2196 static void
2197 dump_type_inheritance_graph (FILE *f)
2198 {
2199   unsigned int i;
2200   if (!odr_types_ptr)
2201     return;
2202   fprintf (f, "\n\nType inheritance graph:\n");
2203   for (i = 0; i < odr_types.length (); i++)
2204     {
2205       if (odr_types[i] && odr_types[i]->bases.length () == 0)
2206 	dump_odr_type (f, odr_types[i]);
2207     }
2208   for (i = 0; i < odr_types.length (); i++)
2209     {
2210       if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
2211 	{
2212 	  unsigned int j;
2213 	  fprintf (f, "Duplicate tree types for odr type %i\n", i);
2214 	  print_node (f, "", odr_types[i]->type, 0);
2215 	  for (j = 0; j < odr_types[i]->types->length (); j++)
2216 	    {
2217 	      tree t;
2218 	      fprintf (f, "duplicate #%i\n", j);
2219 	      print_node (f, "", (*odr_types[i]->types)[j], 0);
2220 	      t = (*odr_types[i]->types)[j];
2221 	      while (TYPE_P (t) && TYPE_CONTEXT (t))
2222 		{
2223 		  t = TYPE_CONTEXT (t);
2224 	          print_node (f, "", t, 0);
2225 		}
2226 	      putc ('\n',f);
2227 	    }
2228 	}
2229     }
2230 }
2231 
2232 /* Initialize IPA devirt and build inheritance tree graph.  */
2233 
2234 void
2235 build_type_inheritance_graph (void)
2236 {
2237   struct symtab_node *n;
2238   FILE *inheritance_dump_file;
2239   dump_flags_t flags;
2240 
2241   if (odr_hash)
2242     return;
2243   timevar_push (TV_IPA_INHERITANCE);
2244   inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
2245   odr_hash = new odr_hash_type (23);
2246   if (in_lto_p)
2247     odr_vtable_hash = new odr_vtable_hash_type (23);
2248 
2249   /* We reconstruct the graph starting of types of all methods seen in the
2250      unit.  */
2251   FOR_EACH_SYMBOL (n)
2252     if (is_a <cgraph_node *> (n)
2253 	&& DECL_VIRTUAL_P (n->decl)
2254 	&& n->real_symbol_p ())
2255       get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
2256 
2257     /* Look also for virtual tables of types that do not define any methods.
2258 
2259        We need it in a case where class B has virtual base of class A
2260        re-defining its virtual method and there is class C with no virtual
2261        methods with B as virtual base.
2262 
2263        Here we output B's virtual method in two variant - for non-virtual
2264        and virtual inheritance.  B's virtual table has non-virtual version,
2265        while C's has virtual.
2266 
2267        For this reason we need to know about C in order to include both
2268        variants of B.  More correctly, record_target_from_binfo should
2269        add both variants of the method when walking B, but we have no
2270        link in between them.
2271 
2272        We rely on fact that either the method is exported and thus we
2273        assume it is called externally or C is in anonymous namespace and
2274        thus we will see the vtable.  */
2275 
2276     else if (is_a <varpool_node *> (n)
2277 	     && DECL_VIRTUAL_P (n->decl)
2278 	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
2279 	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
2280 	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
2281       get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
2282   if (inheritance_dump_file)
2283     {
2284       dump_type_inheritance_graph (inheritance_dump_file);
2285       dump_end (TDI_inheritance, inheritance_dump_file);
2286     }
2287   timevar_pop (TV_IPA_INHERITANCE);
2288 }
2289 
/* Return true if NODE is referenced from a live virtual table
   (and thus can be a destination of a polymorphic call).
   Be conservatively correct when the callgraph is not built or
   when the method may be referred to externally.  */
2294 
2295 static bool
2296 referenced_from_vtable_p (struct cgraph_node *node)
2297 {
2298   int i;
2299   struct ipa_ref *ref;
2300   bool found = false;
2301 
2302   if (node->externally_visible
2303       || DECL_EXTERNAL (node->decl)
2304       || node->used_from_other_partition)
2305     return true;
2306 
2307   /* Keep this test constant time.
2308      It is unlikely this can happen except for the case where speculative
2309      devirtualization introduced many speculative edges to this node.
2310      In this case the target is very likely alive anyway.  */
2311   if (node->ref_list.referring.length () > 100)
2312     return true;
2313 
2314   /* We need references built.  */
2315   if (symtab->state <= CONSTRUCTION)
2316     return true;
2317 
2318   for (i = 0; node->iterate_referring (i, ref); i++)
2319     if ((ref->use == IPA_REF_ALIAS
2320 	 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
2321 	|| (ref->use == IPA_REF_ADDR
2322 	    && VAR_P (ref->referring->decl)
2323 	    && DECL_VIRTUAL_P (ref->referring->decl)))
2324       {
2325 	found = true;
2326 	break;
2327       }
2328   return found;
2329 }
2330 
/* Return true if TARGET is __cxa_pure_virtual.  */
2332 
2333 static bool
2334 is_cxa_pure_virtual_p (tree target)
2335 {
2336   return target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE
2337 	 && DECL_NAME (target)
2338 	 && id_equal (DECL_NAME (target),
2339 		     "__cxa_pure_virtual");
2340 }
2341 
/* If TARGET has an associated node, record it in the NODES array.
   CAN_REFER specifies whether the program can refer to the target
   directly.  If TARGET is unknown (NULL) or it cannot be inserted (for
   example because its body was already removed and there is no way to
   refer to it), clear COMPLETEP.  */
2347 
static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, hash_set<tree> *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;
  bool pure_virtual = is_cxa_pure_virtual_p (target);

  /* __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE && !pure_virtual)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  /* Unknown but referable target: nothing to record.  */
  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer alias target over aliases, so we do not get confused by
     fake duplicates.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      if (target_node != alias_target
	  && avail >= AVAIL_AVAILABLE
	  && target_node->get_availability ())
	target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && !pure_virtual
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
          || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      /* When sanitizing, do not assume that __cxa_pure_virtual is not called
	 by valid program.  */
      if (flag_sanitize & SANITIZE_UNREACHABLE)
	;
      /* Only add pure virtual if it is the only possible target.  This way
	 we will preserve the diagnostics about pure virtual called in many
	 cases without disabling optimization in other.  */
      else if (pure_virtual)
	{
	  if (nodes.length ())
	    return;
	}
      /* If we found a real target, take away cxa_pure_virtual.  */
      else if (!pure_virtual && nodes.length () == 1
	       && is_cxa_pure_virtual_p (nodes[0]->decl))
	nodes.pop ();
      /* Even on the sanitizer path, a pure virtual is recorded only as
	 the sole target.  */
      if (pure_virtual && nodes.length ())
	return;
      /* Record TARGET once; INSERTED de-duplicates across calls.  */
      if (!inserted->add (target))
	{
	  cached_polymorphic_call_targets->add (target_node);
	  nodes.safe_push (target_node);
	}
    }
  /* Caller does not track completeness; nothing more to do.  */
  else if (!completep)
    ;
  /* We have definition of __cxa_pure_virtual that is not accessible (it is
     optimized out or partitioned to other unit) so we can not add it.  When
     not sanitizing, there is nothing to do.
     Otherwise declare the list incomplete.  */
  else if (pure_virtual)
    {
      if (flag_sanitize & SANITIZE_UNREACHABLE)
	*completep = false;
    }
  else if (flag_ltrans
	   || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
    *completep = false;
}
2457 
2458 /* See if BINFO's type matches OUTER_TYPE.  If so, look up
2459    BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
2460    method in vtable and insert method to NODES array
2461    or BASES_TO_CONSIDER if this array is non-NULL.
2462    Otherwise recurse to base BINFOs.
2463    This matches what get_binfo_at_offset does, but with offset
2464    being unknown.
2465 
2466    TYPE_BINFOS is a stack of BINFOS of types with defined
2467    virtual table seen on way from class type to BINFO.
2468 
2469    MATCHED_VTABLES tracks virtual tables we already did lookup
2470    for virtual function in. INSERTED tracks nodes we already
2471    inserted.
2472 
2473    ANONYMOUS is true if BINFO is part of anonymous namespace.
2474 
2475    Clear COMPLETEP when we hit unreferable target.
2476   */
2477 
static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  hash_set<tree> *inserted,
			  hash_set<tree> *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  /* Maintain TYPE_BINFOS as a stack of binfos with a vtable seen on the
     path from the class type down to BINFO; popped again before return.  */
  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      if (!inner_binfo)
	{
	  /* A missing binfo at OFFSET means a broken hierarchy; an ODR
	     violation must have been diagnosed already.  */
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive. If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  /* BINFO_VTABLE may be an offsetted reference; strip down to the
	     vtable VAR_DECL itself.  */
	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_node::get (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      /* Look up the method at most once per vtable.  When only collecting
	 BASES_TO_CONSIDER, test membership without marking the vtable as
	 matched, so it can still be recorded to NODES later.  */
      if (bases_to_consider
	  ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
	  : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction vtables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
2568 
2569 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
2570    of TYPE, insert them to NODES, recurse into derived nodes.
2571    INSERTED is used to avoid duplicate insertions of methods into NODES.
2572    MATCHED_VTABLES are used to avoid duplicate walking vtables.
2573    Clear COMPLETEP if unreferable target is found.
2574 
2575    If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
2576    all cases where BASE_SKIPPED is true (because the base is abstract
2577    class).  */
2578 
2579 static void
2580 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
2581 				     hash_set<tree> *inserted,
2582 				     hash_set<tree> *matched_vtables,
2583 				     tree otr_type,
2584 				     odr_type type,
2585 				     HOST_WIDE_INT otr_token,
2586 				     tree outer_type,
2587 				     HOST_WIDE_INT offset,
2588 				     bool *completep,
2589 				     vec <tree> &bases_to_consider,
2590 				     bool consider_construction)
2591 {
2592   tree binfo = TYPE_BINFO (type->type);
2593   unsigned int i;
2594   auto_vec <tree, 8> type_binfos;
2595   bool possibly_instantiated = type_possibly_instantiated_p (type->type);
2596 
2597   /* We may need to consider types w/o instances because of possible derived
2598      types using their methods either directly or via construction vtables.
2599      We are safe to skip them when all derivations are known, since we will
2600      handle them later.
2601      This is done by recording them to BASES_TO_CONSIDER array.  */
2602   if (possibly_instantiated || consider_construction)
2603     {
2604       record_target_from_binfo (nodes,
2605 				(!possibly_instantiated
2606 				 && type_all_derivations_known_p (type->type))
2607 				? &bases_to_consider : NULL,
2608 				binfo, otr_type, type_binfos, otr_token,
2609 				outer_type, offset,
2610 				inserted, matched_vtables,
2611 				type->anonymous_namespace, completep);
2612     }
2613   for (i = 0; i < type->derived_types.length (); i++)
2614     possible_polymorphic_call_targets_1 (nodes, inserted,
2615 					 matched_vtables,
2616 					 otr_type,
2617 					 type->derived_types[i],
2618 					 otr_token, outer_type, offset, completep,
2619 					 bases_to_consider, consider_construction);
2620 }
2621 
2622 /* Cache of queries for polymorphic call targets.
2623 
2624    Enumerating all call targets may get expensive when there are many
2625    polymorphic calls in the program, so we memoize all the previous
2626    queries and avoid duplicated work.  */
2627 
struct polymorphic_call_target_d
{
  /* Token (vtable slot index) of the call being queried.  */
  HOST_WIDE_INT otr_token;
  /* Polymorphic call context the query was made for.  */
  ipa_polymorphic_call_context context;
  /* ODR type of the OBJ_TYPE_REF class.  */
  odr_type type;
  /* Cached list of possible call targets.  */
  vec <cgraph_node *> targets;
  /* If set, the method decl to be counted for -Wsuggest-final-methods.  */
  tree decl_warning;
  /* If nonzero, id + 1 of the outer type to be counted for
     -Wsuggest-final-types.  */
  int type_warning;
  /* Number of ODR types known when the entry was computed; adding new
     types may change the answer, so this is used to spot stale entries.  */
  unsigned int n_odr_types;
  /* True if TARGETS is known to be the complete list.  */
  bool complete;
  /* True when the entry was computed for a speculative query.  */
  bool speculative;
};
2640 
2641 /* Polymorphic call target cache helpers.  */
2642 
struct polymorphic_call_target_hasher
  : pointer_hash <polymorphic_call_target_d>
{
  /* Hash a query record; must agree with equal below.  */
  static inline hashval_t hash (const polymorphic_call_target_d *);
  /* Compare two query records for cache equality.  */
  static inline bool equal (const polymorphic_call_target_d *,
			    const polymorphic_call_target_d *);
  /* Free an entry (and its target vector) on removal.  */
  static inline void remove (polymorphic_call_target_d *);
};
2651 
2652 /* Return the computed hashcode for ODR_QUERY.  */
2653 
inline hashval_t
polymorphic_call_target_hasher::hash (const polymorphic_call_target_d *odr_query)
{
  inchash::hash hstate (odr_query->otr_token);

  /* Mix in every field that equal () compares so that equal entries
     always hash the same.  */
  hstate.add_hwi (odr_query->type->id);
  hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
  hstate.add_hwi (odr_query->context.offset);
  hstate.add_hwi (odr_query->n_odr_types);

  if (odr_query->context.speculative_outer_type)
    {
      hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
      hstate.add_hwi (odr_query->context.speculative_offset);
    }
  /* The boolean properties are accumulated as packed flags.  */
  hstate.add_flag (odr_query->speculative);
  hstate.add_flag (odr_query->context.maybe_in_construction);
  hstate.add_flag (odr_query->context.maybe_derived_type);
  hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
  hstate.commit_flag ();
  return hstate.end ();
}
2676 
2677 /* Compare cache entries T1 and T2.  */
2678 
inline bool
polymorphic_call_target_hasher::equal (const polymorphic_call_target_d *t1,
				       const polymorphic_call_target_d *t2)
{
  /* Every field compared here must also be mixed into hash above.  */
  return (t1->type == t2->type && t1->otr_token == t2->otr_token
	  && t1->speculative == t2->speculative
	  && t1->context.offset == t2->context.offset
	  && t1->context.speculative_offset == t2->context.speculative_offset
	  && t1->context.outer_type == t2->context.outer_type
	  && t1->context.speculative_outer_type == t2->context.speculative_outer_type
	  && t1->context.maybe_in_construction
	      == t2->context.maybe_in_construction
	  && t1->context.maybe_derived_type == t2->context.maybe_derived_type
	  && (t1->context.speculative_maybe_derived_type
	      == t2->context.speculative_maybe_derived_type)
	  /* Adding new type may affect outcome of target search.  */
	  && t1->n_odr_types == t2->n_odr_types);
}
2697 
2698 /* Remove entry in polymorphic call target cache hash.  */
2699 
inline void
polymorphic_call_target_hasher::remove (polymorphic_call_target_d *v)
{
  /* The entry owns its target vector; release it before freeing the
     entry itself.  */
  v->targets.release ();
  free (v);
}
2706 
2707 /* Polymorphic call target query cache.  */
2708 
2709 typedef hash_table<polymorphic_call_target_hasher>
2710    polymorphic_call_target_hash_type;
2711 static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
2712 
2713 /* Destroy polymorphic call target query cache.  */
2714 
2715 static void
2716 free_polymorphic_call_targets_hash ()
2717 {
2718   if (cached_polymorphic_call_targets)
2719     {
2720       delete polymorphic_call_target_hash;
2721       polymorphic_call_target_hash = NULL;
2722       delete cached_polymorphic_call_targets;
2723       cached_polymorphic_call_targets = NULL;
2724     }
2725 }
2726 
2727 /* Force rebuilding type inheritance graph from scratch.
2728    This is use to make sure that we do not keep references to types
2729    which was not visible to free_lang_data.  */
2730 
2731 void
2732 rebuild_type_inheritance_graph ()
2733 {
2734   if (!odr_hash)
2735     return;
2736   delete odr_hash;
2737   if (in_lto_p)
2738     delete odr_vtable_hash;
2739   odr_hash = NULL;
2740   odr_vtable_hash = NULL;
2741   odr_types_ptr = NULL;
2742   free_polymorphic_call_targets_hash ();
2743 }
2744 
2745 /* When virtual function is removed, we may need to flush the cache.  */
2746 
2747 static void
2748 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
2749 {
2750   if (cached_polymorphic_call_targets
2751       && cached_polymorphic_call_targets->contains (n))
2752     free_polymorphic_call_targets_hash ();
2753 }
2754 
2755 /* Look up base of BINFO that has virtual table VTABLE with OFFSET.  */
2756 
2757 tree
2758 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2759 				tree vtable)
2760 {
2761   tree v = BINFO_VTABLE (binfo);
2762   int i;
2763   tree base_binfo;
2764   unsigned HOST_WIDE_INT this_offset;
2765 
2766   if (v)
2767     {
2768       if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2769 	gcc_unreachable ();
2770 
2771       if (offset == this_offset
2772 	  && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2773 	return binfo;
2774     }
2775 
2776   for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2777     if (polymorphic_type_binfo_p (base_binfo))
2778       {
2779 	base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2780 	if (base_binfo)
2781 	  return base_binfo;
2782       }
2783   return NULL;
2784 }
2785 
2786 /* T is known constant value of virtual table pointer.
2787    Store virtual table to V and its offset to OFFSET.
2788    Return false if T does not look like virtual table reference.  */
2789 
2790 bool
2791 vtable_pointer_value_to_vtable (const_tree t, tree *v,
2792 				unsigned HOST_WIDE_INT *offset)
2793 {
2794   /* We expect &MEM[(void *)&virtual_table + 16B].
2795      We obtain object's BINFO from the context of the virtual table.
2796      This one contains pointer to virtual table represented via
2797      POINTER_PLUS_EXPR.  Verify that this pointer matches what
2798      we propagated through.
2799 
2800      In the case of virtual inheritance, the virtual tables may
2801      be nested, i.e. the offset may be different from 16 and we may
2802      need to dive into the type representation.  */
2803   if (TREE_CODE (t) == ADDR_EXPR
2804       && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2805       && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2806       && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2807       && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2808 	  == VAR_DECL)
2809       && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2810 					 (TREE_OPERAND (t, 0), 0), 0)))
2811     {
2812       *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2813       *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2814       return true;
2815     }
2816 
2817   /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
2818      We need to handle it when T comes from static variable initializer or
2819      BINFO. */
2820   if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2821     {
2822       *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2823       t = TREE_OPERAND (t, 0);
2824     }
2825   else
2826     *offset = 0;
2827 
2828   if (TREE_CODE (t) != ADDR_EXPR)
2829     return false;
2830   *v = TREE_OPERAND (t, 0);
2831   return true;
2832 }
2833 
2834 /* T is known constant value of virtual table pointer.  Return BINFO of the
2835    instance type.  */
2836 
tree
vtable_pointer_value_to_binfo (const_tree t)
{
  tree vtable;
  unsigned HOST_WIDE_INT offset;

  /* Decompose T into the virtual table VAR_DECL and byte offset.  */
  if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
    return NULL_TREE;

  /* FIXME: for stores of construction vtables we return NULL,
     because we do not have BINFO for those. Eventually we should fix
     our representation to allow this case to be handled, too.
     In the case we see store of BINFO we however may assume
     that standard folding will be able to cope with it.  */
  return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
					 offset, vtable);
}
2854 
2855 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2856    Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2857    and insert them in NODES.
2858 
2859    MATCHED_VTABLES and INSERTED is used to avoid duplicated work.  */
2860 
2861 static void
2862 record_targets_from_bases (tree otr_type,
2863 			   HOST_WIDE_INT otr_token,
2864 			   tree outer_type,
2865 			   HOST_WIDE_INT offset,
2866 			   vec <cgraph_node *> &nodes,
2867 			   hash_set<tree> *inserted,
2868 			   hash_set<tree> *matched_vtables,
2869 			   bool *completep)
2870 {
2871   while (true)
2872     {
2873       HOST_WIDE_INT pos, size;
2874       tree base_binfo;
2875       tree fld;
2876 
2877       if (types_same_for_odr (outer_type, otr_type))
2878 	return;
2879 
2880       for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2881 	{
2882 	  if (TREE_CODE (fld) != FIELD_DECL)
2883 	    continue;
2884 
2885 	  pos = int_bit_position (fld);
2886 	  size = tree_to_shwi (DECL_SIZE (fld));
2887 	  if (pos <= offset && (pos + size) > offset
2888 	      /* Do not get confused by zero sized bases.  */
2889 	      && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2890 	    break;
2891 	}
2892       /* Within a class type we should always find corresponding fields.  */
2893       gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2894 
2895       /* Nonbase types should have been stripped by outer_class_type.  */
2896       gcc_assert (DECL_ARTIFICIAL (fld));
2897 
2898       outer_type = TREE_TYPE (fld);
2899       offset -= pos;
2900 
2901       base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2902 					offset, otr_type);
2903       if (!base_binfo)
2904 	{
2905 	  gcc_assert (odr_violation_reported);
2906 	  return;
2907 	}
2908       gcc_assert (base_binfo);
2909       if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2910 	{
2911 	  bool can_refer;
2912 	  tree target = gimple_get_virt_method_for_binfo (otr_token,
2913 							  base_binfo,
2914 							  &can_refer);
2915 	  if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2916 	    maybe_record_node (nodes, target, inserted, can_refer, completep);
2917 	  matched_vtables->add (BINFO_VTABLE (base_binfo));
2918 	}
2919     }
2920 }
2921 
2922 /* When virtual table is removed, we may need to flush the cache.  */
2923 
2924 static void
2925 devirt_variable_node_removal_hook (varpool_node *n,
2926 				   void *d ATTRIBUTE_UNUSED)
2927 {
2928   if (cached_polymorphic_call_targets
2929       && DECL_VIRTUAL_P (n->decl)
2930       && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2931     free_polymorphic_call_targets_hash ();
2932 }
2933 
2934 /* Record about how many calls would benefit from given type to be final.  */
2935 
struct odr_type_warn_count
{
  /* The type that could be declared final.  */
  tree type;
  /* Number of polymorphic calls that would benefit.  */
  int count;
  /* Accumulated profile weight of those calls.  */
  profile_count dyn_count;
};
2942 
2943 /* Record about how many calls would benefit from given method to be final.  */
2944 
struct decl_warn_count
{
  /* The method that could be declared final.  */
  tree decl;
  /* Number of polymorphic calls that would benefit.  */
  int count;
  /* Accumulated profile weight of those calls.  */
  profile_count dyn_count;
};
2951 
2952 /* Information about type and decl warnings.  */
2953 
struct final_warning_record
{
  /* If needed grow type_warnings vector and initialize new decl_warn_count
     to have dyn_count set to profile_count::zero ().  */
  void grow_type_warnings (unsigned newlen);

  /* Profile weight of the polymorphic call currently being processed.  */
  profile_count dyn_count;
  /* Per-type counters for -Wsuggest-final-types, indexed by odr_type id.  */
  auto_vec<odr_type_warn_count> type_warnings;
  /* Per-method counters for -Wsuggest-final-methods, keyed by decl.  */
  hash_map<tree, decl_warn_count> decl_warnings;
};
2964 
2965 void
2966 final_warning_record::grow_type_warnings (unsigned newlen)
2967 {
2968   unsigned len = type_warnings.length ();
2969   if (newlen > len)
2970     {
2971       type_warnings.safe_grow_cleared (newlen);
2972       for (unsigned i = len; i < newlen; i++)
2973 	type_warnings[i].dyn_count = profile_count::zero ();
2974     }
2975 }
2976 
2977 struct final_warning_record *final_warning_records;
2978 
2979 /* Return vector containing possible targets of polymorphic call of type
2980    OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
2981    If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
2982    OTR_TYPE and include their virtual method.  This is useful for types
2983    possibly in construction or destruction where the virtual table may
2984    temporarily change to one of base types.  INCLUDE_DERIVER_TYPES make
2985    us to walk the inheritance graph for all derivations.
2986 
2987    If COMPLETEP is non-NULL, store true if the list is complete.
2988    CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
2989    in the target cache.  If user needs to visit every target list
2990    just once, it can memoize them.
2991 
2992    If SPECULATIVE is set, the list will not contain targets that
2993    are not speculatively taken.
2994 
2995    Returned vector is placed into cache.  It is NOT caller's responsibility
2996    to free it.  The vector can be freed on cgraph_remove_node call if
2997    the particular node is a virtual function present in the cache.  */
2998 
vec <cgraph_node *>
possible_polymorphic_call_targets (tree otr_type,
			           HOST_WIDE_INT otr_token,
				   ipa_polymorphic_call_context context,
			           bool *completep,
			           void **cache_token,
				   bool speculative)
{
  static struct cgraph_node_hook_list *node_removal_hook_holder;
  vec <cgraph_node *> nodes = vNULL;
  auto_vec <tree, 8> bases_to_consider;
  odr_type type, outer_type;
  polymorphic_call_target_d key;
  polymorphic_call_target_d **slot;
  unsigned int i;
  tree binfo, target;
  bool complete;
  bool can_refer = false;
  bool skipped = false;

  /* All variants of a type share one cache entry via the main variant.  */
  otr_type = TYPE_MAIN_VARIANT (otr_type);

  /* If ODR is not initialized or the context is invalid, return empty
     incomplete list.  */
  if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
    {
      if (completep)
	*completep = context.invalid;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }

  /* Do not bother to compute speculative info when user do not asks for it.  */
  if (!speculative || !context.speculative_outer_type)
    context.clear_speculation ();

  type = get_odr_type (otr_type, true);

  /* Recording type variants would waste results cache.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* Look up the outer class type we want to walk.
     If we fail to do so, the context is invalid.  */
  if ((context.outer_type || context.speculative_outer_type)
      && !context.restrict_to_inner_class (otr_type))
    {
      if (completep)
	*completep = true;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }
  gcc_assert (!context.invalid);

  /* Check that restrict_to_inner_class kept the main variant.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* We canonicalize our query, so we do not need extra hashtable entries.  */

  /* Without outer type, we have no use for offset.  Just do the
     basic search from inner type.  */
  if (!context.outer_type)
    context.clear_outer_type (otr_type);
  /* We need to update our hierarchy if the type does not exist.  */
  outer_type = get_odr_type (context.outer_type, true);
  /* If the type is complete, there are no derivations.  */
  if (TYPE_FINAL_P (outer_type->type))
    context.maybe_derived_type = false;

  /* Initialize query cache.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
      polymorphic_call_target_hash
       	= new polymorphic_call_target_hash_type (23);
      /* Register hooks so the cache is flushed when symbols it may
	 reference are removed.  */
      if (!node_removal_hook_holder)
	{
	  node_removal_hook_holder =
	    symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
	  symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
					 NULL);
	}
    }

  /* In LTO replace the context types by their ODR representatives so
     equivalent queries from different units share cache entries.  */
  if (in_lto_p)
    {
      if (context.outer_type != otr_type)
        context.outer_type
	  = get_odr_type (context.outer_type, true)->type;
      if (context.speculative_outer_type)
        context.speculative_outer_type
	  = get_odr_type (context.speculative_outer_type, true)->type;
    }

  /* Look up cached answer.  */
  key.type = type;
  key.otr_token = otr_token;
  key.speculative = speculative;
  key.context = context;
  key.n_odr_types = odr_types.length ();
  slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
  if (cache_token)
   *cache_token = (void *)*slot;
  if (*slot)
    {
      /* Cache hit: update warning statistics for the entry and return
	 the memoized target list.  */
      if (completep)
	*completep = (*slot)->complete;
      if ((*slot)->type_warning && final_warning_records)
	{
	  final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
	  if (!final_warning_records->type_warnings
		[(*slot)->type_warning - 1].dyn_count.initialized_p ())
	    final_warning_records->type_warnings
		[(*slot)->type_warning - 1].dyn_count = profile_count::zero ();
	  if (final_warning_records->dyn_count > 0)
	    final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
	      = final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
	        + final_warning_records->dyn_count;
	}
      if (!speculative && (*slot)->decl_warning && final_warning_records)
	{
	  struct decl_warn_count *c =
	     final_warning_records->decl_warnings.get ((*slot)->decl_warning);
	  c->count++;
	  if (final_warning_records->dyn_count > 0)
	    c->dyn_count += final_warning_records->dyn_count;
	}
      return (*slot)->targets;
    }

  complete = true;

  /* Do actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *)*slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;
  (*slot)->speculative = speculative;

  hash_set<tree> inserted;
  hash_set<tree> matched_vtables;

  /* First insert targets we speculatively identified as likely.  */
  if (context.speculative_outer_type)
    {
      odr_type speculative_outer_type;
      bool speculation_complete = true;

      /* First insert target from type itself and check if it may have
	 derived types.  */
      speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
      if (TYPE_FINAL_P (speculative_outer_type->type))
	context.speculative_maybe_derived_type = false;
      binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
				   context.speculative_offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	target = NULL;

      /* In the case we get complete method, we don't need
	 to walk derivations.  */
      if (target && DECL_FINAL_P (target))
	context.speculative_maybe_derived_type = false;
      if (type_possibly_instantiated_p (speculative_outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));


      /* Next walk recursively all derived types.  */
      if (context.speculative_maybe_derived_type)
	for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
	  possible_polymorphic_call_targets_1 (nodes, &inserted,
					       &matched_vtables,
					       otr_type,
					       speculative_outer_type->derived_types[i],
					       otr_token, speculative_outer_type->type,
					       context.speculative_offset,
					       &speculation_complete,
					       bases_to_consider,
					       false);
    }

  /* Non-speculative search; also used as fallback when speculation
     found no candidates.  */
  if (!speculative || !nodes.length ())
    {
      /* First see virtual method of type itself.  */
      binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
				   context.offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	{
	  gcc_assert (odr_violation_reported);
	  target = NULL;
	}

      /* Destructors are never called through construction virtual tables,
	 because the type is always known.  */
      if (target && DECL_CXX_DESTRUCTOR_P (target))
	context.maybe_in_construction = false;

      if (target)
	{
	  /* In the case we get complete method, we don't need
	     to walk derivations.  */
	  if (DECL_FINAL_P (target))
	    context.maybe_derived_type = false;
	}

      /* If OUTER_TYPE is abstract, we know we are not seeing its instance.  */
      if (type_possibly_instantiated_p (outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &complete);
      else
	skipped = true;

      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));

      /* Next walk recursively all derived types.  */
      if (context.maybe_derived_type)
	{
	  for (i = 0; i < outer_type->derived_types.length(); i++)
	    possible_polymorphic_call_targets_1 (nodes, &inserted,
						 &matched_vtables,
						 otr_type,
						 outer_type->derived_types[i],
						 otr_token, outer_type->type,
						 context.offset, &complete,
						 bases_to_consider,
						 context.maybe_in_construction);

	  /* Unknown derivations make the list incomplete; this is also the
	     place where -Wsuggest-final-types/-methods statistics are
	     gathered for calls with a single known target.  */
	  if (!outer_type->all_derivations_known)
	    {
	      if (!speculative && final_warning_records
		  && nodes.length () == 1
		  && TREE_CODE (TREE_TYPE (nodes[0]->decl)) == METHOD_TYPE)
		{
		  if (complete
		      && warn_suggest_final_types
		      && !outer_type->derived_types.length ())
		    {
		      final_warning_records->grow_type_warnings
			(outer_type->id);
		      final_warning_records->type_warnings[outer_type->id].count++;
		      if (!final_warning_records->type_warnings
				[outer_type->id].dyn_count.initialized_p ())
			final_warning_records->type_warnings
			   [outer_type->id].dyn_count = profile_count::zero ();
		      final_warning_records->type_warnings[outer_type->id].dyn_count
			+= final_warning_records->dyn_count;
		      final_warning_records->type_warnings[outer_type->id].type
			= outer_type->type;
		      (*slot)->type_warning = outer_type->id + 1;
		    }
		  if (complete
		      && warn_suggest_final_methods
		      && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
					     outer_type->type))
		    {
		      bool existed;
		      struct decl_warn_count &c =
			 final_warning_records->decl_warnings.get_or_insert
			    (nodes[0]->decl, &existed);

		      if (existed)
			{
			  c.count++;
			  c.dyn_count += final_warning_records->dyn_count;
			}
		      else
			{
			  c.count = 1;
			  c.dyn_count = final_warning_records->dyn_count;
			  c.decl = nodes[0]->decl;
			}
		      (*slot)->decl_warning = nodes[0]->decl;
		    }
		}
	      complete = false;
	    }
	}

      if (!speculative)
	{
	  /* Destructors are never called through construction virtual tables,
	     because the type is always known.  One of entries may be
	     cxa_pure_virtual so look to at least two of them.  */
	  if (context.maybe_in_construction)
	    for (i =0 ; i < MIN (nodes.length (), 2); i++)
	      if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
		context.maybe_in_construction = false;
	  if (context.maybe_in_construction)
	    {
	      if (type != outer_type
		  && (!skipped
		      || (context.maybe_derived_type
			  && !type_all_derivations_known_p (outer_type->type))))
		record_targets_from_bases (otr_type, otr_token, outer_type->type,
					   context.offset, nodes, &inserted,
					   &matched_vtables, &complete);
	      if (skipped)
		maybe_record_node (nodes, target, &inserted, can_refer, &complete);
	      for (i = 0; i < bases_to_consider.length(); i++)
		maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
	    }
	}
    }

  /* Store the result into the cache entry allocated above.  */
  (*slot)->targets = nodes;
  (*slot)->complete = complete;
  (*slot)->n_odr_types = odr_types.length ();
  if (completep)
    *completep = complete;

  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}
3325 
/* Traversal callback for the decl_warnings hash map: push a pointer to
   VALUE into VEC.  Always returns true so traversal continues.  */

bool
add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
		  vec<const decl_warn_count*> *vec)
{
  vec->safe_push (&value);
  return true;
}
3333 
3334 /* Dump target list TARGETS into FILE.  */
3335 
3336 static void
3337 dump_targets (FILE *f, vec <cgraph_node *> targets)
3338 {
3339   unsigned int i;
3340 
3341   for (i = 0; i < targets.length (); i++)
3342     {
3343       char *name = NULL;
3344       if (in_lto_p)
3345 	name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
3346       fprintf (f, " %s/%i", name ? name : targets[i]->name (),
3347 	       targets[i]->order);
3348       if (in_lto_p)
3349 	free (name);
3350       if (!targets[i]->definition)
3351 	fprintf (f, " (no definition%s)",
3352 		 DECL_DECLARED_INLINE_P (targets[i]->decl)
3353 		 ? " inline" : "");
3354     }
3355   fprintf (f, "\n");
3356 }
3357 
3358 /* Dump all possible targets of a polymorphic call.  */
3359 
void
dump_possible_polymorphic_call_targets (FILE *f,
					tree otr_type,
					HOST_WIDE_INT otr_token,
					const ipa_polymorphic_call_context &ctx)
{
  vec <cgraph_node *> targets;
  bool final;
  odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
  unsigned int len;

  /* Nothing to dump when the type is not in the inheritance graph.  */
  if (!type)
    return;
  /* First the non-speculative target list.  */
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, false);
  fprintf (f, "  Targets of polymorphic call of type %i:", type->id);
  print_generic_expr (f, type->type, TDF_SLIM);
  fprintf (f, " token %i\n", (int)otr_token);

  ctx.dump (f);

  fprintf (f, "    %s%s%s%s\n      ",
	   final ? "This is a complete list." :
	   "This is partial list; extra targets may be defined in other units.",
	   ctx.maybe_in_construction ? " (base types included)" : "",
	   ctx.maybe_derived_type ? " (derived types included)" : "",
	   ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
  len = targets.length ();
  dump_targets (f, targets);

  /* Then the speculative list; only dump it if it differs.  */
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, true);
  if (targets.length () != len)
    {
      fprintf (f, "  Speculative targets:");
      dump_targets (f, targets);
    }
  /* Ugly: during callgraph construction the target cache may get populated
     before all targets are found.  While this is harmless (because all local
     types are discovered and only in those case we devirtualize fully and we
     don't do speculative devirtualization before IPA stage) it triggers
     assert here when dumping at that stage also populates the case with
     speculative targets.  Quietly ignore this.  */
  gcc_assert (symtab->state < IPA_SSA || targets.length () <= len);
  fprintf (f, "\n");
}
3408 
3409 
3410 /* Return true if N can be possibly target of a polymorphic call of
3411    OTR_TYPE/OTR_TOKEN.  */
3412 
3413 bool
3414 possible_polymorphic_call_target_p (tree otr_type,
3415 				    HOST_WIDE_INT otr_token,
3416 				    const ipa_polymorphic_call_context &ctx,
3417 				    struct cgraph_node *n)
3418 {
3419   vec <cgraph_node *> targets;
3420   unsigned int i;
3421   enum built_in_function fcode;
3422   bool final;
3423 
3424   if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
3425       && ((fcode = DECL_FUNCTION_CODE (n->decl)) == BUILT_IN_UNREACHABLE
3426           || fcode == BUILT_IN_TRAP))
3427     return true;
3428 
3429   if (is_cxa_pure_virtual_p (n->decl))
3430     return true;
3431 
3432   if (!odr_hash)
3433     return true;
3434   targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
3435   for (i = 0; i < targets.length (); i++)
3436     if (n->semantically_equivalent_p (targets[i]))
3437       return true;
3438 
3439   /* At a moment we allow middle end to dig out new external declarations
3440      as a targets of polymorphic calls.  */
3441   if (!final && !n->definition)
3442     return true;
3443   return false;
3444 }
3445 
3446 
3447 
3448 /* Return true if N can be possibly target of a polymorphic call of
3449    OBJ_TYPE_REF expression REF in STMT.  */
3450 
3451 bool
3452 possible_polymorphic_call_target_p (tree ref,
3453 				    gimple *stmt,
3454 				    struct cgraph_node *n)
3455 {
3456   ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
3457   tree call_fn = gimple_call_fn (stmt);
3458 
3459   return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
3460 					     tree_to_uhwi
3461 					       (OBJ_TYPE_REF_TOKEN (call_fn)),
3462 					     context,
3463 					     n);
3464 }
3465 
3466 
3467 /* After callgraph construction new external nodes may appear.
3468    Add them into the graph.  */
3469 
3470 void
3471 update_type_inheritance_graph (void)
3472 {
3473   struct cgraph_node *n;
3474 
3475   if (!odr_hash)
3476     return;
3477   free_polymorphic_call_targets_hash ();
3478   timevar_push (TV_IPA_INHERITANCE);
3479   /* We reconstruct the graph starting from types of all methods seen in the
3480      unit.  */
3481   FOR_EACH_FUNCTION (n)
3482     if (DECL_VIRTUAL_P (n->decl)
3483 	&& !n->definition
3484 	&& n->real_symbol_p ())
3485       get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
3486   timevar_pop (TV_IPA_INHERITANCE);
3487 }
3488 
3489 
3490 /* Return true if N looks like likely target of a polymorphic call.
3491    Rule out cxa_pure_virtual, noreturns, function declared cold and
3492    other obvious cases.  */
3493 
3494 bool
3495 likely_target_p (struct cgraph_node *n)
3496 {
3497   int flags;
3498   /* cxa_pure_virtual and similar things are not likely.  */
3499   if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
3500     return false;
3501   flags = flags_from_decl_or_type (n->decl);
3502   if (flags & ECF_NORETURN)
3503     return false;
3504   if (lookup_attribute ("cold",
3505 			DECL_ATTRIBUTES (n->decl)))
3506     return false;
3507   if (n->frequency < NODE_FREQUENCY_NORMAL)
3508     return false;
3509   /* If there are no live virtual tables referring the target,
3510      the only way the target can be called is an instance coming from other
3511      compilation unit; speculative devirtualization is built around an
3512      assumption that won't happen.  */
3513   if (!referenced_from_vtable_p (n))
3514     return false;
3515   return true;
3516 }
3517 
3518 /* Compare type warning records P1 and P2 and choose one with larger count;
3519    helper for qsort.  */
3520 
3521 int
3522 type_warning_cmp (const void *p1, const void *p2)
3523 {
3524   const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3525   const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3526 
3527   if (t1->dyn_count < t2->dyn_count)
3528    return 1;
3529   if (t1->dyn_count > t2->dyn_count)
3530    return -1;
3531   return t2->count - t1->count;
3532 }
3533 
3534 /* Compare decl warning records P1 and P2 and choose one with larger count;
3535    helper for qsort.  */
3536 
3537 int
3538 decl_warning_cmp (const void *p1, const void *p2)
3539 {
3540   const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3541   const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3542 
3543   if (t1->dyn_count < t2->dyn_count)
3544    return 1;
3545   if (t1->dyn_count > t2->dyn_count)
3546    return -1;
3547   return t2->count - t1->count;
3548 }
3549 
3550 
3551 /* Try to speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
3552    context CTX.  */
3553 
3554 struct cgraph_node *
3555 try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
3556 				  ipa_polymorphic_call_context ctx)
3557 {
3558   vec <cgraph_node *>targets
3559      = possible_polymorphic_call_targets
3560 	  (otr_type, otr_token, ctx, NULL, NULL, true);
3561   unsigned int i;
3562   struct cgraph_node *likely_target = NULL;
3563 
3564   for (i = 0; i < targets.length (); i++)
3565     if (likely_target_p (targets[i]))
3566       {
3567 	if (likely_target)
3568 	  return NULL;
3569 	likely_target = targets[i];
3570       }
3571   if (!likely_target
3572       ||!likely_target->definition
3573       || DECL_EXTERNAL (likely_target->decl))
3574     return NULL;
3575 
3576   /* Don't use an implicitly-declared destructor (c++/58678).  */
3577   struct cgraph_node *non_thunk_target
3578     = likely_target->function_symbol ();
3579   if (DECL_ARTIFICIAL (non_thunk_target->decl))
3580     return NULL;
3581   if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3582       && likely_target->can_be_discarded_p ())
3583     return NULL;
3584   return likely_target;
3585 }
3586 
3587 /* The ipa-devirt pass.
3588    When polymorphic call has only one likely target in the unit,
3589    turn it into a speculative call.  */
3590 
3591 static unsigned int
3592 ipa_devirt (void)
3593 {
3594   struct cgraph_node *n;
3595   hash_set<void *> bad_call_targets;
3596   struct cgraph_edge *e;
3597 
3598   int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
3599   int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
3600   int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
3601   int ndropped = 0;
3602 
3603   if (!odr_types_ptr)
3604     return 0;
3605 
3606   if (dump_file)
3607     dump_type_inheritance_graph (dump_file);
3608 
3609   /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
3610      This is implemented by setting up final_warning_records that are updated
3611      by get_polymorphic_call_targets.
3612      We need to clear cache in this case to trigger recomputation of all
3613      entries.  */
3614   if (warn_suggest_final_methods || warn_suggest_final_types)
3615     {
3616       final_warning_records = new (final_warning_record);
3617       final_warning_records->dyn_count = profile_count::zero ();
3618       final_warning_records->grow_type_warnings (odr_types.length ());
3619       free_polymorphic_call_targets_hash ();
3620     }
3621 
3622   FOR_EACH_DEFINED_FUNCTION (n)
3623     {
3624       bool update = false;
3625       if (!opt_for_fn (n->decl, flag_devirtualize))
3626 	continue;
3627       if (dump_file && n->indirect_calls)
3628 	fprintf (dump_file, "\n\nProcesing function %s\n",
3629 		 n->dump_name ());
3630       for (e = n->indirect_calls; e; e = e->next_callee)
3631 	if (e->indirect_info->polymorphic)
3632 	  {
3633 	    struct cgraph_node *likely_target = NULL;
3634 	    void *cache_token;
3635 	    bool final;
3636 
3637 	    if (final_warning_records)
3638 	      final_warning_records->dyn_count = e->count.ipa ();
3639 
3640 	    vec <cgraph_node *>targets
3641 	       = possible_polymorphic_call_targets
3642 		    (e, &final, &cache_token, true);
3643 	    unsigned int i;
3644 
3645 	    /* Trigger warnings by calculating non-speculative targets.  */
3646 	    if (warn_suggest_final_methods || warn_suggest_final_types)
3647 	      possible_polymorphic_call_targets (e);
3648 
3649 	    if (dump_file)
3650 	      dump_possible_polymorphic_call_targets
3651 		(dump_file, e);
3652 
3653 	    npolymorphic++;
3654 
3655 	    /* See if the call can be devirtualized by means of ipa-prop's
3656 	       polymorphic call context propagation.  If not, we can just
3657 	       forget about this call being polymorphic and avoid some heavy
3658 	       lifting in remove_unreachable_nodes that will otherwise try to
3659 	       keep all possible targets alive until inlining and in the inliner
3660 	       itself.
3661 
3662 	       This may need to be revisited once we add further ways to use
3663 	       the may edges, but it is a resonable thing to do right now.  */
3664 
3665 	    if ((e->indirect_info->param_index == -1
3666 		|| (!opt_for_fn (n->decl, flag_devirtualize_speculatively)
3667 		    && e->indirect_info->vptr_changed))
3668 		&& !flag_ltrans_devirtualize)
3669 	      {
3670 		e->indirect_info->polymorphic = false;
3671 		ndropped++;
3672 	        if (dump_file)
3673 		  fprintf (dump_file, "Dropping polymorphic call info;"
3674 			   " it can not be used by ipa-prop\n");
3675 	      }
3676 
3677 	    if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
3678 	      continue;
3679 
3680 	    if (!e->maybe_hot_p ())
3681 	      {
3682 		if (dump_file)
3683 		  fprintf (dump_file, "Call is cold\n\n");
3684 		ncold++;
3685 		continue;
3686 	      }
3687 	    if (e->speculative)
3688 	      {
3689 		if (dump_file)
3690 		  fprintf (dump_file, "Call is already speculated\n\n");
3691 		nspeculated++;
3692 
3693 		/* When dumping see if we agree with speculation.  */
3694 		if (!dump_file)
3695 		  continue;
3696 	      }
3697 	    if (bad_call_targets.contains (cache_token))
3698 	      {
3699 		if (dump_file)
3700 		  fprintf (dump_file, "Target list is known to be useless\n\n");
3701 		nmultiple++;
3702 		continue;
3703 	      }
3704 	    for (i = 0; i < targets.length (); i++)
3705 	      if (likely_target_p (targets[i]))
3706 		{
3707 		  if (likely_target)
3708 		    {
3709 		      likely_target = NULL;
3710 		      if (dump_file)
3711 			fprintf (dump_file, "More than one likely target\n\n");
3712 		      nmultiple++;
3713 		      break;
3714 		    }
3715 		  likely_target = targets[i];
3716 		}
3717 	    if (!likely_target)
3718 	      {
3719 		bad_call_targets.add (cache_token);
3720 	        continue;
3721 	      }
3722 	    /* This is reached only when dumping; check if we agree or disagree
3723  	       with the speculation.  */
3724 	    if (e->speculative)
3725 	      {
3726 		struct cgraph_edge *e2;
3727 		struct ipa_ref *ref;
3728 		e->speculative_call_info (e2, e, ref);
3729 		if (e2->callee->ultimate_alias_target ()
3730 		    == likely_target->ultimate_alias_target ())
3731 		  {
3732 		    fprintf (dump_file, "We agree with speculation\n\n");
3733 		    nok++;
3734 		  }
3735 		else
3736 		  {
3737 		    fprintf (dump_file, "We disagree with speculation\n\n");
3738 		    nwrong++;
3739 		  }
3740 		continue;
3741 	      }
3742 	    if (!likely_target->definition)
3743 	      {
3744 		if (dump_file)
3745 		  fprintf (dump_file, "Target is not a definition\n\n");
3746 		nnotdefined++;
3747 		continue;
3748 	      }
3749 	    /* Do not introduce new references to external symbols.  While we
3750 	       can handle these just well, it is common for programs to
3751 	       incorrectly with headers defining methods they are linked
3752 	       with.  */
3753 	    if (DECL_EXTERNAL (likely_target->decl))
3754 	      {
3755 		if (dump_file)
3756 		  fprintf (dump_file, "Target is external\n\n");
3757 		nexternal++;
3758 		continue;
3759 	      }
3760 	    /* Don't use an implicitly-declared destructor (c++/58678).  */
3761 	    struct cgraph_node *non_thunk_target
3762 	      = likely_target->function_symbol ();
3763 	    if (DECL_ARTIFICIAL (non_thunk_target->decl))
3764 	      {
3765 		if (dump_file)
3766 		  fprintf (dump_file, "Target is artificial\n\n");
3767 		nartificial++;
3768 		continue;
3769 	      }
3770 	    if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3771 		&& likely_target->can_be_discarded_p ())
3772 	      {
3773 		if (dump_file)
3774 		  fprintf (dump_file, "Target is overwritable\n\n");
3775 		noverwritable++;
3776 		continue;
3777 	      }
3778 	    else if (dbg_cnt (devirt))
3779 	      {
3780 		if (dump_enabled_p ())
3781                   {
3782                     location_t locus = gimple_location_safe (e->call_stmt);
3783                     dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
3784 				     "speculatively devirtualizing call "
3785 				     "in %s to %s\n",
3786 				     n->dump_name (),
3787 				     likely_target->dump_name ());
3788                   }
3789 		if (!likely_target->can_be_discarded_p ())
3790 		  {
3791 		    cgraph_node *alias;
3792 		    alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
3793 		    if (alias)
3794 		      likely_target = alias;
3795 		  }
3796 		nconverted++;
3797 		update = true;
3798 		e->make_speculative
3799 		  (likely_target, e->count.apply_scale (8, 10));
3800 	      }
3801 	  }
3802       if (update)
3803 	ipa_update_overall_fn_summary (n);
3804     }
3805   if (warn_suggest_final_methods || warn_suggest_final_types)
3806     {
3807       if (warn_suggest_final_types)
3808 	{
3809 	  final_warning_records->type_warnings.qsort (type_warning_cmp);
3810 	  for (unsigned int i = 0;
3811 	       i < final_warning_records->type_warnings.length (); i++)
3812 	    if (final_warning_records->type_warnings[i].count)
3813 	      {
3814 	        tree type = final_warning_records->type_warnings[i].type;
3815 	        int count = final_warning_records->type_warnings[i].count;
3816 	        profile_count dyn_count
3817 		  = final_warning_records->type_warnings[i].dyn_count;
3818 
3819 		if (!(dyn_count > 0))
3820 		  warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3821 			     OPT_Wsuggest_final_types, count,
3822 			     "Declaring type %qD final "
3823 			     "would enable devirtualization of %i call",
3824 			     "Declaring type %qD final "
3825 			     "would enable devirtualization of %i calls",
3826 			     type,
3827 			     count);
3828 		else
3829 		  warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3830 			     OPT_Wsuggest_final_types, count,
3831 			     "Declaring type %qD final "
3832 			     "would enable devirtualization of %i call "
3833 			     "executed %lli times",
3834 			     "Declaring type %qD final "
3835 			     "would enable devirtualization of %i calls "
3836 			     "executed %lli times",
3837 			     type,
3838 			     count,
3839 			     (long long) dyn_count.to_gcov_type ());
3840 	      }
3841 	}
3842 
3843       if (warn_suggest_final_methods)
3844 	{
3845 	  auto_vec<const decl_warn_count*> decl_warnings_vec;
3846 
3847 	  final_warning_records->decl_warnings.traverse
3848 	    <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
3849 	  decl_warnings_vec.qsort (decl_warning_cmp);
3850 	  for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
3851 	    {
3852 	      tree decl = decl_warnings_vec[i]->decl;
3853 	      int count = decl_warnings_vec[i]->count;
3854 	      profile_count dyn_count
3855 		  = decl_warnings_vec[i]->dyn_count;
3856 
3857 	      if (!(dyn_count > 0))
3858 		if (DECL_CXX_DESTRUCTOR_P (decl))
3859 		  warning_n (DECL_SOURCE_LOCATION (decl),
3860 			      OPT_Wsuggest_final_methods, count,
3861 			      "Declaring virtual destructor of %qD final "
3862 			      "would enable devirtualization of %i call",
3863 			      "Declaring virtual destructor of %qD final "
3864 			      "would enable devirtualization of %i calls",
3865 			      DECL_CONTEXT (decl), count);
3866 		else
3867 		  warning_n (DECL_SOURCE_LOCATION (decl),
3868 			      OPT_Wsuggest_final_methods, count,
3869 			      "Declaring method %qD final "
3870 			      "would enable devirtualization of %i call",
3871 			      "Declaring method %qD final "
3872 			      "would enable devirtualization of %i calls",
3873 			      decl, count);
3874 	       else if (DECL_CXX_DESTRUCTOR_P (decl))
3875 		  warning_n (DECL_SOURCE_LOCATION (decl),
3876 			      OPT_Wsuggest_final_methods, count,
3877 			      "Declaring virtual destructor of %qD final "
3878 			      "would enable devirtualization of %i call "
3879 			      "executed %lli times",
3880 			      "Declaring virtual destructor of %qD final "
3881 			      "would enable devirtualization of %i calls "
3882 			      "executed %lli times",
3883 			      DECL_CONTEXT (decl), count,
3884 			      (long long)dyn_count.to_gcov_type ());
3885 		else
3886 		  warning_n (DECL_SOURCE_LOCATION (decl),
3887 			      OPT_Wsuggest_final_methods, count,
3888 			      "Declaring method %qD final "
3889 			      "would enable devirtualization of %i call "
3890 			      "executed %lli times",
3891 			      "Declaring method %qD final "
3892 			      "would enable devirtualization of %i calls "
3893 			      "executed %lli times",
3894 			      decl, count,
3895 			      (long long)dyn_count.to_gcov_type ());
3896 	    }
3897 	}
3898 
3899       delete (final_warning_records);
3900       final_warning_records = 0;
3901     }
3902 
3903   if (dump_file)
3904     fprintf (dump_file,
3905 	     "%i polymorphic calls, %i devirtualized,"
3906 	     " %i speculatively devirtualized, %i cold\n"
3907 	     "%i have multiple targets, %i overwritable,"
3908 	     " %i already speculated (%i agree, %i disagree),"
3909 	     " %i external, %i not defined, %i artificial, %i infos dropped\n",
3910 	     npolymorphic, ndevirtualized, nconverted, ncold,
3911 	     nmultiple, noverwritable, nspeculated, nok, nwrong,
3912 	     nexternal, nnotdefined, nartificial, ndropped);
3913   return ndevirtualized || ndropped ? TODO_remove_functions : 0;
3914 }
3915 
3916 namespace {
3917 
/* Pass metadata for the IPA devirtualization pass; consumed by the
   pass manager via pass_ipa_devirt below.  */
const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};
3930 
3931 class pass_ipa_devirt : public ipa_opt_pass_d
3932 {
3933 public:
3934   pass_ipa_devirt (gcc::context *ctxt)
3935     : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3936 		      NULL, /* generate_summary */
3937 		      NULL, /* write_summary */
3938 		      NULL, /* read_summary */
3939 		      NULL, /* write_optimization_summary */
3940 		      NULL, /* read_optimization_summary */
3941 		      NULL, /* stmt_fixup */
3942 		      0, /* function_transform_todo_flags_start */
3943 		      NULL, /* function_transform */
3944 		      NULL) /* variable_transform */
3945   {}
3946 
3947   /* opt_pass methods: */
3948   virtual bool gate (function *)
3949     {
3950       /* In LTO, always run the IPA passes and decide on function basis if the
3951 	 pass is enabled.  */
3952       if (in_lto_p)
3953 	return true;
3954       return (flag_devirtualize
3955 	      && (flag_devirtualize_speculatively
3956 		  || (warn_suggest_final_methods
3957 		      || warn_suggest_final_types))
3958 	      && optimize);
3959     }
3960 
3961   virtual unsigned int execute (function *) { return ipa_devirt (); }
3962 
3963 }; // class pass_ipa_devirt
3964 
3965 } // anon namespace
3966 
3967 ipa_opt_pass_d *
3968 make_pass_ipa_devirt (gcc::context *ctxt)
3969 {
3970   return new pass_ipa_devirt (ctxt);
3971 }
3972 
3973 #include "gt-ipa-devirt.h"
3974