1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2022 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
44
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vector are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of base binfos (which differs from BINFO_VTABLE of
65 binfo associated to the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
76
77 polymorphic (indirect) call
78 This is callgraph representation of virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
97
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given the parameters found in an
103 indirect polymorphic edge, all possible polymorphic call targets of the call.
104
105 pass_ipa_devirt performs simple speculative devirtualization.
106 */
107
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "backend.h"
112 #include "rtl.h"
113 #include "tree.h"
114 #include "gimple.h"
115 #include "alloc-pool.h"
116 #include "tree-pass.h"
117 #include "cgraph.h"
118 #include "lto-streamer.h"
119 #include "fold-const.h"
120 #include "print-tree.h"
121 #include "calls.h"
122 #include "ipa-utils.h"
123 #include "gimple-fold.h"
124 #include "symbol-summary.h"
125 #include "tree-vrp.h"
126 #include "ipa-prop.h"
127 #include "ipa-fnsummary.h"
128 #include "demangle.h"
129 #include "dbgcnt.h"
130 #include "gimple-pretty-print.h"
131 #include "intl.h"
132 #include "stringpool.h"
133 #include "attribs.h"
134 #include "data-streamer.h"
135 #include "lto-streamer.h"
136 #include "streamer-hooks.h"
137
/* Hash based set of pairs of types.  Used by odr_types_equivalent_p to
   remember which (T1, T2) pairs are already being compared so that
   recursive types do not cause infinite recursion.  */
struct type_pair
{
  /* First member of the pair; NULL marks an empty hash slot
     (see default_hash_traits <type_pair>::is_empty below).  */
  tree first;
  /* Second member of the pair.  */
  tree second;
};
144
/* Hash traits so hash_set<type_pair> works out of the box.  The empty
   slot is encoded as FIRST == NULL and no deleted slots are used
   (typed_noop_remove).  */
template <>
struct default_hash_traits <type_pair>
  : typed_noop_remove <type_pair>
{
  GTY((skip)) typedef type_pair value_type;
  GTY((skip)) typedef type_pair compare_type;

  /* Combine the two type UIDs; cheap and stable within one run.  */
  static hashval_t
  hash (type_pair p)
  {
    return TYPE_UID (p.first) ^ TYPE_UID (p.second);
  }

  /* All-zero bits represent the empty slot.  */
  static const bool empty_zero_p = true;

  /* A slot is empty when its first member is NULL.  */
  static bool
  is_empty (type_pair p)
  {
    return p.first == NULL;
  }

  /* Deletion is never used for this set.  */
  static bool
  is_deleted (type_pair p ATTRIBUTE_UNUSED)
  {
    return false;
  }

  /* Pairs are equal only memberwise; callers canonicalize the order
     of FIRST/SECOND before lookup.  */
  static bool
  equal (const type_pair &a, const type_pair &b)
  {
    return a.first==b.first && a.second == b.second;
  }

  /* Mark a slot empty by clearing its first member.  */
  static void
  mark_empty (type_pair &e)
  {
    e.first = NULL;
  }
};
178
/* HACK alert: this is used to communicate with ipa-inline-transform that
   thunk is being expanded and there is no need to clear the polymorphic
   call target cache.  */
bool thunk_expansion;

/* Forward declarations for helpers defined later in this file.  */
static bool odr_types_equivalent_p (tree, tree, bool, bool *,
				    hash_set<type_pair> *,
				    location_t, location_t);
static void warn_odr (tree t1, tree t2, tree st1, tree st2,
		      bool warn, bool *warned, const char *reason);

/* Set once any ODR violation has been diagnosed in this run.  */
static bool odr_violation_reported = false;


/* Pointer set of all call targets appearing in the cache.  */
static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
195
/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type: the representative main variant for this ODR class.  */
  tree type;
  /* All bases; built only for main variants of types.
     GTY((skip)): not walked by the garbage collector.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one (multiple tree nodes for one
     ODR type appear only with LTO).  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL; mirrors TYPES for fast
     membership tests.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type? */
  bool all_derivations_known;
  /* Did we report ODR violation here?  Once set, further comparisons of
     this type are skipped to avoid duplicate diagnostics.  */
  bool odr_violated;
  /* Set when virtual table without RTTI prevailed table with.  */
  bool rtti_broken;
  /* Set when the canonical type is determined using the type name.  */
  bool tbaa_enabled;
};
228
229 /* Return TRUE if all derived types of T are known and thus
230 we may consider the walk of derived type complete.
231
232 This is typically true only for final anonymous namespace types and types
233 defined within functions (that may be COMDAT and thus shared across units,
234 but with the same set of derived types). */
235
236 bool
type_all_derivations_known_p(const_tree t)237 type_all_derivations_known_p (const_tree t)
238 {
239 if (TYPE_FINAL_P (t))
240 return true;
241 if (flag_ltrans)
242 return false;
243 /* Non-C++ types may have IDENTIFIER_NODE here, do not crash. */
244 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
245 return true;
246 if (type_in_anonymous_namespace_p (t))
247 return true;
248 return (decl_function_context (TYPE_NAME (t)) != NULL);
249 }
250
251 /* Return TRUE if type's constructors are all visible. */
252
253 static bool
type_all_ctors_visible_p(tree t)254 type_all_ctors_visible_p (tree t)
255 {
256 return !flag_ltrans
257 && symtab->state >= CONSTRUCTION
258 /* We cannot always use type_all_derivations_known_p.
259 For function local types we must assume case where
260 the function is COMDAT and shared in between units.
261
262 TODO: These cases are quite easy to get, but we need
263 to keep track of C++ privatizing via -Wno-weak
264 as well as the IPA privatizing. */
265 && type_in_anonymous_namespace_p (t);
266 }
267
268 /* Return TRUE if type may have instance. */
269
270 static bool
type_possibly_instantiated_p(tree t)271 type_possibly_instantiated_p (tree t)
272 {
273 tree vtable;
274 varpool_node *vnode;
275
276 /* TODO: Add abstract types here. */
277 if (!type_all_ctors_visible_p (t))
278 return true;
279
280 vtable = BINFO_VTABLE (TYPE_BINFO (t));
281 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
282 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
283 vnode = varpool_node::get (vtable);
284 return vnode && vnode->definition;
285 }
286
287 /* Return true if T or type derived from T may have instance. */
288
289 static bool
type_or_derived_type_possibly_instantiated_p(odr_type t)290 type_or_derived_type_possibly_instantiated_p (odr_type t)
291 {
292 if (type_possibly_instantiated_p (t->type))
293 return true;
294 for (auto derived : t->derived_types)
295 if (type_or_derived_type_possibly_instantiated_p (derived))
296 return true;
297 return false;
298 }
299
/* Hash used to unify ODR types based on their mangled name and for anonymous
   namespace types.  Compares a stored odr_type_d against a bare tree type
   node, so lookup does not require building a temporary odr_type_d.  */

struct odr_name_hasher : pointer_hash <odr_type_d>
{
  /* Lookups are keyed directly by the tree type node.  */
  typedef union tree_node *compare_type;
  static inline hashval_t hash (const odr_type_d *);
  static inline bool equal (const odr_type_d *, const tree_node *);
  static inline void remove (odr_type_d *);
};
310
311 static bool
can_be_name_hashed_p(tree t)312 can_be_name_hashed_p (tree t)
313 {
314 return (!in_lto_p || odr_type_p (t));
315 }
316
317 /* Hash type by its ODR name. */
318
319 static hashval_t
hash_odr_name(const_tree t)320 hash_odr_name (const_tree t)
321 {
322 gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
323
324 /* If not in LTO, all main variants are unique, so we can do
325 pointer hash. */
326 if (!in_lto_p)
327 return htab_hash_pointer (t);
328
329 /* Anonymous types are unique. */
330 if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
331 return htab_hash_pointer (t);
332
333 gcc_checking_assert (TYPE_NAME (t)
334 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)));
335 return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
336 }
337
/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_name_hasher::hash (const odr_type_d *odr_type)
{
  /* Delegate to the name-based hash of the leader type.  */
  return hash_odr_name (odr_type->type);
}
345
346 /* For languages with One Definition Rule, work out if
347 types are the same based on their name.
348
349 This is non-trivial for LTO where minor differences in
350 the type representation may have prevented type merging
351 to merge two copies of otherwise equivalent type.
352
353 Until we start streaming mangled type names, this function works
354 only for polymorphic types.
355 */
356
357 bool
types_same_for_odr(const_tree type1,const_tree type2)358 types_same_for_odr (const_tree type1, const_tree type2)
359 {
360 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
361
362 type1 = TYPE_MAIN_VARIANT (type1);
363 type2 = TYPE_MAIN_VARIANT (type2);
364
365 if (type1 == type2)
366 return true;
367
368 if (!in_lto_p)
369 return false;
370
371 /* Anonymous namespace types are never duplicated. */
372 if ((type_with_linkage_p (type1) && type_in_anonymous_namespace_p (type1))
373 || (type_with_linkage_p (type2) && type_in_anonymous_namespace_p (type2)))
374 return false;
375
376 /* If both type has mangled defined check if they are same.
377 Watch for anonymous types which are all mangled as "<anon">. */
378 if (!type_with_linkage_p (type1) || !type_with_linkage_p (type2))
379 return false;
380 if (type_in_anonymous_namespace_p (type1)
381 || type_in_anonymous_namespace_p (type2))
382 return false;
383 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
384 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
385 }
386
387 /* Return true if we can decide on ODR equivalency.
388
389 In non-LTO it is always decide, in LTO however it depends in the type has
390 ODR info attached. */
391
392 bool
types_odr_comparable(tree t1,tree t2)393 types_odr_comparable (tree t1, tree t2)
394 {
395 return (!in_lto_p
396 || TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2)
397 || (odr_type_p (TYPE_MAIN_VARIANT (t1))
398 && odr_type_p (TYPE_MAIN_VARIANT (t2))));
399 }
400
401 /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
402 known, be conservative and return false. */
403
404 bool
types_must_be_same_for_odr(tree t1,tree t2)405 types_must_be_same_for_odr (tree t1, tree t2)
406 {
407 if (types_odr_comparable (t1, t2))
408 return types_same_for_odr (t1, t2);
409 else
410 return TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2);
411 }
412
413 /* If T is compound type, return type it is based on. */
414
415 static tree
compound_type_base(const_tree t)416 compound_type_base (const_tree t)
417 {
418 if (TREE_CODE (t) == ARRAY_TYPE
419 || POINTER_TYPE_P (t)
420 || TREE_CODE (t) == COMPLEX_TYPE
421 || VECTOR_TYPE_P (t))
422 return TREE_TYPE (t);
423 if (TREE_CODE (t) == METHOD_TYPE)
424 return TYPE_METHOD_BASETYPE (t);
425 if (TREE_CODE (t) == OFFSET_TYPE)
426 return TYPE_OFFSET_BASETYPE (t);
427 return NULL_TREE;
428 }
429
430 /* Return true if T is either ODR type or compound type based from it.
431 If the function return true, we know that T is a type originating from C++
432 source even at link-time. */
433
434 bool
odr_or_derived_type_p(const_tree t)435 odr_or_derived_type_p (const_tree t)
436 {
437 do
438 {
439 if (odr_type_p (TYPE_MAIN_VARIANT (t)))
440 return true;
441 /* Function type is a tricky one. Basically we can consider it
442 ODR derived if return type or any of the parameters is.
443 We need to check all parameters because LTO streaming merges
444 common types (such as void) and they are not considered ODR then. */
445 if (TREE_CODE (t) == FUNCTION_TYPE)
446 {
447 if (TYPE_METHOD_BASETYPE (t))
448 t = TYPE_METHOD_BASETYPE (t);
449 else
450 {
451 if (TREE_TYPE (t) && odr_or_derived_type_p (TREE_TYPE (t)))
452 return true;
453 for (t = TYPE_ARG_TYPES (t); t; t = TREE_CHAIN (t))
454 if (odr_or_derived_type_p (TYPE_MAIN_VARIANT (TREE_VALUE (t))))
455 return true;
456 return false;
457 }
458 }
459 else
460 t = compound_type_base (t);
461 }
462 while (t);
463 return t;
464 }
465
466 /* Compare types T1 and T2 and return true if they are
467 equivalent. */
468
469 inline bool
equal(const odr_type_d * o1,const tree_node * t2)470 odr_name_hasher::equal (const odr_type_d *o1, const tree_node *t2)
471 {
472 tree t1 = o1->type;
473
474 gcc_checking_assert (TYPE_MAIN_VARIANT (t2) == t2);
475 gcc_checking_assert (TYPE_MAIN_VARIANT (t1) == t1);
476 if (t1 == t2)
477 return true;
478 if (!in_lto_p)
479 return false;
480 /* Check for anonymous namespaces. */
481 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
482 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
483 return false;
484 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1)));
485 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
486 return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
487 == DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
488 }
489
490 /* Free ODR type V. */
491
492 inline void
remove(odr_type_d * v)493 odr_name_hasher::remove (odr_type_d *v)
494 {
495 v->bases.release ();
496 v->derived_types.release ();
497 if (v->types_set)
498 delete v->types_set;
499 ggc_free (v);
500 }
501
/* ODR type hash used to look up ODR type based on tree type node.  */

typedef hash_table<odr_name_hasher> odr_hash_type;
static odr_hash_type *odr_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
/* Convenience accessor dereferencing the GC-managed vector pointer.  */
#define odr_types (*odr_types_ptr)

/* All enums defined and accessible for the unit.  */
static GTY(()) vec <tree, va_gc> *odr_enums;
516
/* Information we hold about value defined by an enum type.  */
struct odr_enum_val
{
  /* Name of the enumerator.  */
  const char *name;
  /* Its value.  */
  wide_int val;
  /* Location of the enumerator's definition.  */
  location_t locus;
};
524
/* Information about enum values.  */
struct odr_enum
{
  /* Location of the enum's definition.  */
  location_t locus;
  /* All enumerators of the enum, in definition order.  */
  auto_vec<odr_enum_val, 0> vals;
  /* Set once a mismatch for this enum has been diagnosed, so it is
     reported only once.  */
  bool warned;
};
532
/* A table of all ODR enum definitions, keyed by mangled name.  */
static hash_map <nofree_string_hash, odr_enum> *odr_enum_map = NULL;
/* Obstack holding the strings referenced from ODR_ENUM_MAP.  */
static struct obstack odr_enum_obstack;
536
537 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
538 void
set_type_binfo(tree type,tree binfo)539 set_type_binfo (tree type, tree binfo)
540 {
541 for (; type; type = TYPE_NEXT_VARIANT (type))
542 if (COMPLETE_TYPE_P (type))
543 TYPE_BINFO (type) = binfo;
544 else
545 gcc_assert (!TYPE_BINFO (type));
546 }
547
548 /* Return true if type variants match.
549 This assumes that we already verified that T1 and T2 are variants of the
550 same type. */
551
552 static bool
type_variants_equivalent_p(tree t1,tree t2)553 type_variants_equivalent_p (tree t1, tree t2)
554 {
555 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
556 return false;
557
558 if (comp_type_attributes (t1, t2) != 1)
559 return false;
560
561 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
562 && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
563 return false;
564
565 return true;
566 }
567
/* Compare T1 and T2 based on name or structure.
   VISITED remembers pairs already under comparison so recursive types
   terminate; LOC1/LOC2 are forwarded to odr_types_equivalent_p for
   diagnostics.  */

static bool
odr_subtypes_equivalent_p (tree t1, tree t2,
			   hash_set<type_pair> *visited,
			   location_t loc1, location_t loc2)
{

  /* This can happen in incomplete types that should be handled earlier.  */
  gcc_assert (t1 && t2);

  if (t1 == t2)
    return true;

  /* Anonymous namespace types must match exactly.  */
  if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1))
       && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1)))
      || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2))
	  && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2))))
    return false;

  /* For ODR types be sure to compare their names.
     To support -Wno-odr-type-merging we allow one type to be non-ODR
     and other ODR even though it is a violation.  */
  if (types_odr_comparable (t1, t2))
    {
      /* Do not repeat comparisons for types already known violated.  */
      if (t1 != t2
	  && odr_type_p (TYPE_MAIN_VARIANT (t1))
	  && get_odr_type (TYPE_MAIN_VARIANT (t1), true)->odr_violated)
	return false;
      if (!types_same_for_odr (t1, t2))
	return false;
      if (!type_variants_equivalent_p (t1, t2))
	return false;
      /* Limit recursion: If subtypes are ODR types and we know
	 that they are same, be happy.  */
      if (odr_type_p (TYPE_MAIN_VARIANT (t1)))
	return true;
    }

  /* Component types, builtins and possibly violating ODR types
     have to be compared structurally.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;
  if (AGGREGATE_TYPE_P (t1)
      && (TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
    return false;

  /* Canonicalize the pair by TYPE_UID so (A,B) and (B,A) hit the same
     VISITED entry.  */
  type_pair pair={TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2)};
  if (TYPE_UID (TYPE_MAIN_VARIANT (t1)) > TYPE_UID (TYPE_MAIN_VARIANT (t2)))
    {
      pair.first = TYPE_MAIN_VARIANT (t2);
      pair.second = TYPE_MAIN_VARIANT (t1);
    }
  /* add returns true when the pair was already present; assume the types
     are equivalent then so that recursive types do not loop forever.  */
  if (visited->add (pair))
    return true;
  if (!odr_types_equivalent_p (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2),
			       false, NULL, visited, loc1, loc2))
    return false;
  if (!type_variants_equivalent_p (t1, t2))
    return false;
  return true;
}
631
632 /* Return true if DECL1 and DECL2 are identical methods. Consider
633 name equivalent to name.localalias.xyz. */
634
635 static bool
methods_equal_p(tree decl1,tree decl2)636 methods_equal_p (tree decl1, tree decl2)
637 {
638 if (DECL_ASSEMBLER_NAME (decl1) == DECL_ASSEMBLER_NAME (decl2))
639 return true;
640 const char sep = symbol_table::symbol_suffix_separator ();
641
642 const char *name1 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl1));
643 const char *ptr1 = strchr (name1, sep);
644 int len1 = ptr1 ? ptr1 - name1 : strlen (name1);
645
646 const char *name2 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl2));
647 const char *ptr2 = strchr (name2, sep);
648 int len2 = ptr2 ? ptr2 - name2 : strlen (name2);
649
650 if (len1 != len2)
651 return false;
652 return !strncmp (name1, name2, len1);
653 }
654
/* Compare two virtual tables, PREVAILING and VTABLE and output ODR
   violation warnings.
   PREVAILING is the copy that was kept by symbol resolution; VTABLE is the
   discarded duplicate being checked against it.  */

void
compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
{
  int n1, n2;

  /* One decl is marked virtual and the other is not: this is already an
     ODR violation.  Swap so VTABLE names the non-virtual one, warn, and
     stop; further comparison is meaningless.  */
  if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
    {
      odr_violation_reported = true;
      if (DECL_VIRTUAL_P (prevailing->decl))
	{
	  varpool_node *tmp = prevailing;
	  prevailing = vtable;
	  vtable = tmp;
	}
      auto_diagnostic_group d;
      if (warning_at (DECL_SOURCE_LOCATION
		       (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
		      OPT_Wodr,
		      "virtual table of type %qD violates one definition rule",
		      DECL_CONTEXT (vtable->decl)))
	inform (DECL_SOURCE_LOCATION (prevailing->decl),
		"variable of same assembler name as the virtual table is "
		"defined in another translation unit");
      return;
    }
  /* Nothing to compare unless both copies have a definition.  */
  if (!prevailing->definition || !vtable->definition)
    return;

  /* If we do not stream ODR type info, do not bother to do useful compare.  */
  if (!TYPE_BINFO (DECL_CONTEXT (vtable->decl))
      || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable->decl))))
    return;

  odr_type class_type = get_odr_type (DECL_CONTEXT (vtable->decl), true);

  /* Violations for this type were already reported; avoid duplicates.  */
  if (class_type->odr_violated)
    return;

  /* Walk the reference lists of both vtables in parallel: N1/REF1 index
     PREVAILING, N2/REF2 index VTABLE.  */
  for (n1 = 0, n2 = 0; true; n1++, n2++)
    {
      struct ipa_ref *ref1, *ref2;
      bool end1, end2;

      end1 = !prevailing->iterate_reference (n1, ref1);
      end2 = !vtable->iterate_reference (n2, ref2);

      /* !DECL_VIRTUAL_P means RTTI entry;
	 We warn when RTTI is lost because non-RTTI prevails; we silently
	 accept the other case.  */
      while (!end2
	     && (end1
		 || (methods_equal_p (ref1->referred->decl,
				      ref2->referred->decl)
		     && TREE_CODE (ref1->referred->decl) == FUNCTION_DECL))
	     && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
	{
	  /* Report the RTTI loss only once per class.  */
	  if (!class_type->rtti_broken)
	    {
	      auto_diagnostic_group d;
	      if (warning_at (DECL_SOURCE_LOCATION
			        (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
			      OPT_Wodr,
			      "virtual table of type %qD contains RTTI "
			      "information",
			      DECL_CONTEXT (vtable->decl)))
		{
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "but is prevailed by one without from other"
			  " translation unit");
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "RTTI will not work on this type");
		  class_type->rtti_broken = true;
		}
	    }
	  /* Skip the extra RTTI entry in VTABLE.  */
	  n2++;
	  end2 = !vtable->iterate_reference (n2, ref2);
	}
      /* Mirror case: skip extra non-function (RTTI) entries in PREVAILING
	 silently.  */
      while (!end1
	     && (end2
		 || (methods_equal_p (ref2->referred->decl, ref1->referred->decl)
		     && TREE_CODE (ref2->referred->decl) == FUNCTION_DECL))
	     && TREE_CODE (ref1->referred->decl) != FUNCTION_DECL)
	{
	  n1++;
	  end1 = !prevailing->iterate_reference (n1, ref1);
	}

      /* Finished?  */
      if (end1 && end2)
	{
	  /* Extra paranoia; compare the sizes.  We do not have information
	     about virtual inheritance offsets, so just be sure that these
	     match.
	     Do this as very last check so the not very informative error
	     is not output too often.  */
	  if (DECL_SIZE (prevailing->decl) != DECL_SIZE (vtable->decl))
	    {
	      class_type->odr_violated = true;
	      auto_diagnostic_group d;
	      tree ctx = TYPE_NAME (DECL_CONTEXT (vtable->decl));
	      if (warning_at (DECL_SOURCE_LOCATION (ctx), OPT_Wodr,
			      "virtual table of type %qD violates "
			      "one definition rule",
			      DECL_CONTEXT (vtable->decl)))
		{
		  ctx = TYPE_NAME (DECL_CONTEXT (prevailing->decl));
		  inform (DECL_SOURCE_LOCATION (ctx),
			  "the conflicting type defined in another translation"
			  " unit has virtual table of different size");
		}
	    }
	  return;
	}

      if (!end1 && !end2)
	{
	  /* Matching entries: advance both cursors.  */
	  if (methods_equal_p (ref1->referred->decl, ref2->referred->decl))
	    continue;

	  class_type->odr_violated = true;

	  /* If the loops above stopped on non-virtual pointer, we have
	     mismatch in RTTI information mangling.  */
	  if (TREE_CODE (ref1->referred->decl) != FUNCTION_DECL
	      && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
	    {
	      auto_diagnostic_group d;
	      if (warning_at (DECL_SOURCE_LOCATION
			        (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
			      OPT_Wodr,
			      "virtual table of type %qD violates "
			      "one definition rule",
			      DECL_CONTEXT (vtable->decl)))
		{
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "the conflicting type defined in another translation "
			  "unit with different RTTI information");
		}
	      return;
	    }
	  /* At this point both REF1 and REF2 points either to virtual table
	     or virtual method.  If one points to virtual table and other to
	     method we can complain the same way as if one table was shorter
	     than other pointing out the extra method.  */
	  if (TREE_CODE (ref1->referred->decl)
	      != TREE_CODE (ref2->referred->decl))
	    {
	      if (VAR_P (ref1->referred->decl))
		end1 = true;
	      else if (VAR_P (ref2->referred->decl))
		end2 = true;
	    }
	}

      class_type->odr_violated = true;

      /* Complain about size mismatch.  Either we have too many virtual
	 functions or too many virtual table pointers.  */
      if (end1 || end2)
	{
	  /* Swap so VTABLE/REF1 name the longer table's extra entry.  */
	  if (end1)
	    {
	      varpool_node *tmp = prevailing;
	      prevailing = vtable;
	      vtable = tmp;
	      ref1 = ref2;
	    }
	  auto_diagnostic_group d;
	  if (warning_at (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
			  OPT_Wodr,
			  "virtual table of type %qD violates "
			  "one definition rule",
			  DECL_CONTEXT (vtable->decl)))
	    {
	      if (TREE_CODE (ref1->referring->decl) == FUNCTION_DECL)
		{
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "the conflicting type defined in another translation "
			  "unit");
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
			  "contains additional virtual method %qD",
			  ref1->referred->decl);
		}
	      else
		{
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "the conflicting type defined in another translation "
			  "unit has virtual table with more entries");
		}
	    }
	  return;
	}

      /* And in the last case we have either mismatch in between two virtual
	 methods or two virtual table pointers.  */
      auto_diagnostic_group d;
      if (warning_at (DECL_SOURCE_LOCATION
		        (TYPE_NAME (DECL_CONTEXT (vtable->decl))), OPT_Wodr,
		      "virtual table of type %qD violates "
		      "one definition rule",
		      DECL_CONTEXT (vtable->decl)))
	{
	  if (TREE_CODE (ref1->referred->decl) == FUNCTION_DECL)
	    {
	      inform (DECL_SOURCE_LOCATION
		        (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
		      "the conflicting type defined in another translation "
		      "unit");
	      gcc_assert (TREE_CODE (ref2->referred->decl)
			  == FUNCTION_DECL);
	      inform (DECL_SOURCE_LOCATION
			(ref1->referred->ultimate_alias_target ()->decl),
		      "virtual method %qD",
		      ref1->referred->ultimate_alias_target ()->decl);
	      inform (DECL_SOURCE_LOCATION
			(ref2->referred->ultimate_alias_target ()->decl),
		      "ought to match virtual method %qD but does not",
		      ref2->referred->ultimate_alias_target ()->decl);
	    }
	  else
	    inform (DECL_SOURCE_LOCATION
		      (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
		    "the conflicting type defined in another translation "
		    "unit has virtual table with different contents");
	  return;
	}
    }
}
893
/* Output ODR violation warning about T1 and T2 with REASON.
   Display location of ST1 and ST2 if REASON speaks about field or
   method of the type.
   If WARN is false, do nothing.  Set WARNED if warning was indeed
   output.  */

static void
warn_odr (tree t1, tree t2, tree st1, tree st2,
	  bool warn, bool *warned, const char *reason)
{
  tree decl2 = TYPE_NAME (TYPE_MAIN_VARIANT (t2));
  if (warned)
    *warned = false;

  /* Nothing to report without a name for T1 or when warnings are off.  */
  if (!warn || !TYPE_NAME(TYPE_MAIN_VARIANT (t1)))
    return;

  /* ODR warnings are output during LTO streaming; we must apply location
     cache for potential warnings to be output correctly.  */
  if (lto_location_cache::current_cache)
    lto_location_cache::current_cache->apply_location_cache ();

  auto_diagnostic_group d;
  /* Mention the typedef name too when T1 is a named variant of its main
     variant.  Bail out if the warning itself is suppressed.  */
  if (t1 != TYPE_MAIN_VARIANT (t1)
      && TYPE_NAME (t1) != TYPE_NAME (TYPE_MAIN_VARIANT (t1)))
    {
      if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1))),
		       OPT_Wodr, "type %qT (typedef of %qT) violates the "
		       "C++ One Definition Rule",
		       t1, TYPE_MAIN_VARIANT (t1)))
	return;
    }
  else
    {
      if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1))),
		       OPT_Wodr, "type %qT violates the C++ One Definition Rule",
		       t1))
	return;
    }
  /* No sub-decls to point at: only the REASON note below is emitted.  */
  if (!st1 && !st2)
    ;
  /* For FIELD_DECL support also case where one of fields is
     NULL - this is used when the structures have mismatching number of
     elements.  */
  else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      if (!st1)
	{
	  st1 = st2;
	  st2 = NULL;
	}
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is field %qD",
	      st1);
      /* Point the final REASON note at the other unit's field if known.  */
      if (st2)
	decl2 = st2;
    }
  else if (TREE_CODE (st1) == FUNCTION_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is method %qD",
	      st1);
      decl2 = st2;
    }
  else
    return;
  inform (DECL_SOURCE_LOCATION (decl2), reason);

  if (warned)
    *warned = true;
}
969
/* Return true if T1 and T2 are incompatible and we want to recursively
   dive into them from warn_types_mismatch to give a sensible answer.  */
972
973 static bool
type_mismatch_p(tree t1,tree t2)974 type_mismatch_p (tree t1, tree t2)
975 {
976 if (odr_or_derived_type_p (t1) && odr_or_derived_type_p (t2)
977 && !odr_types_equivalent_p (t1, t2))
978 return true;
979 return !types_compatible_p (t1, t2);
980 }
981
982
/* Types T1 and T2 were found to be incompatible in a context where they
   can't be (either used to declare a symbol of the same assembler name or
   unified by the ODR rule).  We already output a warning about this, but
   if possible, output extra information on how the types mismatch.

   This is hard to do in general.  We basically handle the common cases.

   If LOC1 and LOC2 are meaningful locations, use them in the case the
   types themselves do not have one.  */
992
993 void
warn_types_mismatch(tree t1,tree t2,location_t loc1,location_t loc2)994 warn_types_mismatch (tree t1, tree t2, location_t loc1, location_t loc2)
995 {
996 /* Location of type is known only if it has TYPE_NAME and the name is
997 TYPE_DECL. */
998 location_t loc_t1 = TYPE_NAME (t1) && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
999 ? DECL_SOURCE_LOCATION (TYPE_NAME (t1))
1000 : UNKNOWN_LOCATION;
1001 location_t loc_t2 = TYPE_NAME (t2) && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
1002 ? DECL_SOURCE_LOCATION (TYPE_NAME (t2))
1003 : UNKNOWN_LOCATION;
1004 bool loc_t2_useful = false;
1005
1006 /* With LTO it is a common case that the location of both types match.
1007 See if T2 has a location that is different from T1. If so, we will
1008 inform user about the location.
1009 Do not consider the location passed to us in LOC1/LOC2 as those are
1010 already output. */
1011 if (loc_t2 > BUILTINS_LOCATION && loc_t2 != loc_t1)
1012 {
1013 if (loc_t1 <= BUILTINS_LOCATION)
1014 loc_t2_useful = true;
1015 else
1016 {
1017 expanded_location xloc1 = expand_location (loc_t1);
1018 expanded_location xloc2 = expand_location (loc_t2);
1019
1020 if (strcmp (xloc1.file, xloc2.file)
1021 || xloc1.line != xloc2.line
1022 || xloc1.column != xloc2.column)
1023 loc_t2_useful = true;
1024 }
1025 }
1026
1027 if (loc_t1 <= BUILTINS_LOCATION)
1028 loc_t1 = loc1;
1029 if (loc_t2 <= BUILTINS_LOCATION)
1030 loc_t2 = loc2;
1031
1032 location_t loc = loc_t1 <= BUILTINS_LOCATION ? loc_t2 : loc_t1;
1033
1034 /* It is a quite common bug to reference anonymous namespace type in
1035 non-anonymous namespace class. */
1036 tree mt1 = TYPE_MAIN_VARIANT (t1);
1037 tree mt2 = TYPE_MAIN_VARIANT (t2);
1038 if ((type_with_linkage_p (mt1)
1039 && type_in_anonymous_namespace_p (mt1))
1040 || (type_with_linkage_p (mt2)
1041 && type_in_anonymous_namespace_p (mt2)))
1042 {
1043 if (!type_with_linkage_p (mt1)
1044 || !type_in_anonymous_namespace_p (mt1))
1045 {
1046 std::swap (t1, t2);
1047 std::swap (mt1, mt2);
1048 std::swap (loc_t1, loc_t2);
1049 }
1050 gcc_assert (TYPE_NAME (mt1)
1051 && TREE_CODE (TYPE_NAME (mt1)) == TYPE_DECL);
1052 tree n1 = TYPE_NAME (mt1);
1053 tree n2 = TYPE_NAME (mt2) ? TYPE_NAME (mt2) : NULL;
1054
1055 if (TREE_CODE (n1) == TYPE_DECL)
1056 n1 = DECL_NAME (n1);
1057 if (n2 && TREE_CODE (n2) == TYPE_DECL)
1058 n2 = DECL_NAME (n2);
1059 /* Most of the time, the type names will match, do not be unnecessarily
1060 verbose. */
1061 if (n1 != n2)
1062 inform (loc_t1,
1063 "type %qT defined in anonymous namespace cannot match "
1064 "type %qT across the translation unit boundary",
1065 t1, t2);
1066 else
1067 inform (loc_t1,
1068 "type %qT defined in anonymous namespace cannot match "
1069 "across the translation unit boundary",
1070 t1);
1071 if (loc_t2_useful)
1072 inform (loc_t2,
1073 "the incompatible type defined in another translation unit");
1074 return;
1075 }
1076 /* If types have mangled ODR names and they are different, it is most
1077 informative to output those.
1078 This also covers types defined in different namespaces. */
1079 const char *odr1 = get_odr_name_for_type (mt1);
1080 const char *odr2 = get_odr_name_for_type (mt2);
1081 if (odr1 != NULL && odr2 != NULL && odr1 != odr2)
1082 {
1083 const int opts = DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES;
1084 char *name1 = xstrdup (cplus_demangle (odr1, opts));
1085 char *name2 = cplus_demangle (odr2, opts);
1086 if (name1 && name2 && strcmp (name1, name2))
1087 {
1088 inform (loc_t1,
1089 "type name %qs should match type name %qs",
1090 name1, name2);
1091 if (loc_t2_useful)
1092 inform (loc_t2,
1093 "the incompatible type is defined here");
1094 free (name1);
1095 return;
1096 }
1097 free (name1);
1098 }
1099 /* A tricky case are compound types. Often they appear the same in source
1100 code and the mismatch is dragged in by type they are build from.
1101 Look for those differences in subtypes and try to be informative. In other
1102 cases just output nothing because the source code is probably different
1103 and in this case we already output a all necessary info. */
1104 if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
1105 {
1106 if (TREE_CODE (t1) == TREE_CODE (t2))
1107 {
1108 if (TREE_CODE (t1) == ARRAY_TYPE
1109 && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1110 {
1111 tree i1 = TYPE_DOMAIN (t1);
1112 tree i2 = TYPE_DOMAIN (t2);
1113
1114 if (i1 && i2
1115 && TYPE_MAX_VALUE (i1)
1116 && TYPE_MAX_VALUE (i2)
1117 && !operand_equal_p (TYPE_MAX_VALUE (i1),
1118 TYPE_MAX_VALUE (i2), 0))
1119 {
1120 inform (loc,
1121 "array types have different bounds");
1122 return;
1123 }
1124 }
1125 if ((POINTER_TYPE_P (t1) || TREE_CODE (t1) == ARRAY_TYPE)
1126 && type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1127 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1, loc_t2);
1128 else if (TREE_CODE (t1) == METHOD_TYPE
1129 || TREE_CODE (t1) == FUNCTION_TYPE)
1130 {
1131 tree parms1 = NULL, parms2 = NULL;
1132 int count = 1;
1133
1134 if (type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1135 {
1136 inform (loc, "return value type mismatch");
1137 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1,
1138 loc_t2);
1139 return;
1140 }
1141 if (prototype_p (t1) && prototype_p (t2))
1142 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1143 parms1 && parms2;
1144 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2),
1145 count++)
1146 {
1147 if (type_mismatch_p (TREE_VALUE (parms1), TREE_VALUE (parms2)))
1148 {
1149 if (count == 1 && TREE_CODE (t1) == METHOD_TYPE)
1150 inform (loc,
1151 "implicit this pointer type mismatch");
1152 else
1153 inform (loc,
1154 "type mismatch in parameter %i",
1155 count - (TREE_CODE (t1) == METHOD_TYPE));
1156 warn_types_mismatch (TREE_VALUE (parms1),
1157 TREE_VALUE (parms2),
1158 loc_t1, loc_t2);
1159 return;
1160 }
1161 }
1162 if (parms1 || parms2)
1163 {
1164 inform (loc,
1165 "types have different parameter counts");
1166 return;
1167 }
1168 }
1169 }
1170 return;
1171 }
1172
1173 if (types_odr_comparable (t1, t2)
1174 /* We make assign integers mangled names to be able to handle
1175 signed/unsigned chars. Accepting them here would however lead to
1176 confusing message like
1177 "type ‘const int’ itself violates the C++ One Definition Rule" */
1178 && TREE_CODE (t1) != INTEGER_TYPE
1179 && types_same_for_odr (t1, t2))
1180 inform (loc_t1,
1181 "type %qT itself violates the C++ One Definition Rule", t1);
1182 /* Prevent pointless warnings like "struct aa" should match "struct aa". */
1183 else if (TYPE_NAME (t1) == TYPE_NAME (t2)
1184 && TREE_CODE (t1) == TREE_CODE (t2) && !loc_t2_useful)
1185 return;
1186 else
1187 inform (loc_t1, "type %qT should match type %qT",
1188 t1, t2);
1189 if (loc_t2_useful)
1190 inform (loc_t2, "the incompatible type is defined here");
1191 }
1192
1193 /* Return true if T should be ignored in TYPE_FIELDS for ODR comparison. */
1194
1195 static bool
skip_in_fields_list_p(tree t)1196 skip_in_fields_list_p (tree t)
1197 {
1198 if (TREE_CODE (t) != FIELD_DECL)
1199 return true;
1200 /* C++ FE introduces zero sized fields depending on -std setting, see
1201 PR89358. */
1202 if (DECL_SIZE (t)
1203 && integer_zerop (DECL_SIZE (t))
1204 && DECL_ARTIFICIAL (t)
1205 && DECL_IGNORED_P (t)
1206 && !DECL_NAME (t))
1207 return true;
1208 return false;
1209 }
1210
/* Compare T1 and T2, report ODR violations if WARN is true and set
   *WARNED to true if anything is reported.  Return true if types match.
   If true is returned, the types are also compatible in the sense of
   gimple_canonical_types_compatible_p.
   If LOC1 and LOC2 are not UNKNOWN_LOCATION they may be used to output a
   warning about the type if the type itself does not have a location.  */
1217
static bool
odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
			hash_set<type_pair> *visited,
			location_t loc1, location_t loc2)
{
  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("a different type is defined in another translation unit"));
      return false;
    }

  /* Types in anonymous namespaces are never ODR-equivalent across
     translation units.  */
  if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1))
       && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1)))
      || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2))
	  && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2))))
    {
      /* We cannot trip this when comparing ODR types, only when trying to
	 match different ODR derivations from different declarations.
	 So WARN should be always false.  */
      gcc_assert (!warn);
      return false;
    }

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a type with different precision is defined "
		       "in another translation unit"));
	  return false;
	}
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a type with different signedness is defined "
		       "in another translation unit"));
	  return false;
	}

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	{
	  /* char WRT uint_8?  */
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a different type is defined in another "
		       "translation unit"));
	  return false;
	}

      /* For canonical type comparisons we do not want to build SCCs
	 so we cannot compare pointed-to types.  But we can, for now,
	 require the same pointed-to type kind and match what
	 useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
	{
	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("it is defined as a pointer in different address "
			   "space in another translation unit"));
	      return false;
	    }

	  if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					  visited, loc1, loc2))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("it is defined as a pointer to different type "
			   "in another translation unit"));
	      /* Detail note only when the top-level warning was emitted.  */
	      if (warn && warned)
		warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2),
				     loc1, loc2);
	      return false;
	    }
	}

      if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
	  && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					 visited, loc1, loc2))
	{
	  /* Probably specific enough.  */
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a different type is defined "
		       "in another translation unit"));
	  if (warn && warned)
	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
	  return false;
	}
    }
  /* Do type-specific comparisons.  */
  else switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      {
	/* Array types are the same if the element types are the same and
	   the number of elements are the same.  */
	if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					visited, loc1, loc2))
	  {
	    warn_odr (t1, t2, NULL, NULL, warn, warned,
		      G_("a different type is defined in another "
			 "translation unit"));
	    if (warn && warned)
	      warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
	    /* NOTE(review): unlike every other mismatch path, this one does
	       not return false; control falls through to the domain checks
	       below — confirm that is intentional.  */
	  }
	gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
	gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
		    == TYPE_NONALIASED_COMPONENT (t2));

	tree i1 = TYPE_DOMAIN (t1);
	tree i2 = TYPE_DOMAIN (t2);

	/* For an incomplete external array, the type domain can be
	   NULL_TREE.  Check this condition also.  */
	if (i1 == NULL_TREE || i2 == NULL_TREE)
	  return type_variants_equivalent_p (t1, t2);

	tree min1 = TYPE_MIN_VALUE (i1);
	tree min2 = TYPE_MIN_VALUE (i2);
	tree max1 = TYPE_MAX_VALUE (i1);
	tree max2 = TYPE_MAX_VALUE (i2);

	/* In C++, minimums should be always 0.  */
	gcc_assert (min1 == min2);
	if (!operand_equal_p (max1, max2, 0))
	  {
	    warn_odr (t1, t2, NULL, NULL, warn, warned,
		      G_("an array of different size is defined "
			 "in another translation unit"));
	    return false;
	  }
      }
      break;

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
				      visited, loc1, loc2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("has different return value "
		       "in another translation unit"));
	  if (warn && warned)
	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
	  return false;
	}

      /* Unprototyped functions (or identical argument lists) need no
	 parameter-by-parameter walk.  */
      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
	  || !prototype_p (t1) || !prototype_p (t2))
	return type_variants_equivalent_p (t1, t2);
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!odr_subtypes_equivalent_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2),
		      visited, loc1, loc2))
		{
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("has different parameters in another "
			       "translation unit"));
		  if (warn && warned)
		    warn_types_mismatch (TREE_VALUE (parms1),
					 TREE_VALUE (parms2), loc1, loc2);
		  return false;
		}
	    }

	  /* One list ran out before the other: different arity.  */
	  if (parms1 || parms2)
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("has different parameters "
			   "in another translation unit"));
	      return false;
	    }

	  return type_variants_equivalent_p (t1, t2);
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
	  {
	    /* A type cannot be polymorphic in one unit and not in the
	       other.  */
	    if (TYPE_BINFO (t1) && TYPE_BINFO (t2)
		&& polymorphic_type_binfo_p (TYPE_BINFO (t1))
		   != polymorphic_type_binfo_p (TYPE_BINFO (t2)))
	      {
		if (polymorphic_type_binfo_p (TYPE_BINFO (t1)))
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type defined in another translation unit "
			       "is not polymorphic"));
		else
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type defined in another translation unit "
			       "is polymorphic"));
		return false;
	      }
	    /* Walk both field chains in parallel, comparing name, type,
	       offset and bitfield-ness of each corresponding pair.  */
	    for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
		 f1 || f2;
		 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	      {
		/* Skip non-fields.  */
		while (f1 && skip_in_fields_list_p (f1))
		  f1 = TREE_CHAIN (f1);
		while (f2 && skip_in_fields_list_p (f2))
		  f2 = TREE_CHAIN (f2);
		if (!f1 || !f2)
		  break;
		if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
		  {
		    warn_odr (t1, t2, NULL, NULL, warn, warned,
			      G_("a type with different virtual table pointers"
			         " is defined in another translation unit"));
		    return false;
		  }
		/* Artificial fields represent bases; a mismatch here means
		   different base classes.  */
		if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
		  {
		    warn_odr (t1, t2, NULL, NULL, warn, warned,
			      G_("a type with different bases is defined "
				 "in another translation unit"));
		    return false;
		  }
		if (DECL_NAME (f1) != DECL_NAME (f2)
		    && !DECL_ARTIFICIAL (f1))
		  {
		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("a field with different name is defined "
				 "in another translation unit"));
		    return false;
		  }
		if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
						TREE_TYPE (f2),
						visited, loc1, loc2))
		  {
		    /* Do not warn about artificial fields and just go into
		       generic field mismatch warning.  */
		    if (DECL_ARTIFICIAL (f1))
		      break;

		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("a field of same name but different type "
				 "is defined in another translation unit"));
		    if (warn && warned)
		      warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2), loc1, loc2);
		    return false;
		  }
		if (!gimple_compare_field_offset (f1, f2))
		  {
		    /* Do not warn about artificial fields and just go into
		       generic field mismatch warning.  */
		    if (DECL_ARTIFICIAL (f1))
		      break;
		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("fields have different layout "
				 "in another translation unit"));
		    return false;
		  }
		if (DECL_BIT_FIELD (f1) != DECL_BIT_FIELD (f2))
		  {
		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("one field is a bitfield while the other "
				 "is not"));
		    return false;
		  }
		else
		  gcc_assert (DECL_NONADDRESSABLE_P (f1)
			      == DECL_NONADDRESSABLE_P (f2));
	      }

	    /* If one aggregate has more fields than the other, they
	       are not the same.  */
	    if (f1 || f2)
	      {
		if ((f1 && DECL_VIRTUAL_P (f1)) || (f2 && DECL_VIRTUAL_P (f2)))
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type with different virtual table pointers"
			       " is defined in another translation unit"));
		else if ((f1 && DECL_ARTIFICIAL (f1))
			 || (f2 && DECL_ARTIFICIAL (f2)))
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("a type with different bases is defined "
			       "in another translation unit"));
		else
		  warn_odr (t1, t2, f1, f2, warn, warned,
			    G_("a type with different number of fields "
			       "is defined in another translation unit"));

		return false;
	      }
	  }
	break;
      }
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case NULLPTR_TYPE:
      break;

    default:
      debug_tree (t1);
      gcc_unreachable ();
    }

  /* Those are better to come last as they are utterly uninformative.  */
  if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
      && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("a type with different size "
		   "is defined in another translation unit"));
      return false;
    }

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2)
      && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("one type needs to be constructed while the other does not"));
      gcc_checking_assert (RECORD_OR_UNION_TYPE_P (t1));
      return false;
    }
  /* There is no really good user facing warning for this.
     Either the original reason for modes being different is lost during
     streaming or we should catch earlier warnings.  We however must detect
     the mismatch to avoid type verifier from complaining on mismatched
     types between type and canonical type.  See PR91576.  */
  if (TYPE_MODE (t1) != TYPE_MODE (t2)
      && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("memory layout mismatch"));
      return false;
    }

  gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
	      || operand_equal_p (TYPE_SIZE_UNIT (t1),
				  TYPE_SIZE_UNIT (t2), 0));
  return type_variants_equivalent_p (t1, t2);
}
1582
1583 /* Return true if TYPE1 and TYPE2 are equivalent for One Definition Rule. */
1584
1585 bool
odr_types_equivalent_p(tree type1,tree type2)1586 odr_types_equivalent_p (tree type1, tree type2)
1587 {
1588 gcc_checking_assert (odr_or_derived_type_p (type1)
1589 && odr_or_derived_type_p (type2));
1590
1591 hash_set<type_pair> visited;
1592 return odr_types_equivalent_p (type1, type2, false, NULL,
1593 &visited, UNKNOWN_LOCATION, UNKNOWN_LOCATION);
1594 }
1595
1596 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1597 from VAL->type. This may happen in LTO where tree merging did not merge
1598 all variants of the same type or due to ODR violation.
1599
1600 Analyze and report ODR violations and add type to duplicate list.
1601 If TYPE is more specified than VAL->type, prevail VAL->type. Also if
1602 this is first time we see definition of a class return true so the
1603 base types are analyzed. */
1604
static bool
add_type_duplicate (odr_type val, tree type)
{
  /* BUILD_BASES is the return value: true when the caller should (re)walk
     the bases of the new leader.  PREVAIL is set when TYPE should replace
     VAL->type as the leader.  */
  bool build_bases = false;
  bool prevail = false;
  bool odr_must_violate = false;

  if (!val->types_set)
    val->types_set = new hash_set<tree>;

  /* Choose polymorphic type as leader (this happens only in case of ODR
     violations).  */
  if ((TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
       && polymorphic_type_binfo_p (TYPE_BINFO (type)))
      && (TREE_CODE (val->type) != RECORD_TYPE || !TYPE_BINFO (val->type)
	  || !polymorphic_type_binfo_p (TYPE_BINFO (val->type))))
    {
      prevail = true;
      build_bases = true;
    }
  /* Always prefer complete type to be the leader.  */
  else if (!COMPLETE_TYPE_P (val->type) && COMPLETE_TYPE_P (type))
    {
      prevail = true;
      if (TREE_CODE (type) == RECORD_TYPE)
	build_bases = TYPE_BINFO (type);
    }
  else if (COMPLETE_TYPE_P (val->type) && !COMPLETE_TYPE_P (type))
    ;
  /* Prefer a record with binfo over one without.  */
  else if (TREE_CODE (val->type) == RECORD_TYPE
	   && TREE_CODE (type) == RECORD_TYPE
	   && TYPE_BINFO (type) && !TYPE_BINFO (val->type))
    {
      gcc_assert (!val->bases.length ());
      build_bases = true;
      prevail = true;
    }

  /* From here on, TYPE is the duplicate and VAL->type the leader.  */
  if (prevail)
    std::swap (val->type, type);

  val->types_set->add (type);

  /* If the hash was already freed (end of IPA), no analysis is needed.  */
  if (!odr_hash)
    return false;

  gcc_checking_assert (can_be_name_hashed_p (type)
		       && can_be_name_hashed_p (val->type));

  bool merge = true;
  bool base_mismatch = false;
  unsigned int i;
  bool warned = false;
  hash_set<type_pair> visited;

  gcc_assert (in_lto_p);
  vec_safe_push (val->types, type);

  /* If both are class types, compare the bases.  */
  if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
      && TREE_CODE (val->type) == RECORD_TYPE
      && TREE_CODE (type) == RECORD_TYPE
      && TYPE_BINFO (val->type) && TYPE_BINFO (type))
    {
      if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
	  != BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
	{
	  /* Warn only once per ODR type and only at WPA time
	     (!flag_ltrans).  */
	  if (!flag_ltrans && !warned && !val->odr_violated)
	    {
	      tree extra_base;
	      warn_odr (type, val->type, NULL, NULL, !warned, &warned,
			"a type with the same name but different "
			"number of polymorphic bases is "
			"defined in another translation unit");
	      if (warned)
		{
		  /* Point at the first base present in one type but not
		     the other.  */
		  if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
		      > BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
		    extra_base = BINFO_BASE_BINFO
				 (TYPE_BINFO (type),
				  BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)));
		  else
		    extra_base = BINFO_BASE_BINFO
				 (TYPE_BINFO (val->type),
				  BINFO_N_BASE_BINFOS (TYPE_BINFO (type)));
		  tree extra_base_type = BINFO_TYPE (extra_base);
		  inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type)),
			  "the extra base is defined here");
		}
	    }
	  base_mismatch = true;
	}
      else
	/* Same number of bases: compare them pairwise.  */
	for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	  {
	    tree base1 = BINFO_BASE_BINFO (TYPE_BINFO (type), i);
	    tree base2 = BINFO_BASE_BINFO (TYPE_BINFO (val->type), i);
	    tree type1 = BINFO_TYPE (base1);
	    tree type2 = BINFO_TYPE (base2);

	    if (types_odr_comparable (type1, type2))
	      {
		if (!types_same_for_odr (type1, type2))
		  base_mismatch = true;
	      }
	    else
	      if (!odr_types_equivalent_p (type1, type2))
		base_mismatch = true;
	    if (base_mismatch)
	      {
		if (!warned && !val->odr_violated)
		  {
		    warn_odr (type, val->type, NULL, NULL,
			      !warned, &warned,
			      "a type with the same name but different base "
			      "type is defined in another translation unit");
		    if (warned)
		      warn_types_mismatch (type1, type2,
					   UNKNOWN_LOCATION, UNKNOWN_LOCATION);
		  }
		break;
	      }
	    /* NOTE(review): pointer comparison of the offset trees —
	       presumably relies on INTEGER_CSTs being shared, so unequal
	       pointers imply unequal offsets; confirm.  */
	    if (BINFO_OFFSET (base1) != BINFO_OFFSET (base2))
	      {
		base_mismatch = true;
		if (!warned && !val->odr_violated)
		  warn_odr (type, val->type, NULL, NULL,
			    !warned, &warned,
			    "a type with the same name but different base "
			    "layout is defined in another translation unit");
		break;
	      }
	    /* One of bases is not of complete type.  */
	    if (!TYPE_BINFO (type1) != !TYPE_BINFO (type2))
	      {
		/* If we have a polymorphic type info specified for TYPE1
		   but not for TYPE2 we possibly missed a base when recording
		   VAL->type earlier.
		   Be sure this does not happen.  */
		if (TYPE_BINFO (type1)
		    && polymorphic_type_binfo_p (TYPE_BINFO (type1))
		    && !build_bases)
		  odr_must_violate = true;
		break;
	      }
	    /* One base is polymorphic and the other not.
	       This ought to be diagnosed earlier, but do not ICE in the
	       checking below.  */
	    else if (TYPE_BINFO (type1)
		     && polymorphic_type_binfo_p (TYPE_BINFO (type1))
			!= polymorphic_type_binfo_p (TYPE_BINFO (type2)))
	      {
		if (!warned && !val->odr_violated)
		  warn_odr (type, val->type, NULL, NULL,
			    !warned, &warned,
			    "a base of the type is polymorphic only in one "
			    "translation unit");
		base_mismatch = true;
		break;
	      }
	  }
      if (base_mismatch)
	{
	  merge = false;
	  odr_violation_reported = true;
	  val->odr_violated = true;

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file, "ODR base violation\n");

	      print_node (symtab->dump_file, "", val->type, 0);
	      putc ('\n',symtab->dump_file);
	      print_node (symtab->dump_file, "", type, 0);
	      putc ('\n',symtab->dump_file);
	    }
	}
    }

  /* Next compare memory layout.
     The DECL_SOURCE_LOCATIONs in this invocation came from LTO streaming.
     We must apply the location cache to ensure that they are valid
     before we can pass them to odr_types_equivalent_p (PR lto/83121).  */
  if (lto_location_cache::current_cache)
    lto_location_cache::current_cache->apply_location_cache ();
  /* As a special case we stream mangles names of integer types so we can see
     if they are believed to be same even though they have different
     representation.  Avoid bogus warning on mismatches in these.  */
  if (TREE_CODE (type) != INTEGER_TYPE
      && TREE_CODE (val->type) != INTEGER_TYPE
      && !odr_types_equivalent_p (val->type, type,
				  !flag_ltrans && !val->odr_violated && !warned,
				  &warned, &visited,
				  DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
				  DECL_SOURCE_LOCATION (TYPE_NAME (type))))
    {
      merge = false;
      odr_violation_reported = true;
      val->odr_violated = true;
    }
  gcc_assert (val->odr_violated || !odr_must_violate);
  /* Sanity check that all bases will be build same way again.  */
  if (flag_checking
      && COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
      && TREE_CODE (val->type) == RECORD_TYPE
      && TREE_CODE (type) == RECORD_TYPE
      && TYPE_BINFO (val->type) && TYPE_BINFO (type)
      && !val->odr_violated
      && !base_mismatch && val->bases.length ())
    {
      unsigned int num_poly_bases = 0;
      unsigned int j;

      /* VAL->bases records only the polymorphic bases; verify the new
	 duplicate produces the same list.  */
      for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
					 (TYPE_BINFO (type), i)))
	  num_poly_bases++;
      gcc_assert (num_poly_bases == val->bases.length ());
      for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type));
	   i++)
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
				     (TYPE_BINFO (type), i)))
	  {
	    odr_type base = get_odr_type
			       (BINFO_TYPE
				  (BINFO_BASE_BINFO (TYPE_BINFO (type),
						     i)),
				true);
	    gcc_assert (val->bases[j] == base);
	    j++;
	  }
    }


  /* Regularize things a little.  During LTO same types may come with
     different BINFOs.  Either because their virtual table was
     not merged by tree merging and only later at decl merging or
     because one type comes with external vtable, while other
     with internal.  We want to merge equivalent binfos to conserve
     memory and streaming overhead.

     The external vtables are more harmful: they contain references
     to external declarations of methods that may be defined in the
     merged LTO unit.  For this reason we absolutely need to remove
     them and replace by internal variants. Not doing so will lead
     to incomplete answers from possible_polymorphic_call_targets.

     FIXME: disable for now; because ODR types are now build during
     streaming in, the variants do not need to be linked to the type,
     yet.  We need to do the merging in cleanup pass to be implemented
     soon.  */
  if (!flag_ltrans && merge
      && 0
      && TREE_CODE (val->type) == RECORD_TYPE
      && TREE_CODE (type) == RECORD_TYPE
      && TYPE_BINFO (val->type) && TYPE_BINFO (type)
      && TYPE_MAIN_VARIANT (type) == type
      && TYPE_MAIN_VARIANT (val->type) == val->type
      && BINFO_VTABLE (TYPE_BINFO (val->type))
      && BINFO_VTABLE (TYPE_BINFO (type)))
    {
      tree master_binfo = TYPE_BINFO (val->type);
      tree v1 = BINFO_VTABLE (master_binfo);
      tree v2 = BINFO_VTABLE (TYPE_BINFO (type));

      /* Strip the POINTER_PLUS_EXPR wrapper (vtable address plus offset)
	 down to the vtable VAR_DECLs themselves.  */
      if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
	{
	  gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
		      && operand_equal_p (TREE_OPERAND (v1, 1),
					  TREE_OPERAND (v2, 1), 0));
	  v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
	  v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
	}
      gcc_assert (DECL_ASSEMBLER_NAME (v1)
		  == DECL_ASSEMBLER_NAME (v2));

      /* Prefer the binfo whose vtable is defined in this unit.  */
      if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
	{
	  unsigned int i;

	  set_type_binfo (val->type, TYPE_BINFO (type));
	  for (i = 0; i < val->types->length (); i++)
	    {
	      if (TYPE_BINFO ((*val->types)[i])
		  == master_binfo)
		set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
	    }
	  BINFO_TYPE (TYPE_BINFO (type)) = val->type;
	}
      else
	set_type_binfo (type, master_binfo);
    }
  return build_bases;
}
1899
1900 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to.
1901 FOR_DUMP_P is true when being called from the dump routines. */
1902
1903 tree
obj_type_ref_class(const_tree ref,bool for_dump_p)1904 obj_type_ref_class (const_tree ref, bool for_dump_p)
1905 {
1906 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
1907 ref = TREE_TYPE (ref);
1908 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
1909 ref = TREE_TYPE (ref);
1910 /* We look for type THIS points to. ObjC also builds
1911 OBJ_TYPE_REF with non-method calls, Their first parameter
1912 ID however also corresponds to class type. */
1913 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
1914 || TREE_CODE (ref) == FUNCTION_TYPE);
1915 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
1916 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
1917 tree ret = TREE_TYPE (ref);
1918 if (!in_lto_p && !TYPE_STRUCTURAL_EQUALITY_P (ret))
1919 ret = TYPE_CANONICAL (ret);
1920 else if (odr_type ot = get_odr_type (ret, !for_dump_p))
1921 ret = ot->type;
1922 else
1923 gcc_assert (for_dump_p);
1924 return ret;
1925 }
1926
/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   possibly new entry.  Returns NULL when INSERT is false and TYPE
   was never registered.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot = NULL;
  odr_type val = NULL;
  hashval_t hash;
  bool build_bases = false;
  bool insert_to_odr_array = false;
  int base_id = -1;

  /* The hash is keyed on the main variant; outside of LTO, when the
     canonical type is precise, use it so variants share one entry.  */
  type = TYPE_MAIN_VARIANT (type);
  if (!in_lto_p && !TYPE_STRUCTURAL_EQUALITY_P (type))
    type = TYPE_CANONICAL (type);

  gcc_checking_assert (can_be_name_hashed_p (type));

  hash = hash_odr_name (type);
  slot = odr_hash->find_slot_with_hash (type, hash,
					insert ? INSERT : NO_INSERT);

  if (!slot)
    return NULL;

  /* See if we already have entry for type.  */
  if (*slot)
    {
      val = *slot;

      /* A different tree with the same ODR name: record it as a
	 duplicate (once) and possibly (re)build base info.  */
      if (val->type != type && insert
	  && (!val->types_set || !val->types_set->add (type)))
	build_bases = add_type_duplicate (val, type);
    }
  else
    {
      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      if (type_with_linkage_p (type))
	val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      else
	val->anonymous_namespace = 0;
      /* Bases can only be walked on complete types.  */
      build_bases = COMPLETE_TYPE_P (val->type);
      insert_to_odr_array = true;
      *slot = val;
    }

  if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
      && type_with_linkage_p (type)
      && type == TYPE_MAIN_VARIANT (type))
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);

      val->all_derivations_known = type_all_derivations_known_p (type);
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types. other are
	   pointless for devirtualization and we cannot precisely
	   determine ODR equivalency of these during LTO.  */
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
	  {
	    tree base_type= BINFO_TYPE (BINFO_BASE_BINFO (binfo, i));
	    /* Registering a base may recurse into get_odr_type; track the
	       largest base id to detect ordering violations below.  */
	    odr_type base = get_odr_type (base_type, true);
	    gcc_assert (TYPE_MAIN_VARIANT (base_type) == base_type);
	    base->derived_types.safe_push (val);
	    val->bases.safe_push (base);
	    if (base->id > base_id)
	      base_id = base->id;
	  }
    }
  /* Ensure that type always appears after bases.  */
  if (insert_to_odr_array)
    {
      if (odr_types_ptr)
	val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  else if (base_id > val->id)
    {
      /* A base was registered after us: move this entry to the end of
	 the array so the invariant "bases first" keeps holding.  */
      odr_types[val->id] = 0;
      /* Be sure we did not recorded any derived types; these may need
	 renumbering too.  */
      gcc_assert (val->derived_types.length() == 0);
      val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}
2020
2021 /* Return type that in ODR type hash prevailed TYPE. Be careful and punt
2022 on ODR violations. */
2023
2024 tree
prevailing_odr_type(tree type)2025 prevailing_odr_type (tree type)
2026 {
2027 odr_type t = get_odr_type (type, false);
2028 if (!t || t->odr_violated)
2029 return type;
2030 return t->type;
2031 }
2032
2033 /* Set tbaa_enabled flag for TYPE. */
2034
2035 void
enable_odr_based_tbaa(tree type)2036 enable_odr_based_tbaa (tree type)
2037 {
2038 odr_type t = get_odr_type (type, true);
2039 t->tbaa_enabled = true;
2040 }
2041
2042 /* True if canonical type of TYPE is determined using ODR name. */
2043
2044 bool
odr_based_tbaa_p(const_tree type)2045 odr_based_tbaa_p (const_tree type)
2046 {
2047 if (!RECORD_OR_UNION_TYPE_P (type))
2048 return false;
2049 if (!odr_hash)
2050 return false;
2051 odr_type t = get_odr_type (const_cast <tree> (type), false);
2052 if (!t || !t->tbaa_enabled)
2053 return false;
2054 return true;
2055 }
2056
2057 /* Set TYPE_CANONICAL of type and all its variants and duplicates
2058 to CANONICAL. */
2059
2060 void
set_type_canonical_for_odr_type(tree type,tree canonical)2061 set_type_canonical_for_odr_type (tree type, tree canonical)
2062 {
2063 odr_type t = get_odr_type (type, false);
2064 unsigned int i;
2065 tree tt;
2066
2067 for (tree t2 = t->type; t2; t2 = TYPE_NEXT_VARIANT (t2))
2068 TYPE_CANONICAL (t2) = canonical;
2069 if (t->types)
2070 FOR_EACH_VEC_ELT (*t->types, i, tt)
2071 for (tree t2 = tt; t2; t2 = TYPE_NEXT_VARIANT (t2))
2072 TYPE_CANONICAL (t2) = canonical;
2073 }
2074
2075 /* Return true if we reported some ODR violation on TYPE. */
2076
2077 bool
odr_type_violation_reported_p(tree type)2078 odr_type_violation_reported_p (tree type)
2079 {
2080 return get_odr_type (type, false)->odr_violated;
2081 }
2082
/* Add TYPE of ODR type hash.  Recursively registers field and base
   types first so that ODR warnings see sub-types before the type
   itself.  */

void
register_odr_type (tree type)
{
  if (!odr_hash)
    odr_hash = new odr_hash_type (23);
  /* Only main variants are keyed in the hash.  */
  if (type == TYPE_MAIN_VARIANT (type))
    {
      /* To get ODR warnings right, first register all sub-types.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && COMPLETE_TYPE_P (type))
	{
	  /* Limit recursion on types which are already registered.  */
	  odr_type ot = get_odr_type (type, false);
	  if (ot
	      && (ot->type == type
		  || (ot->types_set
		      && ot->types_set->contains (type))))
	    return;
	  /* Register the types of all fields, looking through arrays to
	     their element type.  */
	  for (tree f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
	    if (TREE_CODE (f) == FIELD_DECL)
	      {
		tree subtype = TREE_TYPE (f);

		while (TREE_CODE (subtype) == ARRAY_TYPE)
		  subtype = TREE_TYPE (subtype);
		if (type_with_linkage_p (TYPE_MAIN_VARIANT (subtype)))
		  register_odr_type (TYPE_MAIN_VARIANT (subtype));
	      }
	  /* Register all base types as well.  */
	  if (TYPE_BINFO (type))
	    for (unsigned int i = 0;
		 i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	      register_odr_type (BINFO_TYPE (BINFO_BASE_BINFO
					     (TYPE_BINFO (type), i)));
	}
      get_odr_type (type, true);
    }
}
2122
2123 /* Return true if type is known to have no derivations. */
2124
2125 bool
type_known_to_have_no_derivations_p(tree t)2126 type_known_to_have_no_derivations_p (tree t)
2127 {
2128 return (type_all_derivations_known_p (t)
2129 && (TYPE_FINAL_P (t)
2130 || (odr_hash
2131 && !get_odr_type (t, true)->derived_types.length())));
2132 }
2133
2134 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
2135 recursive printing. */
2136
2137 static void
dump_odr_type(FILE * f,odr_type t,int indent=0)2138 dump_odr_type (FILE *f, odr_type t, int indent=0)
2139 {
2140 unsigned int i;
2141 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
2142 print_generic_expr (f, t->type, TDF_SLIM);
2143 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
2144 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
2145 if (TYPE_NAME (t->type))
2146 {
2147 if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t->type)))
2148 fprintf (f, "%*s mangled name: %s\n", indent * 2, "",
2149 IDENTIFIER_POINTER
2150 (DECL_ASSEMBLER_NAME (TYPE_NAME (t->type))));
2151 }
2152 if (t->bases.length ())
2153 {
2154 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
2155 for (i = 0; i < t->bases.length (); i++)
2156 fprintf (f, " %i", t->bases[i]->id);
2157 fprintf (f, "\n");
2158 }
2159 if (t->derived_types.length ())
2160 {
2161 fprintf (f, "%*s derived types:\n", indent * 2, "");
2162 for (i = 0; i < t->derived_types.length (); i++)
2163 dump_odr_type (f, t->derived_types[i], indent + 1);
2164 }
2165 fprintf (f, "\n");
2166 }
2167
/* Dump the type inheritance graph.  Roots (types with no bases) are
   printed recursively first; then statistics about duplicate trees
   sharing one ODR name are reported.  */

static void
dump_type_inheritance_graph (FILE *f)
{
  unsigned int i;
  unsigned int num_all_types = 0, num_types = 0, num_duplicates = 0;
  if (!odr_types_ptr)
    return;
  fprintf (f, "\n\nType inheritance graph:\n");
  for (i = 0; i < odr_types.length (); i++)
    {
      /* Entries may be NULL (renumbered by get_odr_type).  */
      if (odr_types[i] && odr_types[i]->bases.length () == 0)
	dump_odr_type (f, odr_types[i]);
    }
  for (i = 0; i < odr_types.length (); i++)
    {
      if (!odr_types[i])
	continue;

      num_all_types++;
      if (!odr_types[i]->types || !odr_types[i]->types->length ())
	continue;

      /* To aid ODR warnings we also mangle integer constants but do
	 not consider duplicates there.  */
      if (TREE_CODE (odr_types[i]->type) == INTEGER_TYPE)
	continue;

      /* It is normal to have one duplicate and one normal variant.  */
      if (odr_types[i]->types->length () == 1
	  && COMPLETE_TYPE_P (odr_types[i]->type)
	  && !COMPLETE_TYPE_P ((*odr_types[i]->types)[0]))
	continue;

      num_types ++;

      unsigned int j;
      fprintf (f, "Duplicate tree types for odr type %i\n", i);
      print_node (f, "", odr_types[i]->type, 0);
      print_node (f, "", TYPE_NAME (odr_types[i]->type), 0);
      putc ('\n',f);
      for (j = 0; j < odr_types[i]->types->length (); j++)
	{
	  tree t;
	  num_duplicates ++;
	  fprintf (f, "duplicate #%i\n", j);
	  print_node (f, "", (*odr_types[i]->types)[j], 0);
	  t = (*odr_types[i]->types)[j];
	  /* Print enclosing type contexts to help locate the duplicate.  */
	  while (TYPE_P (t) && TYPE_CONTEXT (t))
	    {
	      t = TYPE_CONTEXT (t);
	      print_node (f, "", t, 0);
	    }
	  print_node (f, "", TYPE_NAME ((*odr_types[i]->types)[j]), 0);
	  putc ('\n',f);
	}
    }
  fprintf (f, "Out of %i types there are %i types with duplicates; "
	   "%i duplicates overall\n", num_all_types, num_types, num_duplicates);
}
2229
2230 /* Save some WPA->ltrans streaming by freeing stuff needed only for good
2231 ODR warnings.
2232 We make TYPE_DECLs to not point back
2233 to the type (which is needed to keep them in the same SCC and preserve
2234 location information to output warnings) and subsequently we make all
2235 TYPE_DECLS of same assembler name equivalent. */
2236
2237 static void
free_odr_warning_data()2238 free_odr_warning_data ()
2239 {
2240 static bool odr_data_freed = false;
2241
2242 if (odr_data_freed || !flag_wpa || !odr_types_ptr)
2243 return;
2244
2245 odr_data_freed = true;
2246
2247 for (unsigned int i = 0; i < odr_types.length (); i++)
2248 if (odr_types[i])
2249 {
2250 tree t = odr_types[i]->type;
2251
2252 TREE_TYPE (TYPE_NAME (t)) = void_type_node;
2253
2254 if (odr_types[i]->types)
2255 for (unsigned int j = 0; j < odr_types[i]->types->length (); j++)
2256 {
2257 tree td = (*odr_types[i]->types)[j];
2258
2259 TYPE_NAME (td) = TYPE_NAME (t);
2260 }
2261 }
2262 odr_data_freed = true;
2263 }
2264
/* Initialize IPA devirt and build inheritance tree graph.  Idempotent:
   when the ODR hash already exists only the warning data is freed.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  dump_flags_t flags;

  if (odr_hash)
    {
      free_odr_warning_data ();
      return;
    }
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);

  /* We reconstruct the graph starting of types of all methods seen in the
     unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& n->real_symbol_p ())
      get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variant - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node *> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  free_odr_warning_data ();
  timevar_pop (TV_IPA_INHERITANCE);
}
2324
2325 /* Return true if N has reference from live virtual table
2326 (and thus can be a destination of polymorphic call).
2327 Be conservatively correct when callgraph is not built or
2328 if the method may be referred externally. */
2329
2330 static bool
referenced_from_vtable_p(struct cgraph_node * node)2331 referenced_from_vtable_p (struct cgraph_node *node)
2332 {
2333 int i;
2334 struct ipa_ref *ref;
2335 bool found = false;
2336
2337 if (node->externally_visible
2338 || DECL_EXTERNAL (node->decl)
2339 || node->used_from_other_partition)
2340 return true;
2341
2342 /* Keep this test constant time.
2343 It is unlikely this can happen except for the case where speculative
2344 devirtualization introduced many speculative edges to this node.
2345 In this case the target is very likely alive anyway. */
2346 if (node->ref_list.referring.length () > 100)
2347 return true;
2348
2349 /* We need references built. */
2350 if (symtab->state <= CONSTRUCTION)
2351 return true;
2352
2353 for (i = 0; node->iterate_referring (i, ref); i++)
2354 if ((ref->use == IPA_REF_ALIAS
2355 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
2356 || (ref->use == IPA_REF_ADDR
2357 && VAR_P (ref->referring->decl)
2358 && DECL_VIRTUAL_P (ref->referring->decl)))
2359 {
2360 found = true;
2361 break;
2362 }
2363 return found;
2364 }
2365
2366 /* Return if TARGET is cxa_pure_virtual. */
2367
2368 static bool
is_cxa_pure_virtual_p(tree target)2369 is_cxa_pure_virtual_p (tree target)
2370 {
2371 return target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE
2372 && DECL_NAME (target)
2373 && id_equal (DECL_NAME (target),
2374 "__cxa_pure_virtual");
2375 }
2376
/* If TARGET has associated node, record it in the NODES array.
   CAN_REFER specify if program can refer to the target directly.
   if TARGET is unknown (NULL) or it cannot be inserted (for example because
   its body was already removed and there is no way to refer to it), clear
   COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, hash_set<tree> *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;
  bool pure_virtual = is_cxa_pure_virtual_p (target);

  /* __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE && !pure_virtual)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer alias target over aliases, so we do not get confused by
     fake duplicates.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      if (target_node != alias_target
	  && avail >= AVAIL_AVAILABLE
	  && target_node->get_availability ())
	target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && !pure_virtual
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
	  || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      /* When sanitizing, do not assume that __cxa_pure_virtual is not called
	 by valid program.  */
      if (flag_sanitize & SANITIZE_UNREACHABLE)
	;
      /* Only add pure virtual if it is the only possible target.  This way
	 we will preserve the diagnostics about pure virtual called in many
	 cases without disabling optimization in other.  */
      else if (pure_virtual)
	{
	  if (nodes.length ())
	    return;
	}
      /* If we found a real target, take away cxa_pure_virtual.  */
      else if (!pure_virtual && nodes.length () == 1
	       && is_cxa_pure_virtual_p (nodes[0]->decl))
	nodes.pop ();
      if (pure_virtual && nodes.length ())
	return;
      /* Record the node unless it was seen before.  */
      if (!inserted->add (target))
	{
	  cached_polymorphic_call_targets->add (target_node);
	  nodes.safe_push (target_node);
	}
    }
  else if (!completep)
    ;
  /* We have definition of __cxa_pure_virtual that is not accessible (it is
     optimized out or partitioned to other unit) so we cannot add it.  When
     not sanitizing, there is nothing to do.
     Otherwise declare the list incomplete.  */
  else if (pure_virtual)
    {
      if (flag_sanitize & SANITIZE_UNREACHABLE)
	*completep = false;
    }
  else if (flag_ltrans
	   || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
    *completep = false;
}
2492
/* See if BINFO's type matches OUTER_TYPE.  If so, look up
   BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
   method in vtable and insert method to NODES array
   or BASES_TO_CONSIDER if this array is non-NULL.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOS of types with defined
   virtual table seen on way from class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for virtual function in. INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.

   Clear COMPLETEP when we hit unreferable target.
  */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  hash_set<tree> *inserted,
			  hash_set<tree> *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  /* Push BINFO on the stack only when it carries a vtable; the matching
     pop happens before every return path below.  */
  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      if (!inner_binfo)
	{
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive. If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_node::get (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      /* In the BASES_TO_CONSIDER mode only peek at the set; otherwise
	 record the vtable as visited.  */
      if (bases_to_consider
	  ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
	  : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction vtables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
2603
/* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them to NODES, recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES are used to avoid duplicate walking vtables.
   Clear COMPLETEP if unreferable target is found.

   If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
   all cases where BASE_SKIPPED is true (because the base is abstract
   class).  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
				     hash_set<tree> *inserted,
				     hash_set<tree> *matched_vtables,
				     tree otr_type,
				     odr_type type,
				     HOST_WIDE_INT otr_token,
				     tree outer_type,
				     HOST_WIDE_INT offset,
				     bool *completep,
				     vec <tree> &bases_to_consider,
				     bool consider_construction)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  /* Stack of binfos with vtables, maintained by record_target_from_binfo.  */
  auto_vec <tree, 8> type_binfos;
  bool possibly_instantiated = type_possibly_instantiated_p (type->type);

  /* We may need to consider types w/o instances because of possible derived
     types using their methods either directly or via construction vtables.
     We are safe to skip them when all derivations are known, since we will
     handle them later.
     This is done by recording them to BASES_TO_CONSIDER array.  */
  if (possibly_instantiated || consider_construction)
    {
      record_target_from_binfo (nodes,
				(!possibly_instantiated
				 && type_all_derivations_known_p (type->type))
				? &bases_to_consider : NULL,
				binfo, otr_type, type_binfos, otr_token,
				outer_type, offset,
				inserted, matched_vtables,
				type->anonymous_namespace, completep);
    }
  /* Recurse into every derived type in the inheritance graph.  */
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
					 matched_vtables,
					 otr_type,
					 type->derived_types[i],
					 otr_token, outer_type, offset, completep,
					 bases_to_consider, consider_construction);
}
2656
/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

class polymorphic_call_target_d
{
public:
  /* Vtable index of the call (see OTR_TOKEN in the file header).  */
  HOST_WIDE_INT otr_token;
  /* Outer type / offset context the query was made in.  */
  ipa_polymorphic_call_context context;
  /* ODR type of the class whose method is called.  */
  odr_type type;
  /* Computed list of possible targets.  */
  vec <cgraph_node *> targets;
  /* Warning bookkeeping; exact semantics established by the lookup
     code outside this chunk -- presumably used for final-type/decl
     warnings, confirm against the cache users.  */
  tree decl_warning;
  int type_warning;
  /* Number of ODR types known when the entry was computed; adding a new
     type may change the answer, so this is part of the cache key (see
     polymorphic_call_target_hasher::equal).  */
  unsigned int n_odr_types;
  /* Whether the target list is complete (no unreferable targets hit).  */
  bool complete;
  /* Whether this entry is for a speculative query.  */
  bool speculative;
};
2676
/* Polymorphic call target cache helpers.  Hashing/equality traits for
   hash_table over polymorphic_call_target_d entries.  */

struct polymorphic_call_target_hasher
  : pointer_hash <polymorphic_call_target_d>
{
  static inline hashval_t hash (const polymorphic_call_target_d *);
  static inline bool equal (const polymorphic_call_target_d *,
			    const polymorphic_call_target_d *);
  /* Called when an entry is removed from the table; releases the entry.  */
  static inline void remove (polymorphic_call_target_d *);
};
2687
2688 /* Return the computed hashcode for ODR_QUERY. */
2689
2690 inline hashval_t
hash(const polymorphic_call_target_d * odr_query)2691 polymorphic_call_target_hasher::hash (const polymorphic_call_target_d *odr_query)
2692 {
2693 inchash::hash hstate (odr_query->otr_token);
2694
2695 hstate.add_hwi (odr_query->type->id);
2696 hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
2697 hstate.add_hwi (odr_query->context.offset);
2698 hstate.add_hwi (odr_query->n_odr_types);
2699
2700 if (odr_query->context.speculative_outer_type)
2701 {
2702 hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
2703 hstate.add_hwi (odr_query->context.speculative_offset);
2704 }
2705 hstate.add_flag (odr_query->speculative);
2706 hstate.add_flag (odr_query->context.maybe_in_construction);
2707 hstate.add_flag (odr_query->context.maybe_derived_type);
2708 hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
2709 hstate.commit_flag ();
2710 return hstate.end ();
2711 }
2712
2713 /* Compare cache entries T1 and T2. */
2714
2715 inline bool
equal(const polymorphic_call_target_d * t1,const polymorphic_call_target_d * t2)2716 polymorphic_call_target_hasher::equal (const polymorphic_call_target_d *t1,
2717 const polymorphic_call_target_d *t2)
2718 {
2719 return (t1->type == t2->type && t1->otr_token == t2->otr_token
2720 && t1->speculative == t2->speculative
2721 && t1->context.offset == t2->context.offset
2722 && t1->context.speculative_offset == t2->context.speculative_offset
2723 && t1->context.outer_type == t2->context.outer_type
2724 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
2725 && t1->context.maybe_in_construction
2726 == t2->context.maybe_in_construction
2727 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
2728 && (t1->context.speculative_maybe_derived_type
2729 == t2->context.speculative_maybe_derived_type)
2730 /* Adding new type may affect outcome of target search. */
2731 && t1->n_odr_types == t2->n_odr_types);
2732 }
2733
/* Remove entry in polymorphic call target cache hash.  Releases the
   target vector before freeing the entry itself.  */

inline void
polymorphic_call_target_hasher::remove (polymorphic_call_target_d *v)
{
  v->targets.release ();
  free (v);
}
2742
/* Polymorphic call target query cache.  */

typedef hash_table<polymorphic_call_target_hasher>
  polymorphic_call_target_hash_type;
/* Global cache instance; destroyed by free_polymorphic_call_targets_hash.  */
static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
2748
2749 /* Destroy polymorphic call target query cache. */
2750
2751 static void
free_polymorphic_call_targets_hash()2752 free_polymorphic_call_targets_hash ()
2753 {
2754 if (cached_polymorphic_call_targets)
2755 {
2756 delete polymorphic_call_target_hash;
2757 polymorphic_call_target_hash = NULL;
2758 delete cached_polymorphic_call_targets;
2759 cached_polymorphic_call_targets = NULL;
2760 }
2761 }
2762
/* Force rebuilding type inheritance graph from scratch.
   This is use to make sure that we do not keep references to types
   which was not visible to free_lang_data.  */

void
rebuild_type_inheritance_graph ()
{
  if (!odr_hash)
    return;
  delete odr_hash;
  odr_hash = NULL;
  odr_types_ptr = NULL;
  /* The target cache holds entries keyed by the freed ODR types.  */
  free_polymorphic_call_targets_hash ();
}
2777
2778 /* When virtual function is removed, we may need to flush the cache. */
2779
2780 static void
devirt_node_removal_hook(struct cgraph_node * n,void * d ATTRIBUTE_UNUSED)2781 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
2782 {
2783 if (cached_polymorphic_call_targets
2784 && !thunk_expansion
2785 && cached_polymorphic_call_targets->contains (n))
2786 free_polymorphic_call_targets_hash ();
2787 }
2788
2789 /* Look up base of BINFO that has virtual table VTABLE with OFFSET. */
2790
2791 tree
subbinfo_with_vtable_at_offset(tree binfo,unsigned HOST_WIDE_INT offset,tree vtable)2792 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2793 tree vtable)
2794 {
2795 tree v = BINFO_VTABLE (binfo);
2796 int i;
2797 tree base_binfo;
2798 unsigned HOST_WIDE_INT this_offset;
2799
2800 if (v)
2801 {
2802 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2803 gcc_unreachable ();
2804
2805 if (offset == this_offset
2806 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2807 return binfo;
2808 }
2809
2810 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2811 if (polymorphic_type_binfo_p (base_binfo))
2812 {
2813 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2814 if (base_binfo)
2815 return base_binfo;
2816 }
2817 return NULL;
2818 }
2819
/* T is known constant value of virtual table pointer.
   Store virtual table to V and its offset to OFFSET.
   Return false if T does not look like virtual table reference.  */

bool
vtable_pointer_value_to_vtable (const_tree t, tree *v,
				unsigned HOST_WIDE_INT *offset)
{
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain object's BINFO from the context of the virtual table.
     This one contains pointer to virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer matches what
     we propagated through.

     In the case of virtual inheritance, the virtual tables may
     be nested, i.e. the offset may be different from 16 and we may
     need to dive into the type representation.  */
  if (TREE_CODE (t) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
	  == VAR_DECL)
      && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
				       (TREE_OPERAND (t, 0), 0), 0)))
    {
      /* Extract the VAR_DECL and the byte offset from the MEM_REF.  */
      *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
      *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
      return true;
    }

  /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
     We need to handle it when T comes from static variable initializer or
     BINFO.  */
  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
    {
      *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
      t = TREE_OPERAND (t, 0);
    }
  else
    *offset = 0;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;
  *v = TREE_OPERAND (t, 0);
  return true;
}
2867
2868 /* T is known constant value of virtual table pointer. Return BINFO of the
2869 instance type. */
2870
2871 tree
vtable_pointer_value_to_binfo(const_tree t)2872 vtable_pointer_value_to_binfo (const_tree t)
2873 {
2874 tree vtable;
2875 unsigned HOST_WIDE_INT offset;
2876
2877 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2878 return NULL_TREE;
2879
2880 /* FIXME: for stores of construction vtables we return NULL,
2881 because we do not have BINFO for those. Eventually we should fix
2882 our representation to allow this case to be handled, too.
2883 In the case we see store of BINFO we however may assume
2884 that standard folding will be able to cope with it. */
2885 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2886 offset, vtable);
2887 }
2888
2889 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2890 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2891 and insert them in NODES.
2892
2893 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
2894
2895 static void
record_targets_from_bases(tree otr_type,HOST_WIDE_INT otr_token,tree outer_type,HOST_WIDE_INT offset,vec<cgraph_node * > & nodes,hash_set<tree> * inserted,hash_set<tree> * matched_vtables,bool * completep)2896 record_targets_from_bases (tree otr_type,
2897 HOST_WIDE_INT otr_token,
2898 tree outer_type,
2899 HOST_WIDE_INT offset,
2900 vec <cgraph_node *> &nodes,
2901 hash_set<tree> *inserted,
2902 hash_set<tree> *matched_vtables,
2903 bool *completep)
2904 {
2905 while (true)
2906 {
2907 HOST_WIDE_INT pos, size;
2908 tree base_binfo;
2909 tree fld;
2910
2911 if (types_same_for_odr (outer_type, otr_type))
2912 return;
2913
2914 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2915 {
2916 if (TREE_CODE (fld) != FIELD_DECL)
2917 continue;
2918
2919 pos = int_bit_position (fld);
2920 size = tree_to_shwi (DECL_SIZE (fld));
2921 if (pos <= offset && (pos + size) > offset
2922 /* Do not get confused by zero sized bases. */
2923 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2924 break;
2925 }
2926 /* Within a class type we should always find corresponding fields. */
2927 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2928
2929 /* Nonbase types should have been stripped by outer_class_type. */
2930 gcc_assert (DECL_ARTIFICIAL (fld));
2931
2932 outer_type = TREE_TYPE (fld);
2933 offset -= pos;
2934
2935 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2936 offset, otr_type);
2937 if (!base_binfo)
2938 {
2939 gcc_assert (odr_violation_reported);
2940 return;
2941 }
2942 gcc_assert (base_binfo);
2943 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2944 {
2945 bool can_refer;
2946 tree target = gimple_get_virt_method_for_binfo (otr_token,
2947 base_binfo,
2948 &can_refer);
2949 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2950 maybe_record_node (nodes, target, inserted, can_refer, completep);
2951 matched_vtables->add (BINFO_VTABLE (base_binfo));
2952 }
2953 }
2954 }
2955
2956 /* When virtual table is removed, we may need to flush the cache. */
2957
2958 static void
devirt_variable_node_removal_hook(varpool_node * n,void * d ATTRIBUTE_UNUSED)2959 devirt_variable_node_removal_hook (varpool_node *n,
2960 void *d ATTRIBUTE_UNUSED)
2961 {
2962 if (cached_polymorphic_call_targets
2963 && DECL_VIRTUAL_P (n->decl)
2964 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2965 free_polymorphic_call_targets_hash ();
2966 }
2967
/* Record about how many calls would benefit from given type to be final.  */

struct odr_type_warn_count
{
  /* The ODR type the statistic is about.  */
  tree type;
  /* Number of polymorphic calls that would devirtualize were TYPE final.  */
  int count;
  /* Accumulated profile count of those calls.  */
  profile_count dyn_count;
};
2976
/* Record about how many calls would benefit from given method to be final.  */

struct decl_warn_count
{
  /* The method declaration the statistic is about.  */
  tree decl;
  /* Number of polymorphic calls that would devirtualize were DECL final.  */
  int count;
  /* Accumulated profile count of those calls.  */
  profile_count dyn_count;
};
2985
/* Information about type and decl warnings.  */

class final_warning_record
{
public:
  /* If needed grow type_warnings vector and initialize new decl_warn_count
     to have dyn_count set to profile_count::zero ().  */
  void grow_type_warnings (unsigned newlen);

  /* Profile count of the polymorphic call currently being analyzed;
     folded into the per-type/per-decl counters below.  */
  profile_count dyn_count;
  /* Per-type statistics, indexed by odr_type id.  */
  auto_vec<odr_type_warn_count> type_warnings;
  /* Per-method statistics, keyed by the method's decl.  */
  hash_map<tree, decl_warn_count> decl_warnings;
};
2999
3000 void
grow_type_warnings(unsigned newlen)3001 final_warning_record::grow_type_warnings (unsigned newlen)
3002 {
3003 unsigned len = type_warnings.length ();
3004 if (newlen > len)
3005 {
3006 type_warnings.safe_grow_cleared (newlen, true);
3007 for (unsigned i = len; i < newlen; i++)
3008 type_warnings[i].dyn_count = profile_count::zero ();
3009 }
3010 }
3011
/* Global record collecting -Wsuggest-final-types/-Wsuggest-final-methods
   statistics; non-NULL only while those warnings are being computed.  */
class final_warning_record *final_warning_records;
3013
/* Return vector containing possible targets of polymorphic call of type
   OTR_TYPE calling method OTR_TOKEN within the outer type and offset
   described by CONTEXT.  If CONTEXT.maybe_in_construction is set, walk
   also base types of the outer type and include their virtual methods.
   This is useful for types possibly in construction or destruction where
   the virtual table may temporarily change to one of base types.
   If CONTEXT.maybe_derived_type is set, also walk the inheritance graph
   for all derivations.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
   in the target cache.  If user needs to visit every target list
   just once, it can memoize them.

   If SPECULATIVE is set, the list will not contain targets that
   are not speculatively taken.

   Returned vector is placed into cache.  It is NOT caller's responsibility
   to free it.  The vector can be freed on cgraph_remove_node call if
   the particular node is a virtual function present in the cache.  */

vec <cgraph_node *>
possible_polymorphic_call_targets (tree otr_type,
				   HOST_WIDE_INT otr_token,
				   ipa_polymorphic_call_context context,
				   bool *completep,
				   void **cache_token,
				   bool speculative)
{
  static struct cgraph_node_hook_list *node_removal_hook_holder;
  vec <cgraph_node *> nodes = vNULL;
  auto_vec <tree, 8> bases_to_consider;
  odr_type type, outer_type;
  polymorphic_call_target_d key;
  polymorphic_call_target_d **slot;
  unsigned int i;
  tree binfo, target;
  bool complete;
  bool can_refer = false;
  bool skipped = false;

  otr_type = TYPE_MAIN_VARIANT (otr_type);

  /* If ODR is not initialized or the context is invalid, return empty
     incomplete list.  */
  if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
    {
      if (completep)
	*completep = context.invalid;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }

  /* Do not bother to compute speculative info when the user does not
     ask for it.  */
  if (!speculative || !context.speculative_outer_type)
    context.clear_speculation ();

  type = get_odr_type (otr_type, true);

  /* Recording type variants would waste results cache.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* Look up the outer class type we want to walk.
     If we fail to do so, the context is invalid.  */
  if ((context.outer_type || context.speculative_outer_type)
      && !context.restrict_to_inner_class (otr_type))
    {
      if (completep)
	*completep = true;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }
  gcc_assert (!context.invalid);

  /* Check that restrict_to_inner_class kept the main variant.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* We canonicalize our query, so we do not need extra hashtable entries.  */

  /* Without outer type, we have no use for offset.  Just do the
     basic search from inner type.  */
  if (!context.outer_type)
    context.clear_outer_type (otr_type);
  /* We need to update our hierarchy if the type does not exist.  */
  outer_type = get_odr_type (context.outer_type, true);
  /* If the type is complete, there are no derivations.  */
  if (TYPE_FINAL_P (outer_type->type))
    context.maybe_derived_type = false;

  /* Initialize query cache.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
      polymorphic_call_target_hash
       	= new polymorphic_call_target_hash_type (23);
      if (!node_removal_hook_holder)
	{
	  node_removal_hook_holder =
	    symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
	  symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
					    NULL);
	}
    }

  if (in_lto_p)
    {
      if (context.outer_type != otr_type)
        context.outer_type
	  = get_odr_type (context.outer_type, true)->type;
      if (context.speculative_outer_type)
        context.speculative_outer_type
	  = get_odr_type (context.speculative_outer_type, true)->type;
    }

  /* Look up cached answer.  */
  key.type = type;
  key.otr_token = otr_token;
  key.speculative = speculative;
  key.context = context;
  key.n_odr_types = odr_types.length ();
  slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
  if (cache_token)
    *cache_token = (void *)*slot;
  if (*slot)
    {
      if (completep)
	*completep = (*slot)->complete;
      /* type_warning is the odr_type id + 1; 0 means no warning.  Update
	 the -Wsuggest-final-types statistics for the cache hit.  */
      if ((*slot)->type_warning && final_warning_records)
	{
	  final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
	  if (!final_warning_records->type_warnings
		[(*slot)->type_warning - 1].dyn_count.initialized_p ())
	    final_warning_records->type_warnings
	       [(*slot)->type_warning - 1].dyn_count = profile_count::zero ();
	  if (final_warning_records->dyn_count > 0)
	    final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
	      = final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
	        + final_warning_records->dyn_count;
	}
      /* Likewise update -Wsuggest-final-methods statistics.  */
      if (!speculative && (*slot)->decl_warning && final_warning_records)
	{
	  struct decl_warn_count *c =
	     final_warning_records->decl_warnings.get ((*slot)->decl_warning);
	  c->count++;
	  if (final_warning_records->dyn_count > 0)
	    c->dyn_count += final_warning_records->dyn_count;
	}
      return (*slot)->targets;
    }

  complete = true;

  /* Do actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *)*slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;
  (*slot)->speculative = speculative;

  hash_set<tree> inserted;
  hash_set<tree> matched_vtables;

  /* First insert targets we speculatively identified as likely.  */
  if (context.speculative_outer_type)
    {
      odr_type speculative_outer_type;
      bool speculation_complete = true;
      bool check_derived_types = false;

      /* First insert target from type itself and check if it may have
	 derived types.  */
      speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
      if (TYPE_FINAL_P (speculative_outer_type->type))
	context.speculative_maybe_derived_type = false;
      binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
				   context.speculative_offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	target = NULL;

      /* In the case we get complete method, we don't need
	 to walk derivations.  */
      if (target && DECL_FINAL_P (target))
	context.speculative_maybe_derived_type = false;
      if (check_derived_types
	  ? type_or_derived_type_possibly_instantiated_p
		 (speculative_outer_type)
	  : type_possibly_instantiated_p (speculative_outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer,
			   &speculation_complete);
      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));


      /* Next walk recursively all derived types.  */
      if (context.speculative_maybe_derived_type)
	for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
	  possible_polymorphic_call_targets_1 (nodes, &inserted,
					       &matched_vtables,
					       otr_type,
					       speculative_outer_type->derived_types[i],
					       otr_token, speculative_outer_type->type,
					       context.speculative_offset,
					       &speculation_complete,
					       bases_to_consider,
					       false);
    }

  /* Non-speculative targets are computed unless the speculative walk
     above already produced some targets and only they were asked for.  */
  if (!speculative || !nodes.length ())
    {
      bool check_derived_types = false;
      /* First see virtual method of type itself.  */
      binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
				   context.offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	{
	  gcc_assert (odr_violation_reported);
	  target = NULL;
	}

      /* Destructors are never called through construction virtual tables,
	 because the type is always known.  */
      if (target && DECL_CXX_DESTRUCTOR_P (target))
	context.maybe_in_construction = false;

      /* In the case we get complete method, we don't need
	 to walk derivations.  */
      if (target && DECL_FINAL_P (target))
	{
	  check_derived_types = true;
	  context.maybe_derived_type = false;
	}

      /* If OUTER_TYPE is abstract, we know we are not seeing its instance.  */
      if (check_derived_types
	  ? type_or_derived_type_possibly_instantiated_p (outer_type)
	  : type_possibly_instantiated_p (outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &complete);
      else
	skipped = true;

      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));

      /* Next walk recursively all derived types.  */
      if (context.maybe_derived_type)
	{
	  for (i = 0; i < outer_type->derived_types.length(); i++)
	    possible_polymorphic_call_targets_1 (nodes, &inserted,
						 &matched_vtables,
						 otr_type,
						 outer_type->derived_types[i],
						 otr_token, outer_type->type,
						 context.offset, &complete,
						 bases_to_consider,
						 context.maybe_in_construction);

	  if (!outer_type->all_derivations_known)
	    {
	      /* The list cannot be complete; see whether it is worth a
		 -Wsuggest-final-types/-Wsuggest-final-methods statistic
		 entry (exactly one known target so far).  */
	      if (!speculative && final_warning_records
		  && nodes.length () == 1
		  && TREE_CODE (TREE_TYPE (nodes[0]->decl)) == METHOD_TYPE)
		{
		  if (complete
		      && warn_suggest_final_types
		      && !outer_type->derived_types.length ())
		    {
		      final_warning_records->grow_type_warnings
			(outer_type->id);
		      final_warning_records->type_warnings[outer_type->id].count++;
		      if (!final_warning_records->type_warnings
				[outer_type->id].dyn_count.initialized_p ())
			final_warning_records->type_warnings
			   [outer_type->id].dyn_count = profile_count::zero ();
		      final_warning_records->type_warnings[outer_type->id].dyn_count
			+= final_warning_records->dyn_count;
		      final_warning_records->type_warnings[outer_type->id].type
			= outer_type->type;
		      (*slot)->type_warning = outer_type->id + 1;
		    }
		  if (complete
		      && warn_suggest_final_methods
		      && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
					     outer_type->type))
		    {
		      bool existed;
		      struct decl_warn_count &c =
			 final_warning_records->decl_warnings.get_or_insert
			    (nodes[0]->decl, &existed);

		      if (existed)
			{
			  c.count++;
			  c.dyn_count += final_warning_records->dyn_count;
			}
		      else
			{
			  c.count = 1;
			  c.dyn_count = final_warning_records->dyn_count;
			  c.decl = nodes[0]->decl;
			}
		      (*slot)->decl_warning = nodes[0]->decl;
		    }
		}
	      complete = false;
	    }
	}

      if (!speculative)
	{
	  /* Destructors are never called through construction virtual tables,
	     because the type is always known.  One of entries may be
	     cxa_pure_virtual so look to at least two of them.  */
	  if (context.maybe_in_construction)
	    for (i =0 ; i < MIN (nodes.length (), 2); i++)
	      if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
		context.maybe_in_construction = false;
	  if (context.maybe_in_construction)
	    {
	      if (type != outer_type
		  && (!skipped
		      || (context.maybe_derived_type
			  && !type_all_derivations_known_p (outer_type->type))))
		record_targets_from_bases (otr_type, otr_token, outer_type->type,
					   context.offset, nodes, &inserted,
					   &matched_vtables, &complete);
	      if (skipped)
		maybe_record_node (nodes, target, &inserted, can_refer, &complete);
	      for (i = 0; i < bases_to_consider.length(); i++)
		maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
	    }
	}
    }

  /* Store the computed answer into the cache entry allocated above.  */
  (*slot)->targets = nodes;
  (*slot)->complete = complete;
  (*slot)->n_odr_types = odr_types.length ();
  if (completep)
    *completep = complete;

  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}
3368
/* Traversal callback for final_warning_record::decl_warnings: push a
   pointer to each counter record VALUE into VEC so the records can be
   sorted afterwards.  Always return true to continue the traversal.  */

bool
add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
		  vec<const decl_warn_count*> *vec)
{
  vec->safe_push (&value);
  return true;
}
3376
3377 /* Dump target list TARGETS into FILE. */
3378
3379 static void
dump_targets(FILE * f,vec<cgraph_node * > targets,bool verbose)3380 dump_targets (FILE *f, vec <cgraph_node *> targets, bool verbose)
3381 {
3382 unsigned int i;
3383
3384 for (i = 0; i < targets.length (); i++)
3385 {
3386 char *name = NULL;
3387 if (in_lto_p)
3388 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
3389 fprintf (f, " %s", name ? name : targets[i]->dump_name ());
3390 if (in_lto_p)
3391 free (name);
3392 if (!targets[i]->definition)
3393 fprintf (f, " (no definition%s)",
3394 DECL_DECLARED_INLINE_P (targets[i]->decl)
3395 ? " inline" : "");
3396 /* With many targets for every call polymorphic dumps are going to
3397 be quadratic in size. */
3398 if (i > 10 && !verbose)
3399 {
3400 fprintf (f, " ... and %i more targets\n", targets.length () - i);
3401 return;
3402 }
3403 }
3404 fprintf (f, "\n");
3405 }
3406
3407 /* Dump all possible targets of a polymorphic call. */
3408
3409 void
dump_possible_polymorphic_call_targets(FILE * f,tree otr_type,HOST_WIDE_INT otr_token,const ipa_polymorphic_call_context & ctx,bool verbose)3410 dump_possible_polymorphic_call_targets (FILE *f,
3411 tree otr_type,
3412 HOST_WIDE_INT otr_token,
3413 const ipa_polymorphic_call_context &ctx,
3414 bool verbose)
3415 {
3416 vec <cgraph_node *> targets;
3417 bool final;
3418 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
3419 unsigned int len;
3420
3421 if (!type)
3422 return;
3423 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3424 ctx,
3425 &final, NULL, false);
3426 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
3427 print_generic_expr (f, type->type, TDF_SLIM);
3428 fprintf (f, " token %i\n", (int)otr_token);
3429
3430 ctx.dump (f);
3431
3432 fprintf (f, " %s%s%s%s\n ",
3433 final ? "This is a complete list." :
3434 "This is partial list; extra targets may be defined in other units.",
3435 ctx.maybe_in_construction ? " (base types included)" : "",
3436 ctx.maybe_derived_type ? " (derived types included)" : "",
3437 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
3438 len = targets.length ();
3439 dump_targets (f, targets, verbose);
3440
3441 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3442 ctx,
3443 &final, NULL, true);
3444 if (targets.length () != len)
3445 {
3446 fprintf (f, " Speculative targets:");
3447 dump_targets (f, targets, verbose);
3448 }
3449 /* Ugly: during callgraph construction the target cache may get populated
3450 before all targets are found. While this is harmless (because all local
3451 types are discovered and only in those case we devirtualize fully and we
3452 don't do speculative devirtualization before IPA stage) it triggers
3453 assert here when dumping at that stage also populates the case with
3454 speculative targets. Quietly ignore this. */
3455 gcc_assert (symtab->state < IPA_SSA || targets.length () <= len);
3456 fprintf (f, "\n");
3457 }
3458
3459
3460 /* Return true if N can be possibly target of a polymorphic call of
3461 OTR_TYPE/OTR_TOKEN. */
3462
3463 bool
possible_polymorphic_call_target_p(tree otr_type,HOST_WIDE_INT otr_token,const ipa_polymorphic_call_context & ctx,struct cgraph_node * n)3464 possible_polymorphic_call_target_p (tree otr_type,
3465 HOST_WIDE_INT otr_token,
3466 const ipa_polymorphic_call_context &ctx,
3467 struct cgraph_node *n)
3468 {
3469 vec <cgraph_node *> targets;
3470 unsigned int i;
3471 bool final;
3472
3473 if (fndecl_built_in_p (n->decl, BUILT_IN_UNREACHABLE)
3474 || fndecl_built_in_p (n->decl, BUILT_IN_TRAP))
3475 return true;
3476
3477 if (is_cxa_pure_virtual_p (n->decl))
3478 return true;
3479
3480 if (!odr_hash)
3481 return true;
3482 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
3483 for (i = 0; i < targets.length (); i++)
3484 if (n->semantically_equivalent_p (targets[i]))
3485 return true;
3486
3487 /* At a moment we allow middle end to dig out new external declarations
3488 as a targets of polymorphic calls. */
3489 if (!final && !n->definition)
3490 return true;
3491 return false;
3492 }
3493
3494
3495
3496 /* Return true if N can be possibly target of a polymorphic call of
3497 OBJ_TYPE_REF expression REF in STMT. */
3498
3499 bool
possible_polymorphic_call_target_p(tree ref,gimple * stmt,struct cgraph_node * n)3500 possible_polymorphic_call_target_p (tree ref,
3501 gimple *stmt,
3502 struct cgraph_node *n)
3503 {
3504 ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
3505 tree call_fn = gimple_call_fn (stmt);
3506
3507 return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
3508 tree_to_uhwi
3509 (OBJ_TYPE_REF_TOKEN (call_fn)),
3510 context,
3511 n);
3512 }
3513
3514
3515 /* After callgraph construction new external nodes may appear.
3516 Add them into the graph. */
3517
3518 void
update_type_inheritance_graph(void)3519 update_type_inheritance_graph (void)
3520 {
3521 struct cgraph_node *n;
3522
3523 if (!odr_hash)
3524 return;
3525 free_polymorphic_call_targets_hash ();
3526 timevar_push (TV_IPA_INHERITANCE);
3527 /* We reconstruct the graph starting from types of all methods seen in the
3528 unit. */
3529 FOR_EACH_FUNCTION (n)
3530 if (DECL_VIRTUAL_P (n->decl)
3531 && !n->definition
3532 && n->real_symbol_p ())
3533 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
3534 timevar_pop (TV_IPA_INHERITANCE);
3535 }
3536
3537
3538 /* Return true if N looks like likely target of a polymorphic call.
3539 Rule out cxa_pure_virtual, noreturns, function declared cold and
3540 other obvious cases. */
3541
3542 bool
likely_target_p(struct cgraph_node * n)3543 likely_target_p (struct cgraph_node *n)
3544 {
3545 int flags;
3546 /* cxa_pure_virtual and similar things are not likely. */
3547 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
3548 return false;
3549 flags = flags_from_decl_or_type (n->decl);
3550 if (flags & ECF_NORETURN)
3551 return false;
3552 if (lookup_attribute ("cold",
3553 DECL_ATTRIBUTES (n->decl)))
3554 return false;
3555 if (n->frequency < NODE_FREQUENCY_NORMAL)
3556 return false;
3557 /* If there are no live virtual tables referring the target,
3558 the only way the target can be called is an instance coming from other
3559 compilation unit; speculative devirtualization is built around an
3560 assumption that won't happen. */
3561 if (!referenced_from_vtable_p (n))
3562 return false;
3563 return true;
3564 }
3565
3566 /* Compare type warning records P1 and P2 and choose one with larger count;
3567 helper for qsort. */
3568
3569 static int
type_warning_cmp(const void * p1,const void * p2)3570 type_warning_cmp (const void *p1, const void *p2)
3571 {
3572 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3573 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3574
3575 if (t1->dyn_count < t2->dyn_count)
3576 return 1;
3577 if (t1->dyn_count > t2->dyn_count)
3578 return -1;
3579 return t2->count - t1->count;
3580 }
3581
3582 /* Compare decl warning records P1 and P2 and choose one with larger count;
3583 helper for qsort. */
3584
3585 static int
decl_warning_cmp(const void * p1,const void * p2)3586 decl_warning_cmp (const void *p1, const void *p2)
3587 {
3588 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3589 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3590
3591 if (t1->dyn_count < t2->dyn_count)
3592 return 1;
3593 if (t1->dyn_count > t2->dyn_count)
3594 return -1;
3595 return t2->count - t1->count;
3596 }
3597
3598
3599 /* Try to speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
3600 context CTX. */
3601
3602 struct cgraph_node *
try_speculative_devirtualization(tree otr_type,HOST_WIDE_INT otr_token,ipa_polymorphic_call_context ctx)3603 try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
3604 ipa_polymorphic_call_context ctx)
3605 {
3606 vec <cgraph_node *>targets
3607 = possible_polymorphic_call_targets
3608 (otr_type, otr_token, ctx, NULL, NULL, true);
3609 unsigned int i;
3610 struct cgraph_node *likely_target = NULL;
3611
3612 for (i = 0; i < targets.length (); i++)
3613 if (likely_target_p (targets[i]))
3614 {
3615 if (likely_target)
3616 return NULL;
3617 likely_target = targets[i];
3618 }
3619 if (!likely_target
3620 ||!likely_target->definition
3621 || DECL_EXTERNAL (likely_target->decl))
3622 return NULL;
3623
3624 /* Don't use an implicitly-declared destructor (c++/58678). */
3625 struct cgraph_node *non_thunk_target
3626 = likely_target->function_symbol ();
3627 if (DECL_ARTIFICIAL (non_thunk_target->decl))
3628 return NULL;
3629 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3630 && likely_target->can_be_discarded_p ())
3631 return NULL;
3632 return likely_target;
3633 }
3634
3635 /* The ipa-devirt pass.
3636 When polymorphic call has only one likely target in the unit,
3637 turn it into a speculative call. */
3638
3639 static unsigned int
ipa_devirt(void)3640 ipa_devirt (void)
3641 {
3642 struct cgraph_node *n;
3643 hash_set<void *> bad_call_targets;
3644 struct cgraph_edge *e;
3645
3646 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
3647 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
3648 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
3649 int ndropped = 0;
3650
3651 if (!odr_types_ptr)
3652 return 0;
3653
3654 if (dump_file)
3655 dump_type_inheritance_graph (dump_file);
3656
3657 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
3658 This is implemented by setting up final_warning_records that are updated
3659 by get_polymorphic_call_targets.
3660 We need to clear cache in this case to trigger recomputation of all
3661 entries. */
3662 if (warn_suggest_final_methods || warn_suggest_final_types)
3663 {
3664 final_warning_records = new (final_warning_record);
3665 final_warning_records->dyn_count = profile_count::zero ();
3666 final_warning_records->grow_type_warnings (odr_types.length ());
3667 free_polymorphic_call_targets_hash ();
3668 }
3669
3670 FOR_EACH_DEFINED_FUNCTION (n)
3671 {
3672 bool update = false;
3673 if (!opt_for_fn (n->decl, flag_devirtualize))
3674 continue;
3675 if (dump_file && n->indirect_calls)
3676 fprintf (dump_file, "\n\nProcesing function %s\n",
3677 n->dump_name ());
3678 for (e = n->indirect_calls; e; e = e->next_callee)
3679 if (e->indirect_info->polymorphic)
3680 {
3681 struct cgraph_node *likely_target = NULL;
3682 void *cache_token;
3683 bool final;
3684
3685 if (final_warning_records)
3686 final_warning_records->dyn_count = e->count.ipa ();
3687
3688 vec <cgraph_node *>targets
3689 = possible_polymorphic_call_targets
3690 (e, &final, &cache_token, true);
3691 unsigned int i;
3692
3693 /* Trigger warnings by calculating non-speculative targets. */
3694 if (warn_suggest_final_methods || warn_suggest_final_types)
3695 possible_polymorphic_call_targets (e);
3696
3697 if (dump_file)
3698 dump_possible_polymorphic_call_targets
3699 (dump_file, e, (dump_flags & TDF_DETAILS));
3700
3701 npolymorphic++;
3702
3703 /* See if the call can be devirtualized by means of ipa-prop's
3704 polymorphic call context propagation. If not, we can just
3705 forget about this call being polymorphic and avoid some heavy
3706 lifting in remove_unreachable_nodes that will otherwise try to
3707 keep all possible targets alive until inlining and in the inliner
3708 itself.
3709
3710 This may need to be revisited once we add further ways to use
3711 the may edges, but it is a reasonable thing to do right now. */
3712
3713 if ((e->indirect_info->param_index == -1
3714 || (!opt_for_fn (n->decl, flag_devirtualize_speculatively)
3715 && e->indirect_info->vptr_changed))
3716 && !flag_ltrans_devirtualize)
3717 {
3718 e->indirect_info->polymorphic = false;
3719 ndropped++;
3720 if (dump_file)
3721 fprintf (dump_file, "Dropping polymorphic call info;"
3722 " it cannot be used by ipa-prop\n");
3723 }
3724
3725 if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
3726 continue;
3727
3728 if (!e->maybe_hot_p ())
3729 {
3730 if (dump_file)
3731 fprintf (dump_file, "Call is cold\n\n");
3732 ncold++;
3733 continue;
3734 }
3735 if (e->speculative)
3736 {
3737 if (dump_file)
3738 fprintf (dump_file, "Call is already speculated\n\n");
3739 nspeculated++;
3740
3741 /* When dumping see if we agree with speculation. */
3742 if (!dump_file)
3743 continue;
3744 }
3745 if (bad_call_targets.contains (cache_token))
3746 {
3747 if (dump_file)
3748 fprintf (dump_file, "Target list is known to be useless\n\n");
3749 nmultiple++;
3750 continue;
3751 }
3752 for (i = 0; i < targets.length (); i++)
3753 if (likely_target_p (targets[i]))
3754 {
3755 if (likely_target)
3756 {
3757 likely_target = NULL;
3758 if (dump_file)
3759 fprintf (dump_file, "More than one likely target\n\n");
3760 nmultiple++;
3761 break;
3762 }
3763 likely_target = targets[i];
3764 }
3765 if (!likely_target)
3766 {
3767 bad_call_targets.add (cache_token);
3768 continue;
3769 }
3770 /* This is reached only when dumping; check if we agree or disagree
3771 with the speculation. */
3772 if (e->speculative)
3773 {
3774 bool found = e->speculative_call_for_target (likely_target);
3775 if (found)
3776 {
3777 fprintf (dump_file, "We agree with speculation\n\n");
3778 nok++;
3779 }
3780 else
3781 {
3782 fprintf (dump_file, "We disagree with speculation\n\n");
3783 nwrong++;
3784 }
3785 continue;
3786 }
3787 if (!likely_target->definition)
3788 {
3789 if (dump_file)
3790 fprintf (dump_file, "Target is not a definition\n\n");
3791 nnotdefined++;
3792 continue;
3793 }
	  /* Do not introduce new references to external symbols.  While we
	     could handle these just fine, it is common for programs to be
	     compiled incorrectly against headers that declare methods which
	     are never actually linked in.  */
3798 if (DECL_EXTERNAL (likely_target->decl))
3799 {
3800 if (dump_file)
3801 fprintf (dump_file, "Target is external\n\n");
3802 nexternal++;
3803 continue;
3804 }
3805 /* Don't use an implicitly-declared destructor (c++/58678). */
3806 struct cgraph_node *non_thunk_target
3807 = likely_target->function_symbol ();
3808 if (DECL_ARTIFICIAL (non_thunk_target->decl))
3809 {
3810 if (dump_file)
3811 fprintf (dump_file, "Target is artificial\n\n");
3812 nartificial++;
3813 continue;
3814 }
3815 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3816 && likely_target->can_be_discarded_p ())
3817 {
3818 if (dump_file)
3819 fprintf (dump_file, "Target is overwritable\n\n");
3820 noverwritable++;
3821 continue;
3822 }
3823 else if (dbg_cnt (devirt))
3824 {
3825 if (dump_enabled_p ())
3826 {
3827 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, e->call_stmt,
3828 "speculatively devirtualizing call "
3829 "in %s to %s\n",
3830 n->dump_name (),
3831 likely_target->dump_name ());
3832 }
3833 if (!likely_target->can_be_discarded_p ())
3834 {
3835 cgraph_node *alias;
3836 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
3837 if (alias)
3838 likely_target = alias;
3839 }
3840 nconverted++;
3841 update = true;
3842 e->make_speculative
3843 (likely_target, e->count.apply_scale (8, 10));
3844 }
3845 }
3846 if (update)
3847 ipa_update_overall_fn_summary (n);
3848 }
3849 if (warn_suggest_final_methods || warn_suggest_final_types)
3850 {
3851 if (warn_suggest_final_types)
3852 {
3853 final_warning_records->type_warnings.qsort (type_warning_cmp);
3854 for (unsigned int i = 0;
3855 i < final_warning_records->type_warnings.length (); i++)
3856 if (final_warning_records->type_warnings[i].count)
3857 {
3858 tree type = final_warning_records->type_warnings[i].type;
3859 int count = final_warning_records->type_warnings[i].count;
3860 profile_count dyn_count
3861 = final_warning_records->type_warnings[i].dyn_count;
3862
3863 if (!(dyn_count > 0))
3864 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3865 OPT_Wsuggest_final_types, count,
3866 "Declaring type %qD final "
3867 "would enable devirtualization of %i call",
3868 "Declaring type %qD final "
3869 "would enable devirtualization of %i calls",
3870 type,
3871 count);
3872 else
3873 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3874 OPT_Wsuggest_final_types, count,
3875 "Declaring type %qD final "
3876 "would enable devirtualization of %i call "
3877 "executed %lli times",
3878 "Declaring type %qD final "
3879 "would enable devirtualization of %i calls "
3880 "executed %lli times",
3881 type,
3882 count,
3883 (long long) dyn_count.to_gcov_type ());
3884 }
3885 }
3886
3887 if (warn_suggest_final_methods)
3888 {
3889 auto_vec<const decl_warn_count*> decl_warnings_vec;
3890
3891 final_warning_records->decl_warnings.traverse
3892 <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
3893 decl_warnings_vec.qsort (decl_warning_cmp);
3894 for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
3895 {
3896 tree decl = decl_warnings_vec[i]->decl;
3897 int count = decl_warnings_vec[i]->count;
3898 profile_count dyn_count
3899 = decl_warnings_vec[i]->dyn_count;
3900
3901 if (!(dyn_count > 0))
3902 if (DECL_CXX_DESTRUCTOR_P (decl))
3903 warning_n (DECL_SOURCE_LOCATION (decl),
3904 OPT_Wsuggest_final_methods, count,
3905 "Declaring virtual destructor of %qD final "
3906 "would enable devirtualization of %i call",
3907 "Declaring virtual destructor of %qD final "
3908 "would enable devirtualization of %i calls",
3909 DECL_CONTEXT (decl), count);
3910 else
3911 warning_n (DECL_SOURCE_LOCATION (decl),
3912 OPT_Wsuggest_final_methods, count,
3913 "Declaring method %qD final "
3914 "would enable devirtualization of %i call",
3915 "Declaring method %qD final "
3916 "would enable devirtualization of %i calls",
3917 decl, count);
3918 else if (DECL_CXX_DESTRUCTOR_P (decl))
3919 warning_n (DECL_SOURCE_LOCATION (decl),
3920 OPT_Wsuggest_final_methods, count,
3921 "Declaring virtual destructor of %qD final "
3922 "would enable devirtualization of %i call "
3923 "executed %lli times",
3924 "Declaring virtual destructor of %qD final "
3925 "would enable devirtualization of %i calls "
3926 "executed %lli times",
3927 DECL_CONTEXT (decl), count,
3928 (long long)dyn_count.to_gcov_type ());
3929 else
3930 warning_n (DECL_SOURCE_LOCATION (decl),
3931 OPT_Wsuggest_final_methods, count,
3932 "Declaring method %qD final "
3933 "would enable devirtualization of %i call "
3934 "executed %lli times",
3935 "Declaring method %qD final "
3936 "would enable devirtualization of %i calls "
3937 "executed %lli times",
3938 decl, count,
3939 (long long)dyn_count.to_gcov_type ());
3940 }
3941 }
3942
3943 delete (final_warning_records);
3944 final_warning_records = 0;
3945 }
3946
3947 if (dump_file)
3948 fprintf (dump_file,
3949 "%i polymorphic calls, %i devirtualized,"
3950 " %i speculatively devirtualized, %i cold\n"
3951 "%i have multiple targets, %i overwritable,"
3952 " %i already speculated (%i agree, %i disagree),"
3953 " %i external, %i not defined, %i artificial, %i infos dropped\n",
3954 npolymorphic, ndevirtualized, nconverted, ncold,
3955 nmultiple, noverwritable, nspeculated, nok, nwrong,
3956 nexternal, nnotdefined, nartificial, ndropped);
3957 return ndevirtualized || ndropped ? TODO_remove_functions : 0;
3958 }
3959
3960 namespace {
3961
/* Pass descriptor for the IPA devirtualization pass implemented by
   class pass_ipa_devirt below.  The symbol table is dumped after the
   pass runs (TODO_dump_symtab).  */

const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};
3974
3975 class pass_ipa_devirt : public ipa_opt_pass_d
3976 {
3977 public:
pass_ipa_devirt(gcc::context * ctxt)3978 pass_ipa_devirt (gcc::context *ctxt)
3979 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3980 NULL, /* generate_summary */
3981 NULL, /* write_summary */
3982 NULL, /* read_summary */
3983 NULL, /* write_optimization_summary */
3984 NULL, /* read_optimization_summary */
3985 NULL, /* stmt_fixup */
3986 0, /* function_transform_todo_flags_start */
3987 NULL, /* function_transform */
3988 NULL) /* variable_transform */
3989 {}
3990
3991 /* opt_pass methods: */
gate(function *)3992 virtual bool gate (function *)
3993 {
3994 /* In LTO, always run the IPA passes and decide on function basis if the
3995 pass is enabled. */
3996 if (in_lto_p)
3997 return true;
3998 return (flag_devirtualize
3999 && (flag_devirtualize_speculatively
4000 || (warn_suggest_final_methods
4001 || warn_suggest_final_types))
4002 && optimize);
4003 }
4004
execute(function *)4005 virtual unsigned int execute (function *) { return ipa_devirt (); }
4006
4007 }; // class pass_ipa_devirt
4008
4009 } // anon namespace
4010
/* Create an instance of the IPA devirtualization pass for the pass
   manager.  CTXT is the compiler context owning the pass.  */

ipa_opt_pass_d *
make_pass_ipa_devirt (gcc::context *ctxt)
{
  return new pass_ipa_devirt (ctxt);
}
4016
4017 /* Print ODR name of a TYPE if available.
4018 Use demangler when option DEMANGLE is used. */
4019
4020 DEBUG_FUNCTION void
debug_tree_odr_name(tree type,bool demangle)4021 debug_tree_odr_name (tree type, bool demangle)
4022 {
4023 const char *odr = get_odr_name_for_type (type);
4024 if (demangle)
4025 {
4026 const int opts = DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES;
4027 odr = cplus_demangle (odr, opts);
4028 }
4029
4030 fprintf (stderr, "%s\n", odr);
4031 }
4032
/* Register ODR enum T so we later stream the record about its values.
   Only collected when LTO bytecode is going to be produced, since the
   record is consumed by ipa_odr_summary_write.  */

void
register_odr_enum (tree t)
{
  if (flag_lto)
    vec_safe_push (odr_enums, t);
}
4041
/* Write ODR enums to LTO stream file.  The section is consumed by
   ipa_odr_read_section; the layout (count, then per-enum name, value
   count, value names/constants, and a location bitpack) must stay in
   sync with the reader.  */

static void
ipa_odr_summary_write (void)
{
  /* Nothing to stream when no enum was registered nor read in.  */
  if (!odr_enums && !odr_enum_map)
    return;
  struct output_block *ob = create_output_block (LTO_section_odr_types);
  unsigned int i;
  tree t;

  /* Normal compilation: enums were collected by register_odr_enum.  */
  if (odr_enums)
    {
      streamer_write_uhwi (ob, odr_enums->length ());

      /* For every ODR enum stream out
	   - its ODR name
	   - number of values,
	   - value names and the constants they represent
	   - bitpack of locations so we can do good diagnostics.  */
      FOR_EACH_VEC_ELT (*odr_enums, i, t)
	{
	  streamer_write_string (ob, ob->main_stream,
				 IDENTIFIER_POINTER
				     (DECL_ASSEMBLER_NAME (TYPE_NAME (t))),
				 true);

	  /* Count the enumerators first; the count prefixes the list.  */
	  int n = 0;
	  for (tree e = TYPE_VALUES (t); e; e = TREE_CHAIN (e))
	    n++;
	  streamer_write_uhwi (ob, n);
	  for (tree e = TYPE_VALUES (t); e; e = TREE_CHAIN (e))
	    {
	      streamer_write_string (ob, ob->main_stream,
				     IDENTIFIER_POINTER (TREE_PURPOSE (e)),
				     true);
	      streamer_write_wide_int (ob,
				       wi::to_wide (DECL_INITIAL
						    (TREE_VALUE (e))));
	    }

	  /* Locations go last, packed together: the enum's own location
	     followed by one location per enumerator.  */
	  bitpack_d bp = bitpack_create (ob->main_stream);
	  lto_output_location (ob, &bp, DECL_SOURCE_LOCATION (TYPE_NAME (t)));
	  for (tree e = TYPE_VALUES (t); e; e = TREE_CHAIN (e))
	    lto_output_location (ob, &bp,
				 DECL_SOURCE_LOCATION (TREE_VALUE (e)));
	  streamer_write_bitpack (&bp);
	}
      vec_free (odr_enums);
      odr_enums = NULL;
    }
  /* During LTO incremental linking we already have streamed in types.
     Re-stream the map built by ipa_odr_read_section in the same
     layout.  */
  else if (odr_enum_map)
    {
      gcc_checking_assert (!odr_enums);
      streamer_write_uhwi (ob, odr_enum_map->elements ());

      hash_map<nofree_string_hash, odr_enum>::iterator iter
		= odr_enum_map->begin ();
      for (; iter != odr_enum_map->end (); ++iter)
	{
	  odr_enum &this_enum = (*iter).second;
	  /* The map key is the ODR (mangled) name.  */
	  streamer_write_string (ob, ob->main_stream, (*iter).first, true);

	  streamer_write_uhwi (ob, this_enum.vals.length ());
	  for (unsigned j = 0; j < this_enum.vals.length (); j++)
	    {
	      streamer_write_string (ob, ob->main_stream,
				     this_enum.vals[j].name, true);
	      streamer_write_wide_int (ob, this_enum.vals[j].val);
	    }

	  bitpack_d bp = bitpack_create (ob->main_stream);
	  lto_output_location (ob, &bp, this_enum.locus);
	  for (unsigned j = 0; j < this_enum.vals.length (); j++)
	    lto_output_location (ob, &bp, this_enum.vals[j].locus);
	  streamer_write_bitpack (&bp);
	}

      /* The map (and the obstack backing its strings) is no longer
	 needed once re-streamed.  */
      delete odr_enum_map;
      obstack_free (&odr_enum_obstack, NULL);
      odr_enum_map = NULL;
    }

  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
4129
4130 /* Write ODR enums from LTO stream file and warn on mismatches. */
4131
4132 static void
ipa_odr_read_section(struct lto_file_decl_data * file_data,const char * data,size_t len)4133 ipa_odr_read_section (struct lto_file_decl_data *file_data, const char *data,
4134 size_t len)
4135 {
4136 const struct lto_function_header *header
4137 = (const struct lto_function_header *) data;
4138 const int cfg_offset = sizeof (struct lto_function_header);
4139 const int main_offset = cfg_offset + header->cfg_size;
4140 const int string_offset = main_offset + header->main_size;
4141 class data_in *data_in;
4142
4143 lto_input_block ib ((const char *) data + main_offset, header->main_size,
4144 file_data->mode_table);
4145
4146 data_in
4147 = lto_data_in_create (file_data, (const char *) data + string_offset,
4148 header->string_size, vNULL);
4149 unsigned int n = streamer_read_uhwi (&ib);
4150
4151 if (!odr_enum_map)
4152 {
4153 gcc_obstack_init (&odr_enum_obstack);
4154 odr_enum_map = new (hash_map <nofree_string_hash, odr_enum>);
4155 }
4156
4157 for (unsigned i = 0; i < n; i++)
4158 {
4159 const char *rname = streamer_read_string (data_in, &ib);
4160 unsigned int nvals = streamer_read_uhwi (&ib);
4161 char *name;
4162
4163 obstack_grow (&odr_enum_obstack, rname, strlen (rname) + 1);
4164 name = XOBFINISH (&odr_enum_obstack, char *);
4165
4166 bool existed_p;
4167 class odr_enum &this_enum
4168 = odr_enum_map->get_or_insert (xstrdup (name), &existed_p);
4169
4170 /* If this is first time we see the enum, remember its definition. */
4171 if (!existed_p)
4172 {
4173 this_enum.vals.safe_grow_cleared (nvals, true);
4174 this_enum.warned = false;
4175 if (dump_file)
4176 fprintf (dump_file, "enum %s\n{\n", name);
4177 for (unsigned j = 0; j < nvals; j++)
4178 {
4179 const char *val_name = streamer_read_string (data_in, &ib);
4180 obstack_grow (&odr_enum_obstack, val_name, strlen (val_name) + 1);
4181 this_enum.vals[j].name = XOBFINISH (&odr_enum_obstack, char *);
4182 this_enum.vals[j].val = streamer_read_wide_int (&ib);
4183 if (dump_file)
4184 fprintf (dump_file, " %s = " HOST_WIDE_INT_PRINT_DEC ",\n",
4185 val_name, wi::fits_shwi_p (this_enum.vals[j].val)
4186 ? this_enum.vals[j].val.to_shwi () : -1);
4187 }
4188 bitpack_d bp = streamer_read_bitpack (&ib);
4189 stream_input_location (&this_enum.locus, &bp, data_in);
4190 for (unsigned j = 0; j < nvals; j++)
4191 stream_input_location (&this_enum.vals[j].locus, &bp, data_in);
4192 data_in->location_cache.apply_location_cache ();
4193 if (dump_file)
4194 fprintf (dump_file, "}\n");
4195 }
4196 /* If we already have definition, compare it with new one and output
4197 warnings if they differs. */
4198 else
4199 {
4200 int do_warning = -1;
4201 char *warn_name = NULL;
4202 wide_int warn_value = wi::zero (1);
4203
4204 if (dump_file)
4205 fprintf (dump_file, "Comparing enum %s\n", name);
4206
4207 /* Look for differences which we will warn about later once locations
4208 are streamed. */
4209 for (unsigned j = 0; j < nvals; j++)
4210 {
4211 const char *id = streamer_read_string (data_in, &ib);
4212 wide_int val = streamer_read_wide_int (&ib);
4213
4214 if (do_warning != -1 || j >= this_enum.vals.length ())
4215 continue;
4216 if (strcmp (id, this_enum.vals[j].name)
4217 || (val.get_precision() !=
4218 this_enum.vals[j].val.get_precision())
4219 || val != this_enum.vals[j].val)
4220 {
4221 warn_name = xstrdup (id);
4222 warn_value = val;
4223 do_warning = j;
4224 if (dump_file)
4225 fprintf (dump_file, " Different on entry %i\n", j);
4226 }
4227 }
4228
4229 /* Stream in locations, but do not apply them unless we are going
4230 to warn. */
4231 bitpack_d bp = streamer_read_bitpack (&ib);
4232 location_t locus;
4233
4234 stream_input_location (&locus, &bp, data_in);
4235
4236 /* Did we find a difference? */
4237 if (do_warning != -1 || nvals != this_enum.vals.length ())
4238 {
4239 data_in->location_cache.apply_location_cache ();
4240
4241 const int opts = DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES;
4242 char *dmgname = cplus_demangle (name, opts);
4243 if (this_enum.warned
4244 || !warning_at (this_enum.locus,
4245 OPT_Wodr, "type %qs violates the "
4246 "C++ One Definition Rule",
4247 dmgname))
4248 do_warning = -1;
4249 else
4250 {
4251 this_enum.warned = true;
4252 if (do_warning == -1)
4253 inform (locus,
4254 "an enum with different number of values is defined"
4255 " in another translation unit");
4256 else if (warn_name)
4257 inform (locus,
4258 "an enum with different value name"
4259 " is defined in another translation unit");
4260 else
4261 inform (locus,
4262 "an enum with different values"
4263 " is defined in another translation unit");
4264 }
4265 }
4266 else
4267 data_in->location_cache.revert_location_cache ();
4268
4269 /* Finally look up for location of the actual value that diverged. */
4270 for (unsigned j = 0; j < nvals; j++)
4271 {
4272 location_t id_locus;
4273
4274 data_in->location_cache.revert_location_cache ();
4275 stream_input_location (&id_locus, &bp, data_in);
4276
4277 if ((int) j == do_warning)
4278 {
4279 data_in->location_cache.apply_location_cache ();
4280
4281 if (strcmp (warn_name, this_enum.vals[j].name))
4282 inform (this_enum.vals[j].locus,
4283 "name %qs differs from name %qs defined"
4284 " in another translation unit",
4285 this_enum.vals[j].name, warn_name);
4286 else if (this_enum.vals[j].val.get_precision() !=
4287 warn_value.get_precision())
4288 inform (this_enum.vals[j].locus,
4289 "name %qs is defined as %u-bit while another "
4290 "translation unit defines it as %u-bit",
4291 warn_name, this_enum.vals[j].val.get_precision(),
4292 warn_value.get_precision());
4293 /* FIXME: In case there is easy way to print wide_ints,
4294 perhaps we could do it here instead of overflow check. */
4295 else if (wi::fits_shwi_p (this_enum.vals[j].val)
4296 && wi::fits_shwi_p (warn_value))
4297 inform (this_enum.vals[j].locus,
4298 "name %qs is defined to %wd while another "
4299 "translation unit defines it as %wd",
4300 warn_name, this_enum.vals[j].val.to_shwi (),
4301 warn_value.to_shwi ());
4302 else
4303 inform (this_enum.vals[j].locus,
4304 "name %qs is defined to different value "
4305 "in another translation unit",
4306 warn_name);
4307
4308 inform (id_locus,
4309 "mismatching definition");
4310 }
4311 else
4312 data_in->location_cache.revert_location_cache ();
4313 }
4314 if (warn_name)
4315 free (warn_name);
4316 obstack_free (&odr_enum_obstack, name);
4317 }
4318 }
4319 lto_free_section_data (file_data, LTO_section_ipa_fn_summary, NULL, data,
4320 len);
4321 lto_data_in_delete (data_in);
4322 }
4323
4324 /* Read all ODR type sections. */
4325
4326 static void
ipa_odr_summary_read(void)4327 ipa_odr_summary_read (void)
4328 {
4329 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4330 struct lto_file_decl_data *file_data;
4331 unsigned int j = 0;
4332
4333 while ((file_data = file_data_vec[j++]))
4334 {
4335 size_t len;
4336 const char *data
4337 = lto_get_summary_section_data (file_data, LTO_section_odr_types,
4338 &len);
4339 if (data)
4340 ipa_odr_read_section (file_data, data, len);
4341 }
4342 /* Enum info is used only to produce warnings. Only case we will need it
4343 again is streaming for incremental LTO. */
4344 if (flag_incremental_link != INCREMENTAL_LINK_LTO)
4345 {
4346 delete odr_enum_map;
4347 obstack_free (&odr_enum_obstack, NULL);
4348 odr_enum_map = NULL;
4349 }
4350 }
4351
4352 namespace {
4353
/* Pass descriptor for the ODR streaming pass implemented by class
   pass_ipa_odr below.  The pass only reads/writes summaries; it has no
   TODOs of its own.  */

const pass_data pass_data_ipa_odr =
{
  IPA_PASS, /* type */
  "odr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_ODR, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
4366
4367 class pass_ipa_odr : public ipa_opt_pass_d
4368 {
4369 public:
pass_ipa_odr(gcc::context * ctxt)4370 pass_ipa_odr (gcc::context *ctxt)
4371 : ipa_opt_pass_d (pass_data_ipa_odr, ctxt,
4372 NULL, /* generate_summary */
4373 ipa_odr_summary_write, /* write_summary */
4374 ipa_odr_summary_read, /* read_summary */
4375 NULL, /* write_optimization_summary */
4376 NULL, /* read_optimization_summary */
4377 NULL, /* stmt_fixup */
4378 0, /* function_transform_todo_flags_start */
4379 NULL, /* function_transform */
4380 NULL) /* variable_transform */
4381 {}
4382
4383 /* opt_pass methods: */
gate(function *)4384 virtual bool gate (function *)
4385 {
4386 return (in_lto_p || flag_lto);
4387 }
4388
execute(function *)4389 virtual unsigned int execute (function *)
4390 {
4391 return 0;
4392 }
4393
4394 }; // class pass_ipa_odr
4395
4396 } // anon namespace
4397
/* Create an instance of the ODR streaming pass for the pass manager.
   CTXT is the compiler context owning the pass.  */

ipa_opt_pass_d *
make_pass_ipa_odr (gcc::context *ctxt)
{
  return new pass_ipa_odr (ctxt);
}
4403
4404
4405 #include "gt-ipa-devirt.h"
4406