1 /* Basic IPA utilities for type inheritance graph construction and
3 Copyright (C) 2013-2020 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointer to by
64 BINFO_VTABLE of base binfos (that differs of BINFO_VTABLE of
65 binfo associated to the base type).
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
77 polymorphic (indirect) call
78 This is callgraph representation of virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
84 build_type_inheritance_graph triggers a construction of the type inheritance
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
92 The inheritance graph is represented as follows:
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
102 possible_polymorphic_call_targets returns, given an parameters found in
103 indirect polymorphic edge all possible polymorphic call targets of the call.
105 pass_ipa_devirt performs simple speculative devirtualization.
110 #include "coretypes.h"
115 #include "alloc-pool.h"
116 #include "tree-pass.h"
118 #include "lto-streamer.h"
119 #include "fold-const.h"
120 #include "print-tree.h"
122 #include "ipa-utils.h"
123 #include "gimple-fold.h"
124 #include "symbol-summary.h"
125 #include "tree-vrp.h"
126 #include "ipa-prop.h"
127 #include "ipa-fnsummary.h"
128 #include "demangle.h"
130 #include "gimple-pretty-print.h"
132 #include "stringpool.h"
135 /* Hash based set of pairs of types. */
143 struct default_hash_traits
<type_pair
>
144 : typed_noop_remove
<type_pair
>
146 GTY((skip
)) typedef type_pair value_type
;
147 GTY((skip
)) typedef type_pair compare_type
;
151 return TYPE_UID (p
.first
) ^ TYPE_UID (p
.second
);
154 is_empty (type_pair p
)
156 return p
.first
== NULL
;
159 is_deleted (type_pair p ATTRIBUTE_UNUSED
)
164 equal (const type_pair
&a
, const type_pair
&b
)
166 return a
.first
==b
.first
&& a
.second
== b
.second
;
169 mark_empty (type_pair
&e
)
175 /* HACK alert: this is used to communicate with ipa-inline-transform that
176 thunk is being expanded and there is no need to clear the polymorphic
177 call target cache. */
178 bool thunk_expansion
;
180 static bool odr_types_equivalent_p (tree
, tree
, bool, bool *,
181 hash_set
<type_pair
> *,
182 location_t
, location_t
);
183 static void warn_odr (tree t1
, tree t2
, tree st1
, tree st2
,
184 bool warn
, bool *warned
, const char *reason
);
186 static bool odr_violation_reported
= false;
189 /* Pointer set of all call targets appearing in the cache. */
190 static hash_set
<cgraph_node
*> *cached_polymorphic_call_targets
;
192 /* The node of type inheritance graph. For each type unique in
193 One Definition Rule (ODR) sense, we produce one node linking all
194 main variants of types equivalent to it, bases and derived types. */
196 struct GTY(()) odr_type_d
200 /* All bases; built only for main variants of types. */
201 vec
<odr_type
> GTY((skip
)) bases
;
202 /* All derived types with virtual methods seen in unit;
203 built only for main variants of types. */
204 vec
<odr_type
> GTY((skip
)) derived_types
;
206 /* All equivalent types, if more than one. */
207 vec
<tree
, va_gc
> *types
;
208 /* Set of all equivalent types, if NON-NULL. */
209 hash_set
<tree
> * GTY((skip
)) types_set
;
211 /* Unique ID indexing the type in odr_types array. */
213 /* Is it in anonymous namespace? */
214 bool anonymous_namespace
;
215 /* Do we know about all derivations of given type? */
216 bool all_derivations_known
;
217 /* Did we report ODR violation here? */
219 /* Set when virtual table without RTTI prevailed table with. */
221 /* Set when the canonical type is determined using the type name. */
225 /* Return TRUE if all derived types of T are known and thus
226 we may consider the walk of derived type complete.
228 This is typically true only for final anonymous namespace types and types
229 defined within functions (that may be COMDAT and thus shared across units,
230 but with the same set of derived types). */
233 type_all_derivations_known_p (const_tree t
)
235 if (TYPE_FINAL_P (t
))
239 /* Non-C++ types may have IDENTIFIER_NODE here, do not crash. */
240 if (!TYPE_NAME (t
) || TREE_CODE (TYPE_NAME (t
)) != TYPE_DECL
)
242 if (type_in_anonymous_namespace_p (t
))
244 return (decl_function_context (TYPE_NAME (t
)) != NULL
);
247 /* Return TRUE if type's constructors are all visible. */
250 type_all_ctors_visible_p (tree t
)
253 && symtab
->state
>= CONSTRUCTION
254 /* We cannot always use type_all_derivations_known_p.
255 For function local types we must assume case where
256 the function is COMDAT and shared in between units.
258 TODO: These cases are quite easy to get, but we need
259 to keep track of C++ privatizing via -Wno-weak
260 as well as the IPA privatizing. */
261 && type_in_anonymous_namespace_p (t
);
264 /* Return TRUE if type may have instance. */
267 type_possibly_instantiated_p (tree t
)
272 /* TODO: Add abstract types here. */
273 if (!type_all_ctors_visible_p (t
))
276 vtable
= BINFO_VTABLE (TYPE_BINFO (t
));
277 if (TREE_CODE (vtable
) == POINTER_PLUS_EXPR
)
278 vtable
= TREE_OPERAND (TREE_OPERAND (vtable
, 0), 0);
279 vnode
= varpool_node::get (vtable
);
280 return vnode
&& vnode
->definition
;
283 /* Hash used to unify ODR types based on their mangled name and for anonymous
286 struct odr_name_hasher
: pointer_hash
<odr_type_d
>
288 typedef union tree_node
*compare_type
;
289 static inline hashval_t
hash (const odr_type_d
*);
290 static inline bool equal (const odr_type_d
*, const tree_node
*);
291 static inline void remove (odr_type_d
*);
295 can_be_name_hashed_p (tree t
)
297 return (!in_lto_p
|| odr_type_p (t
));
300 /* Hash type by its ODR name. */
303 hash_odr_name (const_tree t
)
305 gcc_checking_assert (TYPE_MAIN_VARIANT (t
) == t
);
307 /* If not in LTO, all main variants are unique, so we can do
310 return htab_hash_pointer (t
);
312 /* Anonymous types are unique. */
313 if (type_with_linkage_p (t
) && type_in_anonymous_namespace_p (t
))
314 return htab_hash_pointer (t
);
316 gcc_checking_assert (TYPE_NAME (t
)
317 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t
)));
318 return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t
)));
321 /* Return the computed hashcode for ODR_TYPE. */
324 odr_name_hasher::hash (const odr_type_d
*odr_type
)
326 return hash_odr_name (odr_type
->type
);
329 /* For languages with One Definition Rule, work out if
330 types are the same based on their name.
332 This is non-trivial for LTO where minor differences in
333 the type representation may have prevented type merging
334 to merge two copies of otherwise equivalent type.
336 Until we start streaming mangled type names, this function works
337 only for polymorphic types.
341 types_same_for_odr (const_tree type1
, const_tree type2
)
343 gcc_checking_assert (TYPE_P (type1
) && TYPE_P (type2
));
345 type1
= TYPE_MAIN_VARIANT (type1
);
346 type2
= TYPE_MAIN_VARIANT (type2
);
354 /* Anonymous namespace types are never duplicated. */
355 if ((type_with_linkage_p (type1
) && type_in_anonymous_namespace_p (type1
))
356 || (type_with_linkage_p (type2
) && type_in_anonymous_namespace_p (type2
)))
359 /* If both type has mangled defined check if they are same.
360 Watch for anonymous types which are all mangled as "<anon">. */
361 if (!type_with_linkage_p (type1
) || !type_with_linkage_p (type2
))
363 if (type_in_anonymous_namespace_p (type1
)
364 || type_in_anonymous_namespace_p (type2
))
366 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1
))
367 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2
)));
370 /* Return true if we can decide on ODR equivalency.
372 In non-LTO it is always decide, in LTO however it depends in the type has
373 ODR info attached. */
376 types_odr_comparable (tree t1
, tree t2
)
379 || TYPE_MAIN_VARIANT (t1
) == TYPE_MAIN_VARIANT (t2
)
380 || (odr_type_p (TYPE_MAIN_VARIANT (t1
))
381 && odr_type_p (TYPE_MAIN_VARIANT (t2
))));
384 /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
385 known, be conservative and return false. */
388 types_must_be_same_for_odr (tree t1
, tree t2
)
390 if (types_odr_comparable (t1
, t2
))
391 return types_same_for_odr (t1
, t2
);
393 return TYPE_MAIN_VARIANT (t1
) == TYPE_MAIN_VARIANT (t2
);
396 /* If T is compound type, return type it is based on. */
399 compound_type_base (const_tree t
)
401 if (TREE_CODE (t
) == ARRAY_TYPE
402 || POINTER_TYPE_P (t
)
403 || TREE_CODE (t
) == COMPLEX_TYPE
404 || VECTOR_TYPE_P (t
))
405 return TREE_TYPE (t
);
406 if (TREE_CODE (t
) == METHOD_TYPE
)
407 return TYPE_METHOD_BASETYPE (t
);
408 if (TREE_CODE (t
) == OFFSET_TYPE
)
409 return TYPE_OFFSET_BASETYPE (t
);
413 /* Return true if T is either ODR type or compound type based from it.
414 If the function return true, we know that T is a type originating from C++
415 source even at link-time. */
418 odr_or_derived_type_p (const_tree t
)
422 if (odr_type_p (TYPE_MAIN_VARIANT (t
)))
424 /* Function type is a tricky one. Basically we can consider it
425 ODR derived if return type or any of the parameters is.
426 We need to check all parameters because LTO streaming merges
427 common types (such as void) and they are not considered ODR then. */
428 if (TREE_CODE (t
) == FUNCTION_TYPE
)
430 if (TYPE_METHOD_BASETYPE (t
))
431 t
= TYPE_METHOD_BASETYPE (t
);
434 if (TREE_TYPE (t
) && odr_or_derived_type_p (TREE_TYPE (t
)))
436 for (t
= TYPE_ARG_TYPES (t
); t
; t
= TREE_CHAIN (t
))
437 if (odr_or_derived_type_p (TYPE_MAIN_VARIANT (TREE_VALUE (t
))))
443 t
= compound_type_base (t
);
449 /* Compare types T1 and T2 and return true if they are
453 odr_name_hasher::equal (const odr_type_d
*o1
, const tree_node
*t2
)
457 gcc_checking_assert (TYPE_MAIN_VARIANT (t2
) == t2
);
458 gcc_checking_assert (TYPE_MAIN_VARIANT (t1
) == t1
);
463 /* Check for anonymous namespaces. */
464 if ((type_with_linkage_p (t1
) && type_in_anonymous_namespace_p (t1
))
465 || (type_with_linkage_p (t2
) && type_in_anonymous_namespace_p (t2
)))
467 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1
)));
468 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2
)));
469 return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1
))
470 == DECL_ASSEMBLER_NAME (TYPE_NAME (t2
)));
473 /* Free ODR type V. */
476 odr_name_hasher::remove (odr_type_d
*v
)
479 v
->derived_types
.release ();
485 /* ODR type hash used to look up ODR type based on tree type node. */
487 typedef hash_table
<odr_name_hasher
> odr_hash_type
;
488 static odr_hash_type
*odr_hash
;
490 /* ODR types are also stored into ODR_TYPE vector to allow consistent
491 walking. Bases appear before derived types. Vector is garbage collected
492 so we won't end up visiting empty types. */
494 static GTY(()) vec
<odr_type
, va_gc
> *odr_types_ptr
;
495 #define odr_types (*odr_types_ptr)
497 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
499 set_type_binfo (tree type
, tree binfo
)
501 for (; type
; type
= TYPE_NEXT_VARIANT (type
))
502 if (COMPLETE_TYPE_P (type
))
503 TYPE_BINFO (type
) = binfo
;
505 gcc_assert (!TYPE_BINFO (type
));
508 /* Return true if type variants match.
509 This assumes that we already verified that T1 and T2 are variants of the
513 type_variants_equivalent_p (tree t1
, tree t2
)
515 if (TYPE_QUALS (t1
) != TYPE_QUALS (t2
))
518 if (comp_type_attributes (t1
, t2
) != 1)
521 if (COMPLETE_TYPE_P (t1
) && COMPLETE_TYPE_P (t2
)
522 && TYPE_ALIGN (t1
) != TYPE_ALIGN (t2
))
528 /* Compare T1 and T2 based on name or structure. */
531 odr_subtypes_equivalent_p (tree t1
, tree t2
,
532 hash_set
<type_pair
> *visited
,
533 location_t loc1
, location_t loc2
)
536 /* This can happen in incomplete types that should be handled earlier. */
537 gcc_assert (t1
&& t2
);
542 /* Anonymous namespace types must match exactly. */
543 if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1
))
544 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1
)))
545 || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2
))
546 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2
))))
549 /* For ODR types be sure to compare their names.
550 To support -Wno-odr-type-merging we allow one type to be non-ODR
551 and other ODR even though it is a violation. */
552 if (types_odr_comparable (t1
, t2
))
555 && odr_type_p (TYPE_MAIN_VARIANT (t1
))
556 && get_odr_type (TYPE_MAIN_VARIANT (t1
), true)->odr_violated
)
558 if (!types_same_for_odr (t1
, t2
))
560 if (!type_variants_equivalent_p (t1
, t2
))
562 /* Limit recursion: If subtypes are ODR types and we know
563 that they are same, be happy. */
564 if (odr_type_p (TYPE_MAIN_VARIANT (t1
)))
568 /* Component types, builtins and possibly violating ODR types
569 have to be compared structurally. */
570 if (TREE_CODE (t1
) != TREE_CODE (t2
))
572 if (AGGREGATE_TYPE_P (t1
)
573 && (TYPE_NAME (t1
) == NULL_TREE
) != (TYPE_NAME (t2
) == NULL_TREE
))
576 type_pair pair
={TYPE_MAIN_VARIANT (t1
), TYPE_MAIN_VARIANT (t2
)};
577 if (TYPE_UID (TYPE_MAIN_VARIANT (t1
)) > TYPE_UID (TYPE_MAIN_VARIANT (t2
)))
579 pair
.first
= TYPE_MAIN_VARIANT (t2
);
580 pair
.second
= TYPE_MAIN_VARIANT (t1
);
582 if (visited
->add (pair
))
584 if (!odr_types_equivalent_p (TYPE_MAIN_VARIANT (t1
), TYPE_MAIN_VARIANT (t2
),
585 false, NULL
, visited
, loc1
, loc2
))
587 if (!type_variants_equivalent_p (t1
, t2
))
592 /* Return true if DECL1 and DECL2 are identical methods. Consider
593 name equivalent to name.localalias.xyz. */
596 methods_equal_p (tree decl1
, tree decl2
)
598 if (DECL_ASSEMBLER_NAME (decl1
) == DECL_ASSEMBLER_NAME (decl2
))
600 const char sep
= symbol_table::symbol_suffix_separator ();
602 const char *name1
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl1
));
603 const char *ptr1
= strchr (name1
, sep
);
604 int len1
= ptr1
? ptr1
- name1
: strlen (name1
);
606 const char *name2
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl2
));
607 const char *ptr2
= strchr (name2
, sep
);
608 int len2
= ptr2
? ptr2
- name2
: strlen (name2
);
612 return !strncmp (name1
, name2
, len1
);
615 /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
616 violation warnings. */
619 compare_virtual_tables (varpool_node
*prevailing
, varpool_node
*vtable
)
623 if (DECL_VIRTUAL_P (prevailing
->decl
) != DECL_VIRTUAL_P (vtable
->decl
))
625 odr_violation_reported
= true;
626 if (DECL_VIRTUAL_P (prevailing
->decl
))
628 varpool_node
*tmp
= prevailing
;
632 auto_diagnostic_group d
;
633 if (warning_at (DECL_SOURCE_LOCATION
634 (TYPE_NAME (DECL_CONTEXT (vtable
->decl
))),
636 "virtual table of type %qD violates one definition rule",
637 DECL_CONTEXT (vtable
->decl
)))
638 inform (DECL_SOURCE_LOCATION (prevailing
->decl
),
639 "variable of same assembler name as the virtual table is "
640 "defined in another translation unit");
643 if (!prevailing
->definition
|| !vtable
->definition
)
646 /* If we do not stream ODR type info, do not bother to do useful compare. */
647 if (!TYPE_BINFO (DECL_CONTEXT (vtable
->decl
))
648 || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable
->decl
))))
651 odr_type class_type
= get_odr_type (DECL_CONTEXT (vtable
->decl
), true);
653 if (class_type
->odr_violated
)
656 for (n1
= 0, n2
= 0; true; n1
++, n2
++)
658 struct ipa_ref
*ref1
, *ref2
;
661 end1
= !prevailing
->iterate_reference (n1
, ref1
);
662 end2
= !vtable
->iterate_reference (n2
, ref2
);
664 /* !DECL_VIRTUAL_P means RTTI entry;
665 We warn when RTTI is lost because non-RTTI prevails; we silently
666 accept the other case. */
669 || (methods_equal_p (ref1
->referred
->decl
,
670 ref2
->referred
->decl
)
671 && TREE_CODE (ref1
->referred
->decl
) == FUNCTION_DECL
))
672 && TREE_CODE (ref2
->referred
->decl
) != FUNCTION_DECL
)
674 if (!class_type
->rtti_broken
)
676 auto_diagnostic_group d
;
677 if (warning_at (DECL_SOURCE_LOCATION
678 (TYPE_NAME (DECL_CONTEXT (vtable
->decl
))),
680 "virtual table of type %qD contains RTTI "
682 DECL_CONTEXT (vtable
->decl
)))
684 inform (DECL_SOURCE_LOCATION
685 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
686 "but is prevailed by one without from other"
687 " translation unit");
688 inform (DECL_SOURCE_LOCATION
689 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
690 "RTTI will not work on this type");
691 class_type
->rtti_broken
= true;
695 end2
= !vtable
->iterate_reference (n2
, ref2
);
699 || (methods_equal_p (ref2
->referred
->decl
, ref1
->referred
->decl
)
700 && TREE_CODE (ref2
->referred
->decl
) == FUNCTION_DECL
))
701 && TREE_CODE (ref1
->referred
->decl
) != FUNCTION_DECL
)
704 end1
= !prevailing
->iterate_reference (n1
, ref1
);
710 /* Extra paranoia; compare the sizes. We do not have information
711 about virtual inheritance offsets, so just be sure that these
713 Do this as very last check so the not very informative error
714 is not output too often. */
715 if (DECL_SIZE (prevailing
->decl
) != DECL_SIZE (vtable
->decl
))
717 class_type
->odr_violated
= true;
718 auto_diagnostic_group d
;
719 tree ctx
= TYPE_NAME (DECL_CONTEXT (vtable
->decl
));
720 if (warning_at (DECL_SOURCE_LOCATION (ctx
), OPT_Wodr
,
721 "virtual table of type %qD violates "
722 "one definition rule",
723 DECL_CONTEXT (vtable
->decl
)))
725 ctx
= TYPE_NAME (DECL_CONTEXT (prevailing
->decl
));
726 inform (DECL_SOURCE_LOCATION (ctx
),
727 "the conflicting type defined in another translation"
728 " unit has virtual table of different size");
736 if (methods_equal_p (ref1
->referred
->decl
, ref2
->referred
->decl
))
739 class_type
->odr_violated
= true;
741 /* If the loops above stopped on non-virtual pointer, we have
742 mismatch in RTTI information mangling. */
743 if (TREE_CODE (ref1
->referred
->decl
) != FUNCTION_DECL
744 && TREE_CODE (ref2
->referred
->decl
) != FUNCTION_DECL
)
746 auto_diagnostic_group d
;
747 if (warning_at (DECL_SOURCE_LOCATION
748 (TYPE_NAME (DECL_CONTEXT (vtable
->decl
))),
750 "virtual table of type %qD violates "
751 "one definition rule",
752 DECL_CONTEXT (vtable
->decl
)))
754 inform (DECL_SOURCE_LOCATION
755 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
756 "the conflicting type defined in another translation "
757 "unit with different RTTI information");
761 /* At this point both REF1 and REF2 points either to virtual table
762 or virtual method. If one points to virtual table and other to
763 method we can complain the same way as if one table was shorter
764 than other pointing out the extra method. */
765 if (TREE_CODE (ref1
->referred
->decl
)
766 != TREE_CODE (ref2
->referred
->decl
))
768 if (VAR_P (ref1
->referred
->decl
))
770 else if (VAR_P (ref2
->referred
->decl
))
775 class_type
->odr_violated
= true;
777 /* Complain about size mismatch. Either we have too many virtual
778 functions or too many virtual table pointers. */
783 varpool_node
*tmp
= prevailing
;
788 auto_diagnostic_group d
;
789 if (warning_at (DECL_SOURCE_LOCATION
790 (TYPE_NAME (DECL_CONTEXT (vtable
->decl
))),
792 "virtual table of type %qD violates "
793 "one definition rule",
794 DECL_CONTEXT (vtable
->decl
)))
796 if (TREE_CODE (ref1
->referring
->decl
) == FUNCTION_DECL
)
798 inform (DECL_SOURCE_LOCATION
799 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
800 "the conflicting type defined in another translation "
802 inform (DECL_SOURCE_LOCATION
803 (TYPE_NAME (DECL_CONTEXT (ref1
->referring
->decl
))),
804 "contains additional virtual method %qD",
805 ref1
->referred
->decl
);
809 inform (DECL_SOURCE_LOCATION
810 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
811 "the conflicting type defined in another translation "
812 "unit has virtual table with more entries");
818 /* And in the last case we have either mismatch in between two virtual
819 methods or two virtual table pointers. */
820 auto_diagnostic_group d
;
821 if (warning_at (DECL_SOURCE_LOCATION
822 (TYPE_NAME (DECL_CONTEXT (vtable
->decl
))), OPT_Wodr
,
823 "virtual table of type %qD violates "
824 "one definition rule",
825 DECL_CONTEXT (vtable
->decl
)))
827 if (TREE_CODE (ref1
->referred
->decl
) == FUNCTION_DECL
)
829 inform (DECL_SOURCE_LOCATION
830 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
831 "the conflicting type defined in another translation "
833 gcc_assert (TREE_CODE (ref2
->referred
->decl
)
835 inform (DECL_SOURCE_LOCATION
836 (ref1
->referred
->ultimate_alias_target ()->decl
),
837 "virtual method %qD",
838 ref1
->referred
->ultimate_alias_target ()->decl
);
839 inform (DECL_SOURCE_LOCATION
840 (ref2
->referred
->ultimate_alias_target ()->decl
),
841 "ought to match virtual method %qD but does not",
842 ref2
->referred
->ultimate_alias_target ()->decl
);
845 inform (DECL_SOURCE_LOCATION
846 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
847 "the conflicting type defined in another translation "
848 "unit has virtual table with different contents");
854 /* Output ODR violation warning about T1 and T2 with REASON.
855 Display location of ST1 and ST2 if REASON speaks about field or
857 If WARN is false, do nothing. Set WARNED if warning was indeed
861 warn_odr (tree t1
, tree t2
, tree st1
, tree st2
,
862 bool warn
, bool *warned
, const char *reason
)
864 tree decl2
= TYPE_NAME (TYPE_MAIN_VARIANT (t2
));
868 if (!warn
|| !TYPE_NAME(TYPE_MAIN_VARIANT (t1
)))
871 /* ODR warnings are output during LTO streaming; we must apply location
872 cache for potential warnings to be output correctly. */
873 if (lto_location_cache::current_cache
)
874 lto_location_cache::current_cache
->apply_location_cache ();
876 auto_diagnostic_group d
;
877 if (t1
!= TYPE_MAIN_VARIANT (t1
)
878 && TYPE_NAME (t1
) != TYPE_NAME (TYPE_MAIN_VARIANT (t1
)))
880 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1
))),
881 OPT_Wodr
, "type %qT (typedef of %qT) violates the "
882 "C++ One Definition Rule",
883 t1
, TYPE_MAIN_VARIANT (t1
)))
888 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1
))),
889 OPT_Wodr
, "type %qT violates the C++ One Definition Rule",
895 /* For FIELD_DECL support also case where one of fields is
896 NULL - this is used when the structures have mismatching number of
898 else if (!st1
|| TREE_CODE (st1
) == FIELD_DECL
)
900 inform (DECL_SOURCE_LOCATION (decl2
),
901 "a different type is defined in another translation unit");
907 inform (DECL_SOURCE_LOCATION (st1
),
908 "the first difference of corresponding definitions is field %qD",
913 else if (TREE_CODE (st1
) == FUNCTION_DECL
)
915 inform (DECL_SOURCE_LOCATION (decl2
),
916 "a different type is defined in another translation unit");
917 inform (DECL_SOURCE_LOCATION (st1
),
918 "the first difference of corresponding definitions is method %qD",
924 inform (DECL_SOURCE_LOCATION (decl2
), reason
);
930 /* Return true if T1 and T2 are incompatible and we want to recursively
931 dive into them from warn_type_mismatch to give sensible answer. */
934 type_mismatch_p (tree t1
, tree t2
)
936 if (odr_or_derived_type_p (t1
) && odr_or_derived_type_p (t2
)
937 && !odr_types_equivalent_p (t1
, t2
))
939 return !types_compatible_p (t1
, t2
);
943 /* Types T1 and T2 was found to be incompatible in a context they can't
944 (either used to declare a symbol of same assembler name or unified by
945 ODR rule). We already output warning about this, but if possible, output
946 extra information on how the types mismatch.
948 This is hard to do in general. We basically handle the common cases.
950 If LOC1 and LOC2 are meaningful locations, use it in the case the types
951 themselves do not have one. */
954 warn_types_mismatch (tree t1
, tree t2
, location_t loc1
, location_t loc2
)
956 /* Location of type is known only if it has TYPE_NAME and the name is
958 location_t loc_t1
= TYPE_NAME (t1
) && TREE_CODE (TYPE_NAME (t1
)) == TYPE_DECL
959 ? DECL_SOURCE_LOCATION (TYPE_NAME (t1
))
961 location_t loc_t2
= TYPE_NAME (t2
) && TREE_CODE (TYPE_NAME (t2
)) == TYPE_DECL
962 ? DECL_SOURCE_LOCATION (TYPE_NAME (t2
))
964 bool loc_t2_useful
= false;
966 /* With LTO it is a common case that the location of both types match.
967 See if T2 has a location that is different from T1. If so, we will
968 inform user about the location.
969 Do not consider the location passed to us in LOC1/LOC2 as those are
971 if (loc_t2
> BUILTINS_LOCATION
&& loc_t2
!= loc_t1
)
973 if (loc_t1
<= BUILTINS_LOCATION
)
974 loc_t2_useful
= true;
977 expanded_location xloc1
= expand_location (loc_t1
);
978 expanded_location xloc2
= expand_location (loc_t2
);
980 if (strcmp (xloc1
.file
, xloc2
.file
)
981 || xloc1
.line
!= xloc2
.line
982 || xloc1
.column
!= xloc2
.column
)
983 loc_t2_useful
= true;
987 if (loc_t1
<= BUILTINS_LOCATION
)
989 if (loc_t2
<= BUILTINS_LOCATION
)
992 location_t loc
= loc_t1
<= BUILTINS_LOCATION
? loc_t2
: loc_t1
;
994 /* It is a quite common bug to reference anonymous namespace type in
995 non-anonymous namespace class. */
996 tree mt1
= TYPE_MAIN_VARIANT (t1
);
997 tree mt2
= TYPE_MAIN_VARIANT (t2
);
998 if ((type_with_linkage_p (mt1
)
999 && type_in_anonymous_namespace_p (mt1
))
1000 || (type_with_linkage_p (mt2
)
1001 && type_in_anonymous_namespace_p (mt2
)))
1003 if (!type_with_linkage_p (mt1
)
1004 || !type_in_anonymous_namespace_p (mt1
))
1007 std::swap (mt1
, mt2
);
1008 std::swap (loc_t1
, loc_t2
);
1010 gcc_assert (TYPE_NAME (mt1
)
1011 && TREE_CODE (TYPE_NAME (mt1
)) == TYPE_DECL
);
1012 tree n1
= TYPE_NAME (mt1
);
1013 tree n2
= TYPE_NAME (mt2
) ? TYPE_NAME (mt2
) : NULL
;
1015 if (TREE_CODE (n1
) == TYPE_DECL
)
1016 n1
= DECL_NAME (n1
);
1017 if (n2
&& TREE_CODE (n2
) == TYPE_DECL
)
1018 n2
= DECL_NAME (n2
);
1019 /* Most of the time, the type names will match, do not be unnecessarily
1023 "type %qT defined in anonymous namespace cannot match "
1024 "type %qT across the translation unit boundary",
1028 "type %qT defined in anonymous namespace cannot match "
1029 "across the translation unit boundary",
1033 "the incompatible type defined in another translation unit");
1036 /* If types have mangled ODR names and they are different, it is most
1037 informative to output those.
1038 This also covers types defined in different namespaces. */
1039 const char *odr1
= get_odr_name_for_type (mt1
);
1040 const char *odr2
= get_odr_name_for_type (mt2
);
1041 if (odr1
!= NULL
&& odr2
!= NULL
&& odr1
!= odr2
)
1043 const int opts
= DMGL_PARAMS
| DMGL_ANSI
| DMGL_TYPES
;
1044 char *name1
= xstrdup (cplus_demangle (odr1
, opts
));
1045 char *name2
= cplus_demangle (odr2
, opts
);
1046 if (name1
&& name2
&& strcmp (name1
, name2
))
1049 "type name %qs should match type name %qs",
1053 "the incompatible type is defined here");
1059 /* A tricky case are compound types. Often they appear the same in source
1060 code and the mismatch is dragged in by type they are build from.
1061 Look for those differences in subtypes and try to be informative. In other
1062 cases just output nothing because the source code is probably different
1063 and in this case we already output a all necessary info. */
1064 if (!TYPE_NAME (t1
) || !TYPE_NAME (t2
))
1066 if (TREE_CODE (t1
) == TREE_CODE (t2
))
1068 if (TREE_CODE (t1
) == ARRAY_TYPE
1069 && COMPLETE_TYPE_P (t1
) && COMPLETE_TYPE_P (t2
))
1071 tree i1
= TYPE_DOMAIN (t1
);
1072 tree i2
= TYPE_DOMAIN (t2
);
1075 && TYPE_MAX_VALUE (i1
)
1076 && TYPE_MAX_VALUE (i2
)
1077 && !operand_equal_p (TYPE_MAX_VALUE (i1
),
1078 TYPE_MAX_VALUE (i2
), 0))
1081 "array types have different bounds");
1085 if ((POINTER_TYPE_P (t1
) || TREE_CODE (t1
) == ARRAY_TYPE
)
1086 && type_mismatch_p (TREE_TYPE (t1
), TREE_TYPE (t2
)))
1087 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
), loc_t1
, loc_t2
);
1088 else if (TREE_CODE (t1
) == METHOD_TYPE
1089 || TREE_CODE (t1
) == FUNCTION_TYPE
)
1091 tree parms1
= NULL
, parms2
= NULL
;
1094 if (type_mismatch_p (TREE_TYPE (t1
), TREE_TYPE (t2
)))
1096 inform (loc
, "return value type mismatch");
1097 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
), loc_t1
,
1101 if (prototype_p (t1
) && prototype_p (t2
))
1102 for (parms1
= TYPE_ARG_TYPES (t1
), parms2
= TYPE_ARG_TYPES (t2
);
1104 parms1
= TREE_CHAIN (parms1
), parms2
= TREE_CHAIN (parms2
),
1107 if (type_mismatch_p (TREE_VALUE (parms1
), TREE_VALUE (parms2
)))
1109 if (count
== 1 && TREE_CODE (t1
) == METHOD_TYPE
)
1111 "implicit this pointer type mismatch");
1114 "type mismatch in parameter %i",
1115 count
- (TREE_CODE (t1
) == METHOD_TYPE
));
1116 warn_types_mismatch (TREE_VALUE (parms1
),
1117 TREE_VALUE (parms2
),
1122 if (parms1
|| parms2
)
1125 "types have different parameter counts");
1133 if (types_odr_comparable (t1
, t2
)
1134 /* We make assign integers mangled names to be able to handle
1135 signed/unsigned chars. Accepting them here would however lead to
1136 confusing message like
1137 "type ‘const int’ itself violates the C++ One Definition Rule" */
1138 && TREE_CODE (t1
) != INTEGER_TYPE
1139 && types_same_for_odr (t1
, t2
))
1141 "type %qT itself violates the C++ One Definition Rule", t1
);
1142 /* Prevent pointless warnings like "struct aa" should match "struct aa". */
1143 else if (TYPE_NAME (t1
) == TYPE_NAME (t2
)
1144 && TREE_CODE (t1
) == TREE_CODE (t2
) && !loc_t2_useful
)
1147 inform (loc_t1
, "type %qT should match type %qT",
1150 inform (loc_t2
, "the incompatible type is defined here");
1153 /* Return true if T should be ignored in TYPE_FIELDS for ODR comparison. */
1156 skip_in_fields_list_p (tree t
)
1158 if (TREE_CODE (t
) != FIELD_DECL
)
1160 /* C++ FE introduces zero sized fields depending on -std setting, see
1163 && integer_zerop (DECL_SIZE (t
))
1164 && DECL_ARTIFICIAL (t
)
1165 && DECL_IGNORED_P (t
)
1171 /* Compare T1 and T2, report ODR violations if WARN is true and set
1172 WARNED to true if anything is reported. Return true if types match.
1173 If true is returned, the types are also compatible in the sense of
1174 gimple_canonical_types_compatible_p.
1175 If LOC1 and LOC2 is not UNKNOWN_LOCATION it may be used to output a warning
1176 about the type if the type itself do not have location. */
1179 odr_types_equivalent_p (tree t1
, tree t2
, bool warn
, bool *warned
,
1180 hash_set
<type_pair
> *visited
,
1181 location_t loc1
, location_t loc2
)
1183 /* Check first for the obvious case of pointer identity. */
1187 /* Can't be the same type if the types don't have the same code. */
1188 if (TREE_CODE (t1
) != TREE_CODE (t2
))
1190 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1191 G_("a different type is defined in another translation unit"));
1195 if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1
))
1196 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1
)))
1197 || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2
))
1198 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2
))))
1200 /* We cannot trip this when comparing ODR types, only when trying to
1201 match different ODR derivations from different declarations.
1202 So WARN should be always false. */
1207 if (TREE_CODE (t1
) == ENUMERAL_TYPE
1208 && TYPE_VALUES (t1
) && TYPE_VALUES (t2
))
1211 for (v1
= TYPE_VALUES (t1
), v2
= TYPE_VALUES (t2
);
1212 v1
&& v2
; v1
= TREE_CHAIN (v1
), v2
= TREE_CHAIN (v2
))
1214 if (TREE_PURPOSE (v1
) != TREE_PURPOSE (v2
))
1216 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1217 G_("an enum with different value name"
1218 " is defined in another translation unit"));
1221 if (!operand_equal_p (TREE_VALUE (v1
), TREE_VALUE (v2
), 0))
1223 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1224 G_("an enum with different values is defined"
1225 " in another translation unit"));
1231 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1232 G_("an enum with mismatching number of values "
1233 "is defined in another translation unit"));
1238 /* Non-aggregate types can be handled cheaply. */
1239 if (INTEGRAL_TYPE_P (t1
)
1240 || SCALAR_FLOAT_TYPE_P (t1
)
1241 || FIXED_POINT_TYPE_P (t1
)
1242 || TREE_CODE (t1
) == VECTOR_TYPE
1243 || TREE_CODE (t1
) == COMPLEX_TYPE
1244 || TREE_CODE (t1
) == OFFSET_TYPE
1245 || POINTER_TYPE_P (t1
))
1247 if (TYPE_PRECISION (t1
) != TYPE_PRECISION (t2
))
1249 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1250 G_("a type with different precision is defined "
1251 "in another translation unit"));
1254 if (TYPE_UNSIGNED (t1
) != TYPE_UNSIGNED (t2
))
1256 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1257 G_("a type with different signedness is defined "
1258 "in another translation unit"));
1262 if (TREE_CODE (t1
) == INTEGER_TYPE
1263 && TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
))
1265 /* char WRT uint_8? */
1266 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1267 G_("a different type is defined in another "
1268 "translation unit"));
1272 /* For canonical type comparisons we do not want to build SCCs
1273 so we cannot compare pointed-to types. But we can, for now,
1274 require the same pointed-to type kind and match what
1275 useless_type_conversion_p would do. */
1276 if (POINTER_TYPE_P (t1
))
1278 if (TYPE_ADDR_SPACE (TREE_TYPE (t1
))
1279 != TYPE_ADDR_SPACE (TREE_TYPE (t2
)))
1281 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1282 G_("it is defined as a pointer in different address "
1283 "space in another translation unit"));
1287 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
1288 visited
, loc1
, loc2
))
1290 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1291 G_("it is defined as a pointer to different type "
1292 "in another translation unit"));
1294 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
),
1300 if ((TREE_CODE (t1
) == VECTOR_TYPE
|| TREE_CODE (t1
) == COMPLEX_TYPE
)
1301 && !odr_subtypes_equivalent_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
1302 visited
, loc1
, loc2
))
1304 /* Probably specific enough. */
1305 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1306 G_("a different type is defined "
1307 "in another translation unit"));
1309 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
), loc1
, loc2
);
1313 /* Do type-specific comparisons. */
1314 else switch (TREE_CODE (t1
))
1318 /* Array types are the same if the element types are the same and
1319 the number of elements are the same. */
1320 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
1321 visited
, loc1
, loc2
))
1323 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1324 G_("a different type is defined in another "
1325 "translation unit"));
1327 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
), loc1
, loc2
);
1329 gcc_assert (TYPE_STRING_FLAG (t1
) == TYPE_STRING_FLAG (t2
));
1330 gcc_assert (TYPE_NONALIASED_COMPONENT (t1
)
1331 == TYPE_NONALIASED_COMPONENT (t2
));
1333 tree i1
= TYPE_DOMAIN (t1
);
1334 tree i2
= TYPE_DOMAIN (t2
);
1336 /* For an incomplete external array, the type domain can be
1337 NULL_TREE. Check this condition also. */
1338 if (i1
== NULL_TREE
|| i2
== NULL_TREE
)
1339 return type_variants_equivalent_p (t1
, t2
);
1341 tree min1
= TYPE_MIN_VALUE (i1
);
1342 tree min2
= TYPE_MIN_VALUE (i2
);
1343 tree max1
= TYPE_MAX_VALUE (i1
);
1344 tree max2
= TYPE_MAX_VALUE (i2
);
1346 /* In C++, minimums should be always 0. */
1347 gcc_assert (min1
== min2
);
1348 if (!operand_equal_p (max1
, max2
, 0))
1350 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1351 G_("an array of different size is defined "
1352 "in another translation unit"));
1360 /* Function types are the same if the return type and arguments types
1362 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
1363 visited
, loc1
, loc2
))
1365 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1366 G_("has different return value "
1367 "in another translation unit"));
1369 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
), loc1
, loc2
);
1373 if (TYPE_ARG_TYPES (t1
) == TYPE_ARG_TYPES (t2
)
1374 || !prototype_p (t1
) || !prototype_p (t2
))
1375 return type_variants_equivalent_p (t1
, t2
);
1378 tree parms1
, parms2
;
1380 for (parms1
= TYPE_ARG_TYPES (t1
), parms2
= TYPE_ARG_TYPES (t2
);
1382 parms1
= TREE_CHAIN (parms1
), parms2
= TREE_CHAIN (parms2
))
1384 if (!odr_subtypes_equivalent_p
1385 (TREE_VALUE (parms1
), TREE_VALUE (parms2
),
1386 visited
, loc1
, loc2
))
1388 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1389 G_("has different parameters in another "
1390 "translation unit"));
1392 warn_types_mismatch (TREE_VALUE (parms1
),
1393 TREE_VALUE (parms2
), loc1
, loc2
);
1398 if (parms1
|| parms2
)
1400 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1401 G_("has different parameters "
1402 "in another translation unit"));
1406 return type_variants_equivalent_p (t1
, t2
);
1411 case QUAL_UNION_TYPE
:
1415 /* For aggregate types, all the fields must be the same. */
1416 if (COMPLETE_TYPE_P (t1
) && COMPLETE_TYPE_P (t2
))
1418 if (TYPE_BINFO (t1
) && TYPE_BINFO (t2
)
1419 && polymorphic_type_binfo_p (TYPE_BINFO (t1
))
1420 != polymorphic_type_binfo_p (TYPE_BINFO (t2
)))
1422 if (polymorphic_type_binfo_p (TYPE_BINFO (t1
)))
1423 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1424 G_("a type defined in another translation unit "
1425 "is not polymorphic"));
1427 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1428 G_("a type defined in another translation unit "
1432 for (f1
= TYPE_FIELDS (t1
), f2
= TYPE_FIELDS (t2
);
1434 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
1436 /* Skip non-fields. */
1437 while (f1
&& skip_in_fields_list_p (f1
))
1438 f1
= TREE_CHAIN (f1
);
1439 while (f2
&& skip_in_fields_list_p (f2
))
1440 f2
= TREE_CHAIN (f2
);
1443 if (DECL_VIRTUAL_P (f1
) != DECL_VIRTUAL_P (f2
))
1445 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1446 G_("a type with different virtual table pointers"
1447 " is defined in another translation unit"));
1450 if (DECL_ARTIFICIAL (f1
) != DECL_ARTIFICIAL (f2
))
1452 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1453 G_("a type with different bases is defined "
1454 "in another translation unit"));
1457 if (DECL_NAME (f1
) != DECL_NAME (f2
)
1458 && !DECL_ARTIFICIAL (f1
))
1460 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1461 G_("a field with different name is defined "
1462 "in another translation unit"));
1465 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1
),
1467 visited
, loc1
, loc2
))
1469 /* Do not warn about artificial fields and just go into
1470 generic field mismatch warning. */
1471 if (DECL_ARTIFICIAL (f1
))
1474 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1475 G_("a field of same name but different type "
1476 "is defined in another translation unit"));
1478 warn_types_mismatch (TREE_TYPE (f1
), TREE_TYPE (f2
), loc1
, loc2
);
1481 if (!gimple_compare_field_offset (f1
, f2
))
1483 /* Do not warn about artificial fields and just go into
1484 generic field mismatch warning. */
1485 if (DECL_ARTIFICIAL (f1
))
1487 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1488 G_("fields have different layout "
1489 "in another translation unit"));
1492 if (DECL_BIT_FIELD (f1
) != DECL_BIT_FIELD (f2
))
1494 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1495 G_("one field is a bitfield while the other "
1500 gcc_assert (DECL_NONADDRESSABLE_P (f1
)
1501 == DECL_NONADDRESSABLE_P (f2
));
1504 /* If one aggregate has more fields than the other, they
1505 are not the same. */
1508 if ((f1
&& DECL_VIRTUAL_P (f1
)) || (f2
&& DECL_VIRTUAL_P (f2
)))
1509 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1510 G_("a type with different virtual table pointers"
1511 " is defined in another translation unit"));
1512 else if ((f1
&& DECL_ARTIFICIAL (f1
))
1513 || (f2
&& DECL_ARTIFICIAL (f2
)))
1514 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1515 G_("a type with different bases is defined "
1516 "in another translation unit"));
1518 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1519 G_("a type with different number of fields "
1520 "is defined in another translation unit"));
1536 /* Those are better to come last as they are utterly uninformative. */
1537 if (TYPE_SIZE (t1
) && TYPE_SIZE (t2
)
1538 && !operand_equal_p (TYPE_SIZE (t1
), TYPE_SIZE (t2
), 0))
1540 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1541 G_("a type with different size "
1542 "is defined in another translation unit"));
1546 gcc_assert (!TYPE_SIZE_UNIT (t1
) || !TYPE_SIZE_UNIT (t2
)
1547 || operand_equal_p (TYPE_SIZE_UNIT (t1
),
1548 TYPE_SIZE_UNIT (t2
), 0));
1549 return type_variants_equivalent_p (t1
, t2
);
1552 /* Return true if TYPE1 and TYPE2 are equivalent for One Definition Rule. */
1555 odr_types_equivalent_p (tree type1
, tree type2
)
1557 gcc_checking_assert (odr_or_derived_type_p (type1
)
1558 && odr_or_derived_type_p (type2
));
1560 hash_set
<type_pair
> visited
;
1561 return odr_types_equivalent_p (type1
, type2
, false, NULL
,
1562 &visited
, UNKNOWN_LOCATION
, UNKNOWN_LOCATION
);
1565 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1566 from VAL->type. This may happen in LTO where tree merging did not merge
1567 all variants of the same type or due to ODR violation.
1569 Analyze and report ODR violations and add type to duplicate list.
1570 If TYPE is more specified than VAL->type, prevail VAL->type. Also if
1571 this is first time we see definition of a class return true so the
1572 base types are analyzed. */
1575 add_type_duplicate (odr_type val
, tree type
)
1577 bool build_bases
= false;
1578 bool prevail
= false;
1579 bool odr_must_violate
= false;
1581 if (!val
->types_set
)
1582 val
->types_set
= new hash_set
<tree
>;
1584 /* Chose polymorphic type as leader (this happens only in case of ODR
1586 if ((TREE_CODE (type
) == RECORD_TYPE
&& TYPE_BINFO (type
)
1587 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
1588 && (TREE_CODE (val
->type
) != RECORD_TYPE
|| !TYPE_BINFO (val
->type
)
1589 || !polymorphic_type_binfo_p (TYPE_BINFO (val
->type
))))
1594 /* Always prefer complete type to be the leader. */
1595 else if (!COMPLETE_TYPE_P (val
->type
) && COMPLETE_TYPE_P (type
))
1598 if (TREE_CODE (type
) == RECORD_TYPE
)
1599 build_bases
= TYPE_BINFO (type
);
1601 else if (COMPLETE_TYPE_P (val
->type
) && !COMPLETE_TYPE_P (type
))
1603 else if (TREE_CODE (val
->type
) == ENUMERAL_TYPE
1604 && TREE_CODE (type
) == ENUMERAL_TYPE
1605 && !TYPE_VALUES (val
->type
) && TYPE_VALUES (type
))
1607 else if (TREE_CODE (val
->type
) == RECORD_TYPE
1608 && TREE_CODE (type
) == RECORD_TYPE
1609 && TYPE_BINFO (type
) && !TYPE_BINFO (val
->type
))
1611 gcc_assert (!val
->bases
.length ());
1617 std::swap (val
->type
, type
);
1619 val
->types_set
->add (type
);
1624 gcc_checking_assert (can_be_name_hashed_p (type
)
1625 && can_be_name_hashed_p (val
->type
));
1628 bool base_mismatch
= false;
1630 bool warned
= false;
1631 hash_set
<type_pair
> visited
;
1633 gcc_assert (in_lto_p
);
1634 vec_safe_push (val
->types
, type
);
1636 /* If both are class types, compare the bases. */
1637 if (COMPLETE_TYPE_P (type
) && COMPLETE_TYPE_P (val
->type
)
1638 && TREE_CODE (val
->type
) == RECORD_TYPE
1639 && TREE_CODE (type
) == RECORD_TYPE
1640 && TYPE_BINFO (val
->type
) && TYPE_BINFO (type
))
1642 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type
))
1643 != BINFO_N_BASE_BINFOS (TYPE_BINFO (val
->type
)))
1645 if (!flag_ltrans
&& !warned
&& !val
->odr_violated
)
1648 warn_odr (type
, val
->type
, NULL
, NULL
, !warned
, &warned
,
1649 "a type with the same name but different "
1650 "number of polymorphic bases is "
1651 "defined in another translation unit");
1654 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type
))
1655 > BINFO_N_BASE_BINFOS (TYPE_BINFO (val
->type
)))
1656 extra_base
= BINFO_BASE_BINFO
1658 BINFO_N_BASE_BINFOS (TYPE_BINFO (val
->type
)));
1660 extra_base
= BINFO_BASE_BINFO
1661 (TYPE_BINFO (val
->type
),
1662 BINFO_N_BASE_BINFOS (TYPE_BINFO (type
)));
1663 tree extra_base_type
= BINFO_TYPE (extra_base
);
1664 inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type
)),
1665 "the extra base is defined here");
1668 base_mismatch
= true;
1671 for (i
= 0; i
< BINFO_N_BASE_BINFOS (TYPE_BINFO (type
)); i
++)
1673 tree base1
= BINFO_BASE_BINFO (TYPE_BINFO (type
), i
);
1674 tree base2
= BINFO_BASE_BINFO (TYPE_BINFO (val
->type
), i
);
1675 tree type1
= BINFO_TYPE (base1
);
1676 tree type2
= BINFO_TYPE (base2
);
1678 if (types_odr_comparable (type1
, type2
))
1680 if (!types_same_for_odr (type1
, type2
))
1681 base_mismatch
= true;
1684 if (!odr_types_equivalent_p (type1
, type2
))
1685 base_mismatch
= true;
1688 if (!warned
&& !val
->odr_violated
)
1690 warn_odr (type
, val
->type
, NULL
, NULL
,
1692 "a type with the same name but different base "
1693 "type is defined in another translation unit");
1695 warn_types_mismatch (type1
, type2
,
1696 UNKNOWN_LOCATION
, UNKNOWN_LOCATION
);
1700 if (BINFO_OFFSET (base1
) != BINFO_OFFSET (base2
))
1702 base_mismatch
= true;
1703 if (!warned
&& !val
->odr_violated
)
1704 warn_odr (type
, val
->type
, NULL
, NULL
,
1706 "a type with the same name but different base "
1707 "layout is defined in another translation unit");
1710 /* One of bases is not of complete type. */
1711 if (!TYPE_BINFO (type1
) != !TYPE_BINFO (type2
))
1713 /* If we have a polymorphic type info specified for TYPE1
1714 but not for TYPE2 we possibly missed a base when recording
1716 Be sure this does not happen. */
1717 if (TYPE_BINFO (type1
)
1718 && polymorphic_type_binfo_p (TYPE_BINFO (type1
))
1720 odr_must_violate
= true;
1723 /* One base is polymorphic and the other not.
1724 This ought to be diagnosed earlier, but do not ICE in the
1726 else if (TYPE_BINFO (type1
)
1727 && polymorphic_type_binfo_p (TYPE_BINFO (type1
))
1728 != polymorphic_type_binfo_p (TYPE_BINFO (type2
)))
1730 if (!warned
&& !val
->odr_violated
)
1731 warn_odr (type
, val
->type
, NULL
, NULL
,
1733 "a base of the type is polymorphic only in one "
1734 "translation unit");
1735 base_mismatch
= true;
1742 odr_violation_reported
= true;
1743 val
->odr_violated
= true;
1745 if (symtab
->dump_file
)
1747 fprintf (symtab
->dump_file
, "ODR base violation\n");
1749 print_node (symtab
->dump_file
, "", val
->type
, 0);
1750 putc ('\n',symtab
->dump_file
);
1751 print_node (symtab
->dump_file
, "", type
, 0);
1752 putc ('\n',symtab
->dump_file
);
1757 /* Next compare memory layout.
1758 The DECL_SOURCE_LOCATIONs in this invocation came from LTO streaming.
1759 We must apply the location cache to ensure that they are valid
1760 before we can pass them to odr_types_equivalent_p (PR lto/83121). */
1761 if (lto_location_cache::current_cache
)
1762 lto_location_cache::current_cache
->apply_location_cache ();
1763 /* As a special case we stream mangles names of integer types so we can see
1764 if they are believed to be same even though they have different
1765 representation. Avoid bogus warning on mismatches in these. */
1766 if (TREE_CODE (type
) != INTEGER_TYPE
1767 && TREE_CODE (val
->type
) != INTEGER_TYPE
1768 && !odr_types_equivalent_p (val
->type
, type
,
1769 !flag_ltrans
&& !val
->odr_violated
&& !warned
,
1771 DECL_SOURCE_LOCATION (TYPE_NAME (val
->type
)),
1772 DECL_SOURCE_LOCATION (TYPE_NAME (type
))))
1775 odr_violation_reported
= true;
1776 val
->odr_violated
= true;
1778 gcc_assert (val
->odr_violated
|| !odr_must_violate
);
1779 /* Sanity check that all bases will be build same way again. */
1781 && COMPLETE_TYPE_P (type
) && COMPLETE_TYPE_P (val
->type
)
1782 && TREE_CODE (val
->type
) == RECORD_TYPE
1783 && TREE_CODE (type
) == RECORD_TYPE
1784 && TYPE_BINFO (val
->type
) && TYPE_BINFO (type
)
1785 && !val
->odr_violated
1786 && !base_mismatch
&& val
->bases
.length ())
1788 unsigned int num_poly_bases
= 0;
1791 for (i
= 0; i
< BINFO_N_BASE_BINFOS (TYPE_BINFO (type
)); i
++)
1792 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1793 (TYPE_BINFO (type
), i
)))
1795 gcc_assert (num_poly_bases
== val
->bases
.length ());
1796 for (j
= 0, i
= 0; i
< BINFO_N_BASE_BINFOS (TYPE_BINFO (type
));
1798 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1799 (TYPE_BINFO (type
), i
)))
1801 odr_type base
= get_odr_type
1803 (BINFO_BASE_BINFO (TYPE_BINFO (type
),
1806 gcc_assert (val
->bases
[j
] == base
);
1812 /* Regularize things a little. During LTO same types may come with
1813 different BINFOs. Either because their virtual table was
1814 not merged by tree merging and only later at decl merging or
1815 because one type comes with external vtable, while other
1816 with internal. We want to merge equivalent binfos to conserve
1817 memory and streaming overhead.
1819 The external vtables are more harmful: they contain references
1820 to external declarations of methods that may be defined in the
1821 merged LTO unit. For this reason we absolutely need to remove
1822 them and replace by internal variants. Not doing so will lead
1823 to incomplete answers from possible_polymorphic_call_targets.
1825 FIXME: disable for now; because ODR types are now build during
1826 streaming in, the variants do not need to be linked to the type,
1827 yet. We need to do the merging in cleanup pass to be implemented
1829 if (!flag_ltrans
&& merge
1831 && TREE_CODE (val
->type
) == RECORD_TYPE
1832 && TREE_CODE (type
) == RECORD_TYPE
1833 && TYPE_BINFO (val
->type
) && TYPE_BINFO (type
)
1834 && TYPE_MAIN_VARIANT (type
) == type
1835 && TYPE_MAIN_VARIANT (val
->type
) == val
->type
1836 && BINFO_VTABLE (TYPE_BINFO (val
->type
))
1837 && BINFO_VTABLE (TYPE_BINFO (type
)))
1839 tree master_binfo
= TYPE_BINFO (val
->type
);
1840 tree v1
= BINFO_VTABLE (master_binfo
);
1841 tree v2
= BINFO_VTABLE (TYPE_BINFO (type
));
1843 if (TREE_CODE (v1
) == POINTER_PLUS_EXPR
)
1845 gcc_assert (TREE_CODE (v2
) == POINTER_PLUS_EXPR
1846 && operand_equal_p (TREE_OPERAND (v1
, 1),
1847 TREE_OPERAND (v2
, 1), 0));
1848 v1
= TREE_OPERAND (TREE_OPERAND (v1
, 0), 0);
1849 v2
= TREE_OPERAND (TREE_OPERAND (v2
, 0), 0);
1851 gcc_assert (DECL_ASSEMBLER_NAME (v1
)
1852 == DECL_ASSEMBLER_NAME (v2
));
1854 if (DECL_EXTERNAL (v1
) && !DECL_EXTERNAL (v2
))
1858 set_type_binfo (val
->type
, TYPE_BINFO (type
));
1859 for (i
= 0; i
< val
->types
->length (); i
++)
1861 if (TYPE_BINFO ((*val
->types
)[i
])
1863 set_type_binfo ((*val
->types
)[i
], TYPE_BINFO (type
));
1865 BINFO_TYPE (TYPE_BINFO (type
)) = val
->type
;
1868 set_type_binfo (type
, master_binfo
);
1873 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
1876 obj_type_ref_class (const_tree ref
)
1878 gcc_checking_assert (TREE_CODE (ref
) == OBJ_TYPE_REF
);
1879 ref
= TREE_TYPE (ref
);
1880 gcc_checking_assert (TREE_CODE (ref
) == POINTER_TYPE
);
1881 ref
= TREE_TYPE (ref
);
1882 /* We look for type THIS points to. ObjC also builds
1883 OBJ_TYPE_REF with non-method calls, Their first parameter
1884 ID however also corresponds to class type. */
1885 gcc_checking_assert (TREE_CODE (ref
) == METHOD_TYPE
1886 || TREE_CODE (ref
) == FUNCTION_TYPE
);
1887 ref
= TREE_VALUE (TYPE_ARG_TYPES (ref
));
1888 gcc_checking_assert (TREE_CODE (ref
) == POINTER_TYPE
);
1889 tree ret
= TREE_TYPE (ref
);
1890 if (!in_lto_p
&& !TYPE_STRUCTURAL_EQUALITY_P (ret
))
1891 ret
= TYPE_CANONICAL (ret
);
1893 ret
= get_odr_type (ret
)->type
;
1897 /* Get ODR type hash entry for TYPE. If INSERT is true, create
1898 possibly new entry. */
1901 get_odr_type (tree type
, bool insert
)
1903 odr_type_d
**slot
= NULL
;
1904 odr_type val
= NULL
;
1906 bool build_bases
= false;
1907 bool insert_to_odr_array
= false;
1910 type
= TYPE_MAIN_VARIANT (type
);
1911 if (!in_lto_p
&& !TYPE_STRUCTURAL_EQUALITY_P (type
))
1912 type
= TYPE_CANONICAL (type
);
1914 gcc_checking_assert (can_be_name_hashed_p (type
));
1916 hash
= hash_odr_name (type
);
1917 slot
= odr_hash
->find_slot_with_hash (type
, hash
,
1918 insert
? INSERT
: NO_INSERT
);
1923 /* See if we already have entry for type. */
1928 if (val
->type
!= type
&& insert
1929 && (!val
->types_set
|| !val
->types_set
->add (type
)))
1930 build_bases
= add_type_duplicate (val
, type
);
1934 val
= ggc_cleared_alloc
<odr_type_d
> ();
1937 val
->derived_types
= vNULL
;
1938 if (type_with_linkage_p (type
))
1939 val
->anonymous_namespace
= type_in_anonymous_namespace_p (type
);
1941 val
->anonymous_namespace
= 0;
1942 build_bases
= COMPLETE_TYPE_P (val
->type
);
1943 insert_to_odr_array
= true;
1947 if (build_bases
&& TREE_CODE (type
) == RECORD_TYPE
&& TYPE_BINFO (type
)
1948 && type_with_linkage_p (type
)
1949 && type
== TYPE_MAIN_VARIANT (type
))
1951 tree binfo
= TYPE_BINFO (type
);
1954 gcc_assert (BINFO_TYPE (TYPE_BINFO (val
->type
)) == type
);
1956 val
->all_derivations_known
= type_all_derivations_known_p (type
);
1957 for (i
= 0; i
< BINFO_N_BASE_BINFOS (binfo
); i
++)
1958 /* For now record only polymorphic types. other are
1959 pointless for devirtualization and we cannot precisely
1960 determine ODR equivalency of these during LTO. */
1961 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo
, i
)))
1963 tree base_type
= BINFO_TYPE (BINFO_BASE_BINFO (binfo
, i
));
1964 odr_type base
= get_odr_type (base_type
, true);
1965 gcc_assert (TYPE_MAIN_VARIANT (base_type
) == base_type
);
1966 base
->derived_types
.safe_push (val
);
1967 val
->bases
.safe_push (base
);
1968 if (base
->id
> base_id
)
1972 /* Ensure that type always appears after bases. */
1973 if (insert_to_odr_array
)
1976 val
->id
= odr_types
.length ();
1977 vec_safe_push (odr_types_ptr
, val
);
1979 else if (base_id
> val
->id
)
1981 odr_types
[val
->id
] = 0;
1982 /* Be sure we did not recorded any derived types; these may need
1984 gcc_assert (val
->derived_types
.length() == 0);
1985 val
->id
= odr_types
.length ();
1986 vec_safe_push (odr_types_ptr
, val
);
1991 /* Return type that in ODR type hash prevailed TYPE. Be careful and punt
1992 on ODR violations. */
1995 prevailing_odr_type (tree type
)
1997 odr_type t
= get_odr_type (type
, false);
1998 if (!t
|| t
->odr_violated
)
2003 /* Set tbaa_enabled flag for TYPE. */
2006 enable_odr_based_tbaa (tree type
)
2008 odr_type t
= get_odr_type (type
, true);
2009 t
->tbaa_enabled
= true;
2012 /* True if canonical type of TYPE is determined using ODR name. */
2015 odr_based_tbaa_p (const_tree type
)
2017 if (!RECORD_OR_UNION_TYPE_P (type
))
2019 odr_type t
= get_odr_type (const_cast <tree
> (type
), false);
2020 if (!t
|| !t
->tbaa_enabled
)
2025 /* Set TYPE_CANONICAL of type and all its variants and duplicates
2029 set_type_canonical_for_odr_type (tree type
, tree canonical
)
2031 odr_type t
= get_odr_type (type
, false);
2035 for (tree t2
= t
->type
; t2
; t2
= TYPE_NEXT_VARIANT (t2
))
2036 TYPE_CANONICAL (t2
) = canonical
;
2038 FOR_EACH_VEC_ELT (*t
->types
, i
, tt
)
2039 for (tree t2
= tt
; t2
; t2
= TYPE_NEXT_VARIANT (t2
))
2040 TYPE_CANONICAL (t2
) = canonical
;
2043 /* Return true if we reported some ODR violation on TYPE. */
2046 odr_type_violation_reported_p (tree type
)
2048 return get_odr_type (type
, false)->odr_violated
;
2051 /* Add TYPE of ODR type hash. */
2054 register_odr_type (tree type
)
2057 odr_hash
= new odr_hash_type (23);
2058 if (type
== TYPE_MAIN_VARIANT (type
))
2060 /* To get ODR warnings right, first register all sub-types. */
2061 if (RECORD_OR_UNION_TYPE_P (type
)
2062 && COMPLETE_TYPE_P (type
))
2064 /* Limit recursion on types which are already registered. */
2065 odr_type ot
= get_odr_type (type
, false);
2067 && (ot
->type
== type
2069 && ot
->types_set
->contains (type
))))
2071 for (tree f
= TYPE_FIELDS (type
); f
; f
= TREE_CHAIN (f
))
2072 if (TREE_CODE (f
) == FIELD_DECL
)
2074 tree subtype
= TREE_TYPE (f
);
2076 while (TREE_CODE (subtype
) == ARRAY_TYPE
)
2077 subtype
= TREE_TYPE (subtype
);
2078 if (type_with_linkage_p (TYPE_MAIN_VARIANT (subtype
)))
2079 register_odr_type (TYPE_MAIN_VARIANT (subtype
));
2081 if (TYPE_BINFO (type
))
2082 for (unsigned int i
= 0;
2083 i
< BINFO_N_BASE_BINFOS (TYPE_BINFO (type
)); i
++)
2084 register_odr_type (BINFO_TYPE (BINFO_BASE_BINFO
2085 (TYPE_BINFO (type
), i
)));
2087 get_odr_type (type
, true);
2091 /* Return true if type is known to have no derivations. */
2094 type_known_to_have_no_derivations_p (tree t
)
2096 return (type_all_derivations_known_p (t
)
2097 && (TYPE_FINAL_P (t
)
2099 && !get_odr_type (t
, true)->derived_types
.length())));
2102 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
2103 recursive printing. */
2106 dump_odr_type (FILE *f
, odr_type t
, int indent
=0)
2109 fprintf (f
, "%*s type %i: ", indent
* 2, "", t
->id
);
2110 print_generic_expr (f
, t
->type
, TDF_SLIM
);
2111 fprintf (f
, "%s", t
->anonymous_namespace
? " (anonymous namespace)":"");
2112 fprintf (f
, "%s\n", t
->all_derivations_known
? " (derivations known)":"");
2113 if (TYPE_NAME (t
->type
))
2115 if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t
->type
)))
2116 fprintf (f
, "%*s mangled name: %s\n", indent
* 2, "",
2118 (DECL_ASSEMBLER_NAME (TYPE_NAME (t
->type
))));
2120 if (t
->bases
.length ())
2122 fprintf (f
, "%*s base odr type ids: ", indent
* 2, "");
2123 for (i
= 0; i
< t
->bases
.length (); i
++)
2124 fprintf (f
, " %i", t
->bases
[i
]->id
);
2127 if (t
->derived_types
.length ())
2129 fprintf (f
, "%*s derived types:\n", indent
* 2, "");
2130 for (i
= 0; i
< t
->derived_types
.length (); i
++)
2131 dump_odr_type (f
, t
->derived_types
[i
], indent
+ 1);
2136 /* Dump the type inheritance graph. */
2139 dump_type_inheritance_graph (FILE *f
)
2142 unsigned int num_all_types
= 0, num_types
= 0, num_duplicates
= 0;
2145 fprintf (f
, "\n\nType inheritance graph:\n");
2146 for (i
= 0; i
< odr_types
.length (); i
++)
2148 if (odr_types
[i
] && odr_types
[i
]->bases
.length () == 0)
2149 dump_odr_type (f
, odr_types
[i
]);
2151 for (i
= 0; i
< odr_types
.length (); i
++)
2157 if (!odr_types
[i
]->types
|| !odr_types
[i
]->types
->length ())
2160 /* To aid ODR warnings we also mangle integer constants but do
2161 not consider duplicates there. */
2162 if (TREE_CODE (odr_types
[i
]->type
) == INTEGER_TYPE
)
2165 /* It is normal to have one duplicate and one normal variant. */
2166 if (odr_types
[i
]->types
->length () == 1
2167 && COMPLETE_TYPE_P (odr_types
[i
]->type
)
2168 && !COMPLETE_TYPE_P ((*odr_types
[i
]->types
)[0]))
2174 fprintf (f
, "Duplicate tree types for odr type %i\n", i
);
2175 print_node (f
, "", odr_types
[i
]->type
, 0);
2176 print_node (f
, "", TYPE_NAME (odr_types
[i
]->type
), 0);
2178 for (j
= 0; j
< odr_types
[i
]->types
->length (); j
++)
2182 fprintf (f
, "duplicate #%i\n", j
);
2183 print_node (f
, "", (*odr_types
[i
]->types
)[j
], 0);
2184 t
= (*odr_types
[i
]->types
)[j
];
2185 while (TYPE_P (t
) && TYPE_CONTEXT (t
))
2187 t
= TYPE_CONTEXT (t
);
2188 print_node (f
, "", t
, 0);
2190 print_node (f
, "", TYPE_NAME ((*odr_types
[i
]->types
)[j
]), 0);
2194 fprintf (f
, "Out of %i types there are %i types with duplicates; "
2195 "%i duplicates overall\n", num_all_types
, num_types
, num_duplicates
);
2198 /* Save some WPA->ltrans streaming by freeing stuff needed only for good
2200 We free TYPE_VALUES of enums and also make TYPE_DECLs to not point back
2201 to the type (which is needed to keep them in the same SCC and preserve
2202 location information to output warnings) and subsequently we make all
2203 TYPE_DECLS of same assembler name equivalent. */
2206 free_odr_warning_data ()
2208 static bool odr_data_freed
= false;
2210 if (odr_data_freed
|| !flag_wpa
|| !odr_types_ptr
)
2213 odr_data_freed
= true;
2215 for (unsigned int i
= 0; i
< odr_types
.length (); i
++)
2218 tree t
= odr_types
[i
]->type
;
2220 if (TREE_CODE (t
) == ENUMERAL_TYPE
)
2221 TYPE_VALUES (t
) = NULL
;
2222 TREE_TYPE (TYPE_NAME (t
)) = void_type_node
;
2224 if (odr_types
[i
]->types
)
2225 for (unsigned int j
= 0; j
< odr_types
[i
]->types
->length (); j
++)
2227 tree td
= (*odr_types
[i
]->types
)[j
];
2229 if (TREE_CODE (td
) == ENUMERAL_TYPE
)
2230 TYPE_VALUES (td
) = NULL
;
2231 TYPE_NAME (td
) = TYPE_NAME (t
);
2234 odr_data_freed
= true;
2237 /* Initialize IPA devirt and build inheritance tree graph. */
/* Seeds odr_hash from the method basetype of every virtual method and
   from the DECL_CONTEXT of every virtual table found in the symbol
   table, optionally dumping the resulting inheritance graph.
   NOTE(review): the symbol-table iteration header (FOR_EACH_SYMBOL or
   similar), braces and several declarations appear to have been lost in
   extraction; confirm against upstream before editing.  */
2240 build_type_inheritance_graph (void)
2242 struct symtab_node
*n
;
2243 FILE *inheritance_dump_file
;
2248 free_odr_warning_data ();
2251 timevar_push (TV_IPA_INHERITANCE
);
2252 inheritance_dump_file
= dump_begin (TDI_inheritance
, &flags
);
2253 odr_hash
= new odr_hash_type (23);
2255 /* We reconstruct the graph starting from types of all methods seen in the
2258 if (is_a
<cgraph_node
*> (n
)
2259 && DECL_VIRTUAL_P (n
->decl
)
2260 && n
->real_symbol_p ())
2261 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n
->decl
)), true);
2263 /* Look also for virtual tables of types that do not define any methods.
2265 We need it in a case where class B has virtual base of class A
2266 re-defining its virtual method and there is class C with no virtual
2267 methods with B as virtual base.
2269 Here we output B's virtual method in two variants - for non-virtual
2270 and virtual inheritance. B's virtual table has non-virtual version,
2271 while C's has virtual.
2273 For this reason we need to know about C in order to include both
2274 variants of B. More correctly, record_target_from_binfo should
2275 add both variants of the method when walking B, but we have no
2276 link in between them.
2278 We rely on fact that either the method is exported and thus we
2279 assume it is called externally or C is in anonymous namespace and
2280 thus we will see the vtable. */
2282 else if (is_a
<varpool_node
*> (n
)
2283 && DECL_VIRTUAL_P (n
->decl
)
2284 && TREE_CODE (DECL_CONTEXT (n
->decl
)) == RECORD_TYPE
2285 && TYPE_BINFO (DECL_CONTEXT (n
->decl
))
2286 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n
->decl
))))
2287 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n
->decl
)), true);
/* Flush the inheritance dump, if one was requested.  */
2288 if (inheritance_dump_file
)
2290 dump_type_inheritance_graph (inheritance_dump_file
);
2291 dump_end (TDI_inheritance
, inheritance_dump_file
);
2293 free_odr_warning_data ();
2294 timevar_pop (TV_IPA_INHERITANCE
);
2297 /* Return true if N has reference from live virtual table
2298 (and thus can be a destination of polymorphic call).
2299 Be conservatively correct when callgraph is not built or
2300 if the method may be referred externally. */
/* NOTE(review): the 'return' statements of the early conservative exits
   and of the reference scan below appear to be missing from this
   extraction; verify against upstream.  */
2303 referenced_from_vtable_p (struct cgraph_node
*node
)
2306 struct ipa_ref
*ref
;
/* Symbols visible outside this unit/partition may be referenced from
   vtables we cannot see; answer conservatively.  */
2309 if (node
->externally_visible
2310 || DECL_EXTERNAL (node
->decl
)
2311 || node
->used_from_other_partition
)
2314 /* Keep this test constant time.
2315 It is unlikely this can happen except for the case where speculative
2316 devirtualization introduced many speculative edges to this node.
2317 In this case the target is very likely alive anyway. */
2318 if (node
->ref_list
.referring
.length () > 100)
2321 /* We need references built. */
2322 if (symtab
->state
<= CONSTRUCTION
)
/* Scan all referring entries: follow aliases recursively and look for
   an address reference taken from a DECL_VIRTUAL_P variable, i.e. a
   virtual table.  */
2325 for (i
= 0; node
->iterate_referring (i
, ref
); i
++)
2326 if ((ref
->use
== IPA_REF_ALIAS
2327 && referenced_from_vtable_p (dyn_cast
<cgraph_node
*> (ref
->referring
)))
2328 || (ref
->use
== IPA_REF_ADDR
2329 && VAR_P (ref
->referring
->decl
)
2330 && DECL_VIRTUAL_P (ref
->referring
->decl
)))
2338 /* Return true if TARGET is cxa_pure_virtual. */
/* __cxa_pure_virtual (the runtime stub presumably placed in vtable slots
   of pure virtual methods) is a plain function rather than a method, so
   it is recognized by its non-METHOD_TYPE type plus its name.  */
2341 is_cxa_pure_virtual_p (tree target
)
2343 return target
&& TREE_CODE (TREE_TYPE (target
)) != METHOD_TYPE
2344 && DECL_NAME (target
)
2345 && id_equal (DECL_NAME (target
),
2346 "__cxa_pure_virtual");
2349 /* If TARGET has associated node, record it in the NODES array.
2350 CAN_REFER specify if program can refer to the target directly.
2351 if TARGET is unknown (NULL) or it cannot be inserted (for example because
2352 its body was already removed and there is no way to refer to it), clear
/* NOTE(review): parts of this function (the CAN_REFER/COMPLETEP
   parameters' tail, several branches, braces and early returns) were
   dropped by the extraction; verify against upstream ipa-devirt.c.  */
2356 maybe_record_node (vec
<cgraph_node
*> &nodes
,
2357 tree target
, hash_set
<tree
> *inserted
,
2361 struct cgraph_node
*target_node
, *alias_target
;
2362 enum availability avail
;
2363 bool pure_virtual
= is_cxa_pure_virtual_p (target
);
2365 /* __builtin_unreachable do not need to be added into
2366 list of targets; the runtime effect of calling them is undefined.
2367 Only "real" virtual methods should be accounted. */
2368 if (target
&& TREE_CODE (TREE_TYPE (target
)) != METHOD_TYPE
&& !pure_virtual
)
2373 /* The only case when method of anonymous namespace becomes unreferable
2374 is when we completely optimized it out. */
2377 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target
)))
2385 target_node
= cgraph_node::get (target
);
2387 /* Prefer alias target over aliases, so we do not get confused by
2391 alias_target
= target_node
->ultimate_alias_target (&avail
);
2392 if (target_node
!= alias_target
2393 && avail
>= AVAIL_AVAILABLE
2394 && target_node
->get_availability ())
2395 target_node
= alias_target
;
2398 /* Method can only be called by polymorphic call if any
2399 of vtables referring to it are alive.
2401 While this holds for non-anonymous functions, too, there are
2402 cases where we want to keep them in the list; for example
2403 inline functions with -fno-weak are static, but we still
2404 may devirtualize them when instance comes from other unit.
2405 The same holds for LTO.
2407 Currently we ignore these functions in speculative devirtualization.
2408 ??? Maybe it would make sense to be more aggressive for LTO even
2412 && type_in_anonymous_namespace_p (DECL_CONTEXT (target
))
2414 || !referenced_from_vtable_p (target_node
)))
2416 /* See if TARGET is useful function we can deal with. */
2417 else if (target_node
!= NULL
2418 && (TREE_PUBLIC (target
)
2419 || DECL_EXTERNAL (target
)
2420 || target_node
->definition
)
2421 && target_node
->real_symbol_p ())
2423 gcc_assert (!target_node
->inlined_to
);
2424 gcc_assert (target_node
->real_symbol_p ());
2425 /* When sanitizing, do not assume that __cxa_pure_virtual is not called
2426 by valid program. */
2427 if (flag_sanitize
& SANITIZE_UNREACHABLE
)
2429 /* Only add pure virtual if it is the only possible target. This way
2430 we will preserve the diagnostics about pure virtual called in many
2431 cases without disabling optimization in other. */
2432 else if (pure_virtual
)
2434 if (nodes
.length ())
2437 /* If we found a real target, take away cxa_pure_virtual. */
2438 else if (!pure_virtual
&& nodes
.length () == 1
2439 && is_cxa_pure_virtual_p (nodes
[0]->decl
))
/* Record the node at most once: INSERTED de-duplicates by decl, and the
   node also goes into the global cache so removal hooks can flush.  */
2441 if (pure_virtual
&& nodes
.length ())
2443 if (!inserted
->add (target
))
2445 cached_polymorphic_call_targets
->add (target_node
);
2446 nodes
.safe_push (target_node
);
2449 else if (!completep
)
2451 /* We have definition of __cxa_pure_virtual that is not accessible (it is
2452 optimized out or partitioned to other unit) so we cannot add it. When
2453 not sanitizing, there is nothing to do.
2454 Otherwise declare the list incomplete. */
2455 else if (pure_virtual
)
2457 if (flag_sanitize
& SANITIZE_UNREACHABLE
)
2460 else if (flag_ltrans
2461 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target
)))
2465 /* See if BINFO's type matches OUTER_TYPE. If so, look up
2466 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
2467 method in vtable and insert method to NODES array
2468 or BASES_TO_CONSIDER if this array is non-NULL.
2469 Otherwise recurse to base BINFOs.
2470 This matches what get_binfo_at_offset does, but with offset
2473 TYPE_BINFOS is a stack of BINFOS of types with defined
2474 virtual table seen on way from class type to BINFO.
2476 MATCHED_VTABLES tracks virtual tables we already did lookup
2477 for virtual function in. INSERTED tracks nodes we already
2480 ANONYMOUS is true if BINFO is part of anonymous namespace.
2482 Clear COMPLETEP when we hit unreferable target.
/* NOTE(review): some parameters (binfo, otr_type, outer_type, anonymous,
   completep), braces, returns and the trailing type_binfos.pop() were
   dropped by the extraction; verify against upstream.  */
2486 record_target_from_binfo (vec
<cgraph_node
*> &nodes
,
2487 vec
<tree
> *bases_to_consider
,
2490 vec
<tree
> &type_binfos
,
2491 HOST_WIDE_INT otr_token
,
2493 HOST_WIDE_INT offset
,
2494 hash_set
<tree
> *inserted
,
2495 hash_set
<tree
> *matched_vtables
,
2499 tree type
= BINFO_TYPE (binfo
);
/* Push every BINFO that carries a vtable so the matching pop at the end
   keeps TYPE_BINFOS a path stack during recursion.  */
2504 if (BINFO_VTABLE (binfo
))
2505 type_binfos
.safe_push (binfo
);
2506 if (types_same_for_odr (type
, outer_type
))
2509 tree type_binfo
= NULL
;
2511 /* Look up BINFO with virtual table. For normal types it is always last
2513 for (i
= type_binfos
.length () - 1; i
>= 0; i
--)
2514 if (BINFO_OFFSET (type_binfos
[i
]) == BINFO_OFFSET (binfo
))
2516 type_binfo
= type_binfos
[i
];
2519 if (BINFO_VTABLE (binfo
))
2521 /* If this is duplicated BINFO for base shared by virtual inheritance,
2522 we may not have its associated vtable. This is not a problem, since
2523 we will walk it on the other path. */
2526 tree inner_binfo
= get_binfo_at_offset (type_binfo
,
2530 gcc_assert (odr_violation_reported
);
2533 /* For types in anonymous namespace first check if the respective vtable
2534 is alive. If not, we know the type can't be called. */
2535 if (!flag_ltrans
&& anonymous
)
2537 tree vtable
= BINFO_VTABLE (inner_binfo
);
2538 varpool_node
*vnode
;
2540 if (TREE_CODE (vtable
) == POINTER_PLUS_EXPR
)
2541 vtable
= TREE_OPERAND (TREE_OPERAND (vtable
, 0), 0);
2542 vnode
= varpool_node::get (vtable
);
2543 if (!vnode
|| !vnode
->definition
)
2546 gcc_assert (inner_binfo
);
/* Skip vtables already processed; only the recording path adds to
   MATCHED_VTABLES, the bases_to_consider path just tests membership.  */
2547 if (bases_to_consider
2548 ? !matched_vtables
->contains (BINFO_VTABLE (inner_binfo
))
2549 : !matched_vtables
->add (BINFO_VTABLE (inner_binfo
)))
2552 tree target
= gimple_get_virt_method_for_binfo (otr_token
,
2555 if (!bases_to_consider
)
2556 maybe_record_node (nodes
, target
, inserted
, can_refer
, completep
);
2557 /* Destructors are never called via construction vtables. */
2558 else if (!target
|| !DECL_CXX_DESTRUCTOR_P (target
))
2559 bases_to_consider
->safe_push (target
);
/* Recurse into polymorphic bases only.  */
2565 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
2566 /* Walking bases that have no virtual method is pointless exercise. */
2567 if (polymorphic_type_binfo_p (base_binfo
))
2568 record_target_from_binfo (nodes
, bases_to_consider
, base_binfo
, otr_type
,
2570 otr_token
, outer_type
, offset
, inserted
,
2571 matched_vtables
, anonymous
, completep
);
2572 if (BINFO_VTABLE (binfo
))
2576 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
2577 of TYPE, insert them to NODES, recurse into derived nodes.
2578 INSERTED is used to avoid duplicate insertions of methods into NODES.
2579 MATCHED_VTABLES are used to avoid duplicate walking vtables.
2580 Clear COMPLETEP if unreferable target is found.
2582 If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
2583 all cases where BASE_SKIPPED is true (because the base is abstract
/* NOTE(review): some parameters (otr_type, outer_type, type, completep)
   and braces are missing from this extraction; verify upstream.  */
2587 possible_polymorphic_call_targets_1 (vec
<cgraph_node
*> &nodes
,
2588 hash_set
<tree
> *inserted
,
2589 hash_set
<tree
> *matched_vtables
,
2592 HOST_WIDE_INT otr_token
,
2594 HOST_WIDE_INT offset
,
2596 vec
<tree
> &bases_to_consider
,
2597 bool consider_construction
)
2599 tree binfo
= TYPE_BINFO (type
->type
);
2601 auto_vec
<tree
, 8> type_binfos
;
2602 bool possibly_instantiated
= type_possibly_instantiated_p (type
->type
);
2604 /* We may need to consider types w/o instances because of possible derived
2605 types using their methods either directly or via construction vtables.
2606 We are safe to skip them when all derivations are known, since we will
2608 This is done by recording them to BASES_TO_CONSIDER array. */
2609 if (possibly_instantiated
|| consider_construction
)
2611 record_target_from_binfo (nodes
,
2612 (!possibly_instantiated
2613 && type_all_derivations_known_p (type
->type
))
2614 ? &bases_to_consider
: NULL
,
2615 binfo
, otr_type
, type_binfos
, otr_token
,
2617 inserted
, matched_vtables
,
2618 type
->anonymous_namespace
, completep
);
/* Recurse into every derived ODR type.  */
2620 for (i
= 0; i
< type
->derived_types
.length (); i
++)
2621 possible_polymorphic_call_targets_1 (nodes
, inserted
,
2624 type
->derived_types
[i
],
2625 otr_token
, outer_type
, offset
, completep
,
2626 bases_to_consider
, consider_construction
);
2629 /* Cache of queries for polymorphic call targets.
2631 Enumerating all call targets may get expensive when there are many
2632 polymorphic calls in the program, so we memoize all the previous
2633 queries and avoid duplicated work. */
/* NOTE(review): further members (type, speculative, complete, warning
   ids) are referenced elsewhere in this file but their declarations are
   not visible in this extraction.  */
2635 class polymorphic_call_target_d
/* Index into the virtual table the call goes through.  */
2638 HOST_WIDE_INT otr_token
;
/* Context (outer type, offset, speculation flags) of the query.  */
2639 ipa_polymorphic_call_context context
;
/* Cached list of possible targets; released by the hasher's remove.  */
2641 vec
<cgraph_node
*> targets
;
/* Number of ODR types when the entry was computed; adding new types can
   change the answer, so this participates in hashing/equality.  */
2644 unsigned int n_odr_types
;
2649 /* Polymorphic call target cache helpers. */
/* Hash traits for the polymorphic-call-target cache: hashing/equality
   over the full query key, and cleanup of the cached target vector.  */
2651 struct polymorphic_call_target_hasher
2652 : pointer_hash
<polymorphic_call_target_d
>
2654 static inline hashval_t
hash (const polymorphic_call_target_d
*);
2655 static inline bool equal (const polymorphic_call_target_d
*,
2656 const polymorphic_call_target_d
*);
2657 static inline void remove (polymorphic_call_target_d
*);
2660 /* Return the computed hashcode for ODR_QUERY. */
/* Mixes every field that participates in equality: token, ODR type id,
   outer type/offset, n_odr_types, optional speculative outer
   type/offset, and the boolean query flags.  */
2663 polymorphic_call_target_hasher::hash (const polymorphic_call_target_d
*odr_query
)
2665 inchash::hash
hstate (odr_query
->otr_token
);
2667 hstate
.add_hwi (odr_query
->type
->id
);
2668 hstate
.merge_hash (TYPE_UID (odr_query
->context
.outer_type
));
2669 hstate
.add_hwi (odr_query
->context
.offset
);
2670 hstate
.add_hwi (odr_query
->n_odr_types
);
2672 if (odr_query
->context
.speculative_outer_type
)
2674 hstate
.merge_hash (TYPE_UID (odr_query
->context
.speculative_outer_type
));
2675 hstate
.add_hwi (odr_query
->context
.speculative_offset
);
2677 hstate
.add_flag (odr_query
->speculative
);
2678 hstate
.add_flag (odr_query
->context
.maybe_in_construction
);
2679 hstate
.add_flag (odr_query
->context
.maybe_derived_type
);
2680 hstate
.add_flag (odr_query
->context
.speculative_maybe_derived_type
);
2681 hstate
.commit_flag ();
2682 return hstate
.end ();
2685 /* Compare cache entries T1 and T2. */
/* Two cache entries are equal only when every component of the query
   key matches, including n_odr_types (see trailing comment).  */
2688 polymorphic_call_target_hasher::equal (const polymorphic_call_target_d
*t1
,
2689 const polymorphic_call_target_d
*t2
)
2691 return (t1
->type
== t2
->type
&& t1
->otr_token
== t2
->otr_token
2692 && t1
->speculative
== t2
->speculative
2693 && t1
->context
.offset
== t2
->context
.offset
2694 && t1
->context
.speculative_offset
== t2
->context
.speculative_offset
2695 && t1
->context
.outer_type
== t2
->context
.outer_type
2696 && t1
->context
.speculative_outer_type
== t2
->context
.speculative_outer_type
2697 && t1
->context
.maybe_in_construction
2698 == t2
->context
.maybe_in_construction
2699 && t1
->context
.maybe_derived_type
== t2
->context
.maybe_derived_type
2700 && (t1
->context
.speculative_maybe_derived_type
2701 == t2
->context
.speculative_maybe_derived_type
)
2702 /* Adding new type may affect outcome of target search. */
2703 && t1
->n_odr_types
== t2
->n_odr_types
);
2706 /* Remove entry in polymorphic call target cache hash. */
/* Release the cached target vector owned by the entry.
   NOTE(review): freeing of the entry V itself is not visible in this
   extraction; verify against upstream.  */
2709 polymorphic_call_target_hasher::remove (polymorphic_call_target_d
*v
)
2711 v
->targets
.release ();
2715 /* Polymorphic call target query cache. */
2717 typedef hash_table
<polymorphic_call_target_hasher
>
2718 polymorphic_call_target_hash_type
;
/* Lazily allocated in possible_polymorphic_call_targets; destroyed by
   free_polymorphic_call_targets_hash.  */
2719 static polymorphic_call_target_hash_type
*polymorphic_call_target_hash
;
2721 /* Destroy polymorphic call target query cache. */
/* Deletes both the query hash table and the set of cached target nodes,
   resetting the globals so the cache can be rebuilt lazily.  */
2724 free_polymorphic_call_targets_hash ()
2726 if (cached_polymorphic_call_targets
)
2728 delete polymorphic_call_target_hash
;
2729 polymorphic_call_target_hash
= NULL
;
2730 delete cached_polymorphic_call_targets
;
2731 cached_polymorphic_call_targets
= NULL
;
2735 /* Force rebuilding type inheritance graph from scratch.
2736 This is used to make sure that we do not keep references to types
2737 which was not visible to free_lang_data. */
/* NOTE(review): cleanup of odr_hash preceding the reset of
   odr_types_ptr appears to be missing from this extraction.  */
2740 rebuild_type_inheritance_graph ()
2746 odr_types_ptr
= NULL
;
2747 free_polymorphic_call_targets_hash ();
2750 /* When virtual function is removed, we may need to flush the cache. */
/* cgraph removal hook: if the removed node N is among the cached
   polymorphic call targets, drop the whole cache rather than patching
   individual entries.  */
2753 devirt_node_removal_hook (struct cgraph_node
*n
, void *d ATTRIBUTE_UNUSED
)
2755 if (cached_polymorphic_call_targets
2757 && cached_polymorphic_call_targets
->contains (n
))
2758 free_polymorphic_call_targets_hash ();
2761 /* Look up base of BINFO that has virtual table VTABLE with OFFSET. */
/* NOTE(review): the VTABLE parameter declaration, braces and return
   statements are not visible in this extraction.  */
2764 subbinfo_with_vtable_at_offset (tree binfo
, unsigned HOST_WIDE_INT offset
,
2767 tree v
= BINFO_VTABLE (binfo
);
2770 unsigned HOST_WIDE_INT this_offset
;
2774 if (!vtable_pointer_value_to_vtable (v
, &v
, &this_offset
))
/* Match by offset and by assembler name of the vtable decl.  */
2777 if (offset
== this_offset
2778 && DECL_ASSEMBLER_NAME (v
) == DECL_ASSEMBLER_NAME (vtable
))
/* Otherwise search recursively in all polymorphic bases.  */
2782 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
2783 if (polymorphic_type_binfo_p (base_binfo
))
2785 base_binfo
= subbinfo_with_vtable_at_offset (base_binfo
, offset
, vtable
);
2792 /* T is known constant value of virtual table pointer.
2793 Store virtual table to V and its offset to OFFSET.
2794 Return false if T does not look like virtual table reference. */
/* NOTE(review): several 'return true/false' lines and braces seem to
   have been dropped in extraction; verify upstream.  */
2797 vtable_pointer_value_to_vtable (const_tree t
, tree
*v
,
2798 unsigned HOST_WIDE_INT
*offset
)
2800 /* We expect &MEM[(void *)&virtual_table + 16B].
2801 We obtain object's BINFO from the context of the virtual table.
2802 This one contains pointer to virtual table represented via
2803 POINTER_PLUS_EXPR. Verify that this pointer matches what
2804 we propagated through.
2806 In the case of virtual inheritance, the virtual tables may
2807 be nested, i.e. the offset may be different from 16 and we may
2808 need to dive into the type representation. */
2809 if (TREE_CODE (t
) == ADDR_EXPR
2810 && TREE_CODE (TREE_OPERAND (t
, 0)) == MEM_REF
2811 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)) == ADDR_EXPR
2812 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t
, 0), 1)) == INTEGER_CST
2813 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t
, 0), 0), 0))
2815 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2816 (TREE_OPERAND (t
, 0), 0), 0)))
2818 *v
= TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t
, 0), 0), 0);
2819 *offset
= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t
, 0), 1));
2823 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
2824 We need to handle it when T comes from static variable initializer or
2826 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
2828 *offset
= tree_to_uhwi (TREE_OPERAND (t
, 1));
2829 t
= TREE_OPERAND (t
, 0);
2834 if (TREE_CODE (t
) != ADDR_EXPR
)
2836 *v
= TREE_OPERAND (t
, 0);
2840 /* T is known constant value of virtual table pointer. Return BINFO of the
/* Decodes T into (vtable, offset) and maps it back to the BINFO of the
   vtable's class via subbinfo_with_vtable_at_offset.
   NOTE(review): the failure 'return NULL' path and the final argument
   list are not fully visible in this extraction.  */
2844 vtable_pointer_value_to_binfo (const_tree t
)
2847 unsigned HOST_WIDE_INT offset
;
2849 if (!vtable_pointer_value_to_vtable (t
, &vtable
, &offset
))
2852 /* FIXME: for stores of construction vtables we return NULL,
2853 because we do not have BINFO for those. Eventually we should fix
2854 our representation to allow this case to be handled, too.
2855 In the case we see store of BINFO we however may assume
2856 that standard folding will be able to cope with it. */
2857 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable
)),
2861 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2862 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2863 and insert them in NODES.
2865 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
/* NOTE(review): the OUTER_TYPE/COMPLETEP parameters, the enclosing
   'while' loop header, braces and 'continue'/'break' statements appear
   to be missing from this extraction; verify upstream.  */
2868 record_targets_from_bases (tree otr_type
,
2869 HOST_WIDE_INT otr_token
,
2871 HOST_WIDE_INT offset
,
2872 vec
<cgraph_node
*> &nodes
,
2873 hash_set
<tree
> *inserted
,
2874 hash_set
<tree
> *matched_vtables
,
2879 HOST_WIDE_INT pos
, size
;
2883 if (types_same_for_odr (outer_type
, otr_type
))
/* Find the (artificial) base field that covers OFFSET.  */
2886 for (fld
= TYPE_FIELDS (outer_type
); fld
; fld
= DECL_CHAIN (fld
))
2888 if (TREE_CODE (fld
) != FIELD_DECL
)
2891 pos
= int_bit_position (fld
);
2892 size
= tree_to_shwi (DECL_SIZE (fld
));
2893 if (pos
<= offset
&& (pos
+ size
) > offset
2894 /* Do not get confused by zero sized bases. */
2895 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld
))))
2898 /* Within a class type we should always find corresponding fields. */
2899 gcc_assert (fld
&& TREE_CODE (TREE_TYPE (fld
)) == RECORD_TYPE
);
2901 /* Nonbase types should have been stripped by outer_class_type. */
2902 gcc_assert (DECL_ARTIFICIAL (fld
));
/* Descend into the base and look up its vtable entry for the token.  */
2904 outer_type
= TREE_TYPE (fld
);
2907 base_binfo
= get_binfo_at_offset (TYPE_BINFO (outer_type
),
2911 gcc_assert (odr_violation_reported
);
2914 gcc_assert (base_binfo
);
2915 if (!matched_vtables
->add (BINFO_VTABLE (base_binfo
)))
2918 tree target
= gimple_get_virt_method_for_binfo (otr_token
,
2921 if (!target
|| ! DECL_CXX_DESTRUCTOR_P (target
))
2922 maybe_record_node (nodes
, target
, inserted
, can_refer
, completep
);
2923 matched_vtables
->add (BINFO_VTABLE (base_binfo
));
2928 /* When virtual table is removed, we may need to flush the cache. */
/* varpool removal hook: a vanished vtable of an anonymous-namespace
   type can invalidate cached target lists, so drop the cache.  */
2931 devirt_variable_node_removal_hook (varpool_node
*n
,
2932 void *d ATTRIBUTE_UNUSED
)
2934 if (cached_polymorphic_call_targets
2935 && DECL_VIRTUAL_P (n
->decl
)
2936 && type_in_anonymous_namespace_p (DECL_CONTEXT (n
->decl
)))
2937 free_polymorphic_call_targets_hash ();
2940 /* Record about how many calls would benefit from given type to be final. */
2942 struct odr_type_warn_count
/* Profile-weighted count of such calls; other members (e.g. a plain
   count and the type itself, referenced elsewhere in this file) are not
   visible in this extraction.  */
2946 profile_count dyn_count
;
2949 /* Record about how many calls would benefit from given method to be final. */
2951 struct decl_warn_count
/* Profile-weighted count of such calls; the decl and static count
   members referenced elsewhere are not visible in this extraction.  */
2955 profile_count dyn_count
;
2958 /* Information about type and decl warnings. */
2960 class final_warning_record
2963 /* If needed grow type_warnings vector and initialize new decl_warn_count
2964 to have dyn_count set to profile_count::zero (). */
2965 void grow_type_warnings (unsigned newlen
);
/* Profile count of the call currently being analyzed; accumulated into
   the per-type/per-decl records below.  */
2967 profile_count dyn_count
;
/* Per-ODR-type "could be final" statistics, indexed by odr_type id.  */
2968 auto_vec
<odr_type_warn_count
> type_warnings
;
/* Per-method "could be final" statistics, keyed by the method decl.  */
2969 hash_map
<tree
, decl_warn_count
> decl_warnings
;
/* Grow TYPE_WARNINGS to NEWLEN entries, zero-initializing the new slots
   and forcing their dyn_count to profile_count::zero () so additions
   start from an initialized count.
   NOTE(review): the guard against shrinking (newlen > length) present
   upstream is not visible in this extraction.  */
2973 final_warning_record::grow_type_warnings (unsigned newlen
)
2975 unsigned len
= type_warnings
.length ();
2978 type_warnings
.safe_grow_cleared (newlen
);
2979 for (unsigned i
= len
; i
< newlen
; i
++)
2980 type_warnings
[i
].dyn_count
= profile_count::zero ();
/* Global collector for -Wsuggest-final-types/-Wsuggest-final-methods
   statistics; NULL when those warnings are not being gathered.  */
2984 class final_warning_record
*final_warning_records
;
2986 /* Return vector containing possible targets of polymorphic call of type
2987 OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
2988 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
2989 OTR_TYPE and include their virtual method. This is useful for types
2990 possibly in construction or destruction where the virtual table may
2991 temporarily change to one of base types. INCLUDE_DERIVED_TYPES make
2992 us to walk the inheritance graph for all derivations.
2994 If COMPLETEP is non-NULL, store true if the list is complete.
2995 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
2996 in the target cache. If user needs to visit every target list
2997 just once, it can memoize them.
2999 If SPECULATIVE is set, the list will not contain targets that
3000 are not speculatively taken.
3002 Returned vector is placed into cache. It is NOT caller's responsibility
3003 to free it. The vector can be freed on cgraph_remove_node call if
3004 the particular node is a virtual function present in the cache. */
/* NOTE(review): this extraction of the function is heavily truncated:
   trailing parameters (completep, cache_token, speculative), many
   declarations (binfo, target, complete, i, flags), braces, early
   returns and the final 'return nodes;' are missing.  The structure
   below should be read together with the upstream ipa-devirt.c.  */
3007 possible_polymorphic_call_targets (tree otr_type
,
3008 HOST_WIDE_INT otr_token
,
3009 ipa_polymorphic_call_context context
,
3014 static struct cgraph_node_hook_list
*node_removal_hook_holder
;
3015 vec
<cgraph_node
*> nodes
= vNULL
;
3016 auto_vec
<tree
, 8> bases_to_consider
;
3017 odr_type type
, outer_type
;
3018 polymorphic_call_target_d key
;
3019 polymorphic_call_target_d
**slot
;
3023 bool can_refer
= false;
3024 bool skipped
= false;
/* Queries are canonicalized to the main variant.  */
3026 otr_type
= TYPE_MAIN_VARIANT (otr_type
);
3028 /* If ODR is not initialized or the context is invalid, return empty
3030 if (!odr_hash
|| context
.invalid
|| !TYPE_BINFO (otr_type
))
3033 *completep
= context
.invalid
;
3035 *cache_token
= NULL
;
3039 /* Do not bother to compute speculative info when user do not asks for it. */
3040 if (!speculative
|| !context
.speculative_outer_type
)
3041 context
.clear_speculation ();
3043 type
= get_odr_type (otr_type
, true);
3045 /* Recording type variants would waste results cache. */
3046 gcc_assert (!context
.outer_type
3047 || TYPE_MAIN_VARIANT (context
.outer_type
) == context
.outer_type
);
3049 /* Look up the outer class type we want to walk.
3050 If we fail to do so, the context is invalid. */
3051 if ((context
.outer_type
|| context
.speculative_outer_type
)
3052 && !context
.restrict_to_inner_class (otr_type
))
3057 *cache_token
= NULL
;
3060 gcc_assert (!context
.invalid
);
3062 /* Check that restrict_to_inner_class kept the main variant. */
3063 gcc_assert (!context
.outer_type
3064 || TYPE_MAIN_VARIANT (context
.outer_type
) == context
.outer_type
);
3066 /* We canonicalize our query, so we do not need extra hashtable entries. */
3068 /* Without outer type, we have no use for offset. Just do the
3069 basic search from inner type. */
3070 if (!context
.outer_type
)
3071 context
.clear_outer_type (otr_type
);
3072 /* We need to update our hierarchy if the type does not exist. */
3073 outer_type
= get_odr_type (context
.outer_type
, true);
3074 /* If the type is complete, there are no derivations. */
3075 if (TYPE_FINAL_P (outer_type
->type
))
3076 context
.maybe_derived_type
= false;
3078 /* Initialize query cache. */
3079 if (!cached_polymorphic_call_targets
)
3081 cached_polymorphic_call_targets
= new hash_set
<cgraph_node
*>;
3082 polymorphic_call_target_hash
3083 = new polymorphic_call_target_hash_type (23);
/* Register removal hooks once so symbol removal flushes the cache.  */
3084 if (!node_removal_hook_holder
)
3086 node_removal_hook_holder
=
3087 symtab
->add_cgraph_removal_hook (&devirt_node_removal_hook
, NULL
);
3088 symtab
->add_varpool_removal_hook (&devirt_variable_node_removal_hook
,
/* Canonicalize outer/speculative outer types through the ODR machinery
   so equivalent queries share cache entries.  */
3095 if (context
.outer_type
!= otr_type
)
3097 = get_odr_type (context
.outer_type
, true)->type
;
3098 if (context
.speculative_outer_type
)
3099 context
.speculative_outer_type
3100 = get_odr_type (context
.speculative_outer_type
, true)->type
;
3103 /* Look up cached answer. */
3105 key
.otr_token
= otr_token
;
3106 key
.speculative
= speculative
;
3107 key
.context
= context
;
3108 key
.n_odr_types
= odr_types
.length ();
3109 slot
= polymorphic_call_target_hash
->find_slot (&key
, INSERT
);
3111 *cache_token
= (void *)*slot
;
/* Cache hit: update warning statistics and return the cached list.  */
3115 *completep
= (*slot
)->complete
;
3116 if ((*slot
)->type_warning
&& final_warning_records
)
3118 final_warning_records
->type_warnings
[(*slot
)->type_warning
- 1].count
++;
3119 if (!final_warning_records
->type_warnings
3120 [(*slot
)->type_warning
- 1].dyn_count
.initialized_p ())
3121 final_warning_records
->type_warnings
3122 [(*slot
)->type_warning
- 1].dyn_count
= profile_count::zero ();
3123 if (final_warning_records
->dyn_count
> 0)
3124 final_warning_records
->type_warnings
[(*slot
)->type_warning
- 1].dyn_count
3125 = final_warning_records
->type_warnings
[(*slot
)->type_warning
- 1].dyn_count
3126 + final_warning_records
->dyn_count
;
3128 if (!speculative
&& (*slot
)->decl_warning
&& final_warning_records
)
3130 struct decl_warn_count
*c
=
3131 final_warning_records
->decl_warnings
.get ((*slot
)->decl_warning
);
3133 if (final_warning_records
->dyn_count
> 0)
3134 c
->dyn_count
+= final_warning_records
->dyn_count
;
3136 return (*slot
)->targets
;
3141 /* Do actual search. */
3142 timevar_push (TV_IPA_VIRTUAL_CALL
);
3143 *slot
= XCNEW (polymorphic_call_target_d
);
3145 *cache_token
= (void *)*slot
;
3146 (*slot
)->type
= type
;
3147 (*slot
)->otr_token
= otr_token
;
3148 (*slot
)->context
= context
;
3149 (*slot
)->speculative
= speculative
;
3151 hash_set
<tree
> inserted
;
3152 hash_set
<tree
> matched_vtables
;
3154 /* First insert targets we speculatively identified as likely. */
3155 if (context
.speculative_outer_type
)
3157 odr_type speculative_outer_type
;
3158 bool speculation_complete
= true;
3160 /* First insert target from type itself and check if it may have
3162 speculative_outer_type
= get_odr_type (context
.speculative_outer_type
, true);
3163 if (TYPE_FINAL_P (speculative_outer_type
->type
))
3164 context
.speculative_maybe_derived_type
= false;
3165 binfo
= get_binfo_at_offset (TYPE_BINFO (speculative_outer_type
->type
),
3166 context
.speculative_offset
, otr_type
);
3168 target
= gimple_get_virt_method_for_binfo (otr_token
, binfo
,
3173 /* In the case we get complete method, we don't need
3174 to walk derivations. */
3175 if (target
&& DECL_FINAL_P (target
))
3176 context
.speculative_maybe_derived_type
= false;
3177 if (type_possibly_instantiated_p (speculative_outer_type
->type
))
3178 maybe_record_node (nodes
, target
, &inserted
, can_refer
, &speculation_complete
);
3180 matched_vtables
.add (BINFO_VTABLE (binfo
));
3183 /* Next walk recursively all derived types. */
3184 if (context
.speculative_maybe_derived_type
)
3185 for (i
= 0; i
< speculative_outer_type
->derived_types
.length(); i
++)
3186 possible_polymorphic_call_targets_1 (nodes
, &inserted
,
3189 speculative_outer_type
->derived_types
[i
],
3190 otr_token
, speculative_outer_type
->type
,
3191 context
.speculative_offset
,
3192 &speculation_complete
,
/* Non-speculative search (also done when speculation found nothing).  */
3197 if (!speculative
|| !nodes
.length ())
3199 /* First see virtual method of type itself. */
3200 binfo
= get_binfo_at_offset (TYPE_BINFO (outer_type
->type
),
3201 context
.offset
, otr_type
);
3203 target
= gimple_get_virt_method_for_binfo (otr_token
, binfo
,
3207 gcc_assert (odr_violation_reported
);
3211 /* Destructors are never called through construction virtual tables,
3212 because the type is always known. */
3213 if (target
&& DECL_CXX_DESTRUCTOR_P (target
))
3214 context
.maybe_in_construction
= false;
3218 /* In the case we get complete method, we don't need
3219 to walk derivations. */
3220 if (DECL_FINAL_P (target
))
3221 context
.maybe_derived_type
= false;
3224 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
3225 if (type_possibly_instantiated_p (outer_type
->type
))
3226 maybe_record_node (nodes
, target
, &inserted
, can_refer
, &complete
);
3231 matched_vtables
.add (BINFO_VTABLE (binfo
));
3233 /* Next walk recursively all derived types. */
3234 if (context
.maybe_derived_type
)
3236 for (i
= 0; i
< outer_type
->derived_types
.length(); i
++)
3237 possible_polymorphic_call_targets_1 (nodes
, &inserted
,
3240 outer_type
->derived_types
[i
],
3241 otr_token
, outer_type
->type
,
3242 context
.offset
, &complete
,
3244 context
.maybe_in_construction
);
/* Collect -Wsuggest-final-types / -Wsuggest-final-methods statistics
   when there is exactly one real method target.  */
3246 if (!outer_type
->all_derivations_known
)
3248 if (!speculative
&& final_warning_records
3249 && nodes
.length () == 1
3250 && TREE_CODE (TREE_TYPE (nodes
[0]->decl
)) == METHOD_TYPE
)
3253 && warn_suggest_final_types
3254 && !outer_type
->derived_types
.length ())
3256 final_warning_records
->grow_type_warnings
3258 final_warning_records
->type_warnings
[outer_type
->id
].count
++;
3259 if (!final_warning_records
->type_warnings
3260 [outer_type
->id
].dyn_count
.initialized_p ())
3261 final_warning_records
->type_warnings
3262 [outer_type
->id
].dyn_count
= profile_count::zero ();
3263 final_warning_records
->type_warnings
[outer_type
->id
].dyn_count
3264 += final_warning_records
->dyn_count
;
3265 final_warning_records
->type_warnings
[outer_type
->id
].type
3267 (*slot
)->type_warning
= outer_type
->id
+ 1;
3270 && warn_suggest_final_methods
3271 && types_same_for_odr (DECL_CONTEXT (nodes
[0]->decl
),
3275 struct decl_warn_count
&c
=
3276 final_warning_records
->decl_warnings
.get_or_insert
3277 (nodes
[0]->decl
, &existed
);
3282 c
.dyn_count
+= final_warning_records
->dyn_count
;
3287 c
.dyn_count
= final_warning_records
->dyn_count
;
3288 c
.decl
= nodes
[0]->decl
;
3290 (*slot
)->decl_warning
= nodes
[0]->decl
;
3299 /* Destructors are never called through construction virtual tables,
3300 because the type is always known. One of entries may be
3301 cxa_pure_virtual so look to at least two of them. */
3302 if (context
.maybe_in_construction
)
3303 for (i
=0 ; i
< MIN (nodes
.length (), 2); i
++)
3304 if (DECL_CXX_DESTRUCTOR_P (nodes
[i
]->decl
))
3305 context
.maybe_in_construction
= false;
3306 if (context
.maybe_in_construction
)
3308 if (type
!= outer_type
3310 || (context
.maybe_derived_type
3311 && !type_all_derivations_known_p (outer_type
->type
))))
3312 record_targets_from_bases (otr_type
, otr_token
, outer_type
->type
,
3313 context
.offset
, nodes
, &inserted
,
3314 &matched_vtables
, &complete
);
3316 maybe_record_node (nodes
, target
, &inserted
, can_refer
, &complete
);
3317 for (i
= 0; i
< bases_to_consider
.length(); i
++)
3318 maybe_record_node (nodes
, bases_to_consider
[i
], &inserted
, can_refer
, &complete
);
/* Publish the computed answer into the cache entry.  */
3323 (*slot
)->targets
= nodes
;
3324 (*slot
)->complete
= complete
;
3325 (*slot
)->n_odr_types
= odr_types
.length ();
3327 *completep
= complete
;
3329 timevar_pop (TV_IPA_VIRTUAL_CALL
);
/* hash_map traversal callback: collect a pointer to each decl warning
   record VALUE into VEC (used when emitting sorted warning output).  */
3334 add_decl_warning (const tree
&key ATTRIBUTE_UNUSED
, const decl_warn_count
&value
,
3335 vec
<const decl_warn_count
*> *vec
)
3337 vec
->safe_push (&value
);
3341 /* Dump target list TARGETS into FILE. */
/* Prints each target with a demangled name when cplus_demangle_v3
   succeeds, flags targets with no local definition, and truncates the
   list after ~10 entries unless VERBOSE to keep dumps from growing
   quadratically.  */
3344 dump_targets (FILE *f
, vec
<cgraph_node
*> targets
, bool verbose
)
3348 for (i
= 0; i
< targets
.length (); i
++)
3352 name
= cplus_demangle_v3 (targets
[i
]->asm_name (), 0);
3353 fprintf (f
, " %s/%i", name
? name
: targets
[i
]->name (),
3357 if (!targets
[i
]->definition
)
3358 fprintf (f
, " (no definition%s)",
3359 DECL_DECLARED_INLINE_P (targets
[i
]->decl
)
3361 /* With many targets for every call polymorphic dumps are going to
3362 be quadratic in size. */
3363 if (i
> 10 && !verbose
)
3365 fprintf (f
, " ... and %i more targets\n", targets
.length () - i
);
3372 /* Dump all possible targets of a polymorphic call. */
/* Queries possible_polymorphic_call_targets twice (non-speculative then
   speculative) and prints both lists with completeness and context
   flags.
   NOTE(review): parameters (otr_type, verbose), local declarations
   (final, len) and braces are not fully visible in this extraction.  */
3375 dump_possible_polymorphic_call_targets (FILE *f
,
3377 HOST_WIDE_INT otr_token
,
3378 const ipa_polymorphic_call_context
&ctx
,
3381 vec
<cgraph_node
*> targets
;
3383 odr_type type
= get_odr_type (TYPE_MAIN_VARIANT (otr_type
), false);
3388 targets
= possible_polymorphic_call_targets (otr_type
, otr_token
,
3390 &final
, NULL
, false);
3391 fprintf (f
, " Targets of polymorphic call of type %i:", type
->id
);
3392 print_generic_expr (f
, type
->type
, TDF_SLIM
);
3393 fprintf (f
, " token %i\n", (int)otr_token
);
3397 fprintf (f
, " %s%s%s%s\n ",
3398 final
? "This is a complete list." :
3399 "This is partial list; extra targets may be defined in other units.",
3400 ctx
.maybe_in_construction
? " (base types included)" : "",
3401 ctx
.maybe_derived_type
? " (derived types included)" : "",
3402 ctx
.speculative_maybe_derived_type
? " (speculative derived types included)" : "");
3403 len
= targets
.length ();
3404 dump_targets (f
, targets
, verbose
);
/* Second query with SPECULATIVE set; dump only if it differs.  */
3406 targets
= possible_polymorphic_call_targets (otr_type
, otr_token
,
3408 &final
, NULL
, true);
3409 if (targets
.length () != len
)
3411 fprintf (f
, " Speculative targets:");
3412 dump_targets (f
, targets
, verbose
);
3414 /* Ugly: during callgraph construction the target cache may get populated
3415 before all targets are found. While this is harmless (because all local
3416 types are discovered and only in those case we devirtualize fully and we
3417 don't do speculative devirtualization before IPA stage) it triggers
3418 assert here when dumping at that stage also populates the case with
3419 speculative targets. Quietly ignore this. */
3420 gcc_assert (symtab
->state
< IPA_SSA
|| targets
.length () <= len
);
3425 /* Return true if N can be possibly target of a polymorphic call of
3426 OTR_TYPE/OTR_TOKEN. */
3429 possible_polymorphic_call_target_p (tree otr_type
,
3430 HOST_WIDE_INT otr_token
,
3431 const ipa_polymorphic_call_context
&ctx
,
3432 struct cgraph_node
*n
)
3434 vec
<cgraph_node
*> targets
;
/* __builtin_unreachable and __builtin_trap act as placeholder targets,
   so accept them unconditionally (the surrounding return is not visible
   here — behavior inferred from the guard; confirm against full source).  */
3438 if (fndecl_built_in_p (n
->decl
, BUILT_IN_UNREACHABLE
)
3439 || fndecl_built_in_p (n
->decl
, BUILT_IN_TRAP
))
/* Pure-virtual stubs may always appear in vtables.  */
3442 if (is_cxa_pure_virtual_p (n
->decl
))
/* Otherwise check N against the computed target list, comparing by
   semantic equivalence rather than pointer identity.  */
3447 targets
= possible_polymorphic_call_targets (otr_type
, otr_token
, ctx
, &final
);
3448 for (i
= 0; i
< targets
.length (); i
++)
3449 if (n
->semantically_equivalent_p (targets
[i
]))
3452 /* At a moment we allow middle end to dig out new external declarations
3453 as a targets of polymorphic calls. */
3454 if (!final
&& !n
->definition
)
3461 /* Return true if N can be possibly target of a polymorphic call of
3462 OBJ_TYPE_REF expression REF in STMT. */
/* Convenience overload: derives type, token and context from the
   OBJ_TYPE_REF and delegates to the overload above.  */
3465 possible_polymorphic_call_target_p (tree ref
,
3467 struct cgraph_node
*n
)
/* Build the polymorphic call context from the current function, the
   OBJ_TYPE_REF expression and the call statement.  */
3469 ipa_polymorphic_call_context
context (current_function_decl
, ref
, stmt
);
3470 tree call_fn
= gimple_call_fn (stmt
);
3472 return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn
),
3474 (OBJ_TYPE_REF_TOKEN (call_fn
)),
3480 /* After callgraph construction new external nodes may appear.
3481 Add them into the graph. */
3484 update_type_inheritance_graph (void)
3486 struct cgraph_node
*n
;
/* The cached target lists may be stale once new types are added, so drop
   the whole cache before rebuilding.  */
3490 free_polymorphic_call_targets_hash ();
3491 timevar_push (TV_IPA_INHERITANCE
);
3492 /* We reconstruct the graph starting from types of all methods seen in the
3494 FOR_EACH_FUNCTION (n
)
/* Register the ODR type of every real virtual method's base class.  */
3495 if (DECL_VIRTUAL_P (n
->decl
)
3497 && n
->real_symbol_p ())
3498 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n
->decl
)), true);
3499 timevar_pop (TV_IPA_INHERITANCE
);
3503 /* Return true if N looks like likely target of a polymorphic call.
3504 Rule out cxa_pure_virtual, noreturns, function declared cold and
3505 other obvious cases. */
3508 likely_target_p (struct cgraph_node
*n
)
3511 /* cxa_pure_virtual and similar things are not likely. */
3512 if (TREE_CODE (TREE_TYPE (n
->decl
)) != METHOD_TYPE
)
/* Noreturn functions are poor speculation candidates.  */
3514 flags
= flags_from_decl_or_type (n
->decl
);
3515 if (flags
& ECF_NORETURN
)
/* Likewise functions the user explicitly marked cold ...  */
3517 if (lookup_attribute ("cold",
3518 DECL_ATTRIBUTES (n
->decl
)))
/* ... or that profile/heuristics consider executed less than normally.  */
3520 if (n
->frequency
< NODE_FREQUENCY_NORMAL
)
3522 /* If there are no live virtual tables referring the target,
3523 the only way the target can be called is an instance coming from other
3524 compilation unit; speculative devirtualization is built around an
3525 assumption that won't happen. */
3526 if (!referenced_from_vtable_p (n
))
3531 /* Compare type warning records P1 and P2 and choose one with larger count;
3532 helper for qsort. */
3535 type_warning_cmp (const void *p1
, const void *p2
)
3537 const odr_type_warn_count
*t1
= (const odr_type_warn_count
*)p1
;
3538 const odr_type_warn_count
*t2
= (const odr_type_warn_count
*)p2
;
/* Primary key: profile dyn_count (the return values of these two guards
   are not visible in this excerpt — presumably sorting larger counts
   first; confirm against full source).  */
3540 if (t1
->dyn_count
< t2
->dyn_count
)
3542 if (t1
->dyn_count
> t2
->dyn_count
)
/* Secondary key: static call count, larger first.  */
3544 return t2
->count
- t1
->count
;
3547 /* Compare decl warning records P1 and P2 and choose one with larger count;
3548 helper for qsort. */
/* Unlike type_warning_cmp, the sorted elements are pointers to records
   (pushed by add_decl_warning), hence the extra dereference.  */
3551 decl_warning_cmp (const void *p1
, const void *p2
)
3553 const decl_warn_count
*t1
= *(const decl_warn_count
* const *)p1
;
3554 const decl_warn_count
*t2
= *(const decl_warn_count
* const *)p2
;
/* Primary key: profile dyn_count (returns not visible in this excerpt;
   presumably larger counts sort first — confirm against full source).  */
3556 if (t1
->dyn_count
< t2
->dyn_count
)
3558 if (t1
->dyn_count
> t2
->dyn_count
)
/* Secondary key: static call count, larger first.  */
3560 return t2
->count
- t1
->count
;
3564 /* Try to speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
/* Returns the single likely target, or (presumably) NULL when there is
   none or it is unsuitable — the failure returns are not visible in this
   excerpt; confirm against full source.  */
3567 struct cgraph_node
*
3568 try_speculative_devirtualization (tree otr_type
, HOST_WIDE_INT otr_token
,
3569 ipa_polymorphic_call_context ctx
)
/* Compute the speculative target list (final "true" argument).  */
3571 vec
<cgraph_node
*>targets
3572 = possible_polymorphic_call_targets
3573 (otr_type
, otr_token
, ctx
, NULL
, NULL
, true);
3575 struct cgraph_node
*likely_target
= NULL
;
/* Scan for targets passing the likely_target_p heuristic.  */
3577 for (i
= 0; i
< targets
.length (); i
++)
3578 if (likely_target_p (targets
[i
]))
3582 likely_target
= targets
[i
];
/* Reject targets without a local definition or that are external.  */
3585 ||!likely_target
->definition
3586 || DECL_EXTERNAL (likely_target
->decl
))
3589 /* Don't use an implicitly-declared destructor (c++/58678). */
3590 struct cgraph_node
*non_thunk_target
3591 = likely_target
->function_symbol ();
3592 if (DECL_ARTIFICIAL (non_thunk_target
->decl
))
/* Interposable symbols that may also be discarded cannot be safely
   referenced speculatively.  */
3594 if (likely_target
->get_availability () <= AVAIL_INTERPOSABLE
3595 && likely_target
->can_be_discarded_p ())
3597 return likely_target
;
3600 /* The ipa-devirt pass.
3601 When polymorphic call has only one likely target in the unit,
3602 turn it into a speculative call. */
/* Main worker of the pass.  Also emits the -Wsuggest-final-types and
   -Wsuggest-final-methods diagnostics collected along the way.  Returns
   TODO flags (see the final return below).  */
3607 struct cgraph_node
*n
;
/* Cache tokens of target lists already found useless, so repeated calls
   with the same list are skipped cheaply.  */
3608 hash_set
<void *> bad_call_targets
;
3609 struct cgraph_edge
*e
;
/* Statistics counters reported in the summary dump at the end.  */
3611 int npolymorphic
= 0, nspeculated
= 0, nconverted
= 0, ncold
= 0;
3612 int nmultiple
= 0, noverwritable
= 0, ndevirtualized
= 0, nnotdefined
= 0;
3613 int nwrong
= 0, nok
= 0, nexternal
= 0, nartificial
= 0;
3620 dump_type_inheritance_graph (dump_file
);
3622 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
3623 This is implemented by setting up final_warning_records that are updated
3624 by get_polymorphic_call_targets.
3625 We need to clear cache in this case to trigger recomputation of all
3627 if (warn_suggest_final_methods
|| warn_suggest_final_types
)
3629 final_warning_records
= new (final_warning_record
);
3630 final_warning_records
->dyn_count
= profile_count::zero ();
3631 final_warning_records
->grow_type_warnings (odr_types
.length ());
3632 free_polymorphic_call_targets_hash ();
/* Main loop: inspect every indirect call of every defined function.  */
3635 FOR_EACH_DEFINED_FUNCTION (n
)
3637 bool update
= false;
3638 if (!opt_for_fn (n
->decl
, flag_devirtualize
))
3640 if (dump_file
&& n
->indirect_calls
)
/* NOTE(review): "Procesing" is a typo in this runtime dump string
   ("Processing"); fixing it requires a code change, not done in this
   comment-only pass.  */
3641 fprintf (dump_file
, "\n\nProcesing function %s\n",
3643 for (e
= n
->indirect_calls
; e
; e
= e
->next_callee
)
3644 if (e
->indirect_info
->polymorphic
)
3646 struct cgraph_node
*likely_target
= NULL
;
/* Feed this edge's profile count to the warning machinery.  */
3650 if (final_warning_records
)
3651 final_warning_records
->dyn_count
= e
->count
.ipa ();
3653 vec
<cgraph_node
*>targets
3654 = possible_polymorphic_call_targets
3655 (e
, &final
, &cache_token
, true);
3658 /* Trigger warnings by calculating non-speculative targets. */
3659 if (warn_suggest_final_methods
|| warn_suggest_final_types
)
3660 possible_polymorphic_call_targets (e
);
3663 dump_possible_polymorphic_call_targets
3664 (dump_file
, e
, (dump_flags
& TDF_DETAILS
));
3668 /* See if the call can be devirtualized by means of ipa-prop's
3669 polymorphic call context propagation. If not, we can just
3670 forget about this call being polymorphic and avoid some heavy
3671 lifting in remove_unreachable_nodes that will otherwise try to
3672 keep all possible targets alive until inlining and in the inliner
3675 This may need to be revisited once we add further ways to use
3676 the may edges, but it is a reasonable thing to do right now. */
3678 if ((e
->indirect_info
->param_index
== -1
3679 || (!opt_for_fn (n
->decl
, flag_devirtualize_speculatively
)
3680 && e
->indirect_info
->vptr_changed
))
3681 && !flag_ltrans_devirtualize
)
/* Drop the polymorphic marking entirely; the call stays indirect.  */
3683 e
->indirect_info
->polymorphic
= false;
3686 fprintf (dump_file
, "Dropping polymorphic call info;"
3687 " it cannot be used by ipa-prop\n");
3690 if (!opt_for_fn (n
->decl
, flag_devirtualize_speculatively
))
/* Speculation is only worthwhile on maybe-hot calls.  */
3693 if (!e
->maybe_hot_p ())
3696 fprintf (dump_file
, "Call is cold\n\n");
3703 fprintf (dump_file
, "Call is already speculated\n\n");
3706 /* When dumping see if we agree with speculation. */
3710 if (bad_call_targets
.contains (cache_token
))
3713 fprintf (dump_file
, "Target list is known to be useless\n\n");
/* Look for exactly one likely target; more than one disqualifies
   the call.  */
3717 for (i
= 0; i
< targets
.length (); i
++)
3718 if (likely_target_p (targets
[i
]))
3722 likely_target
= NULL
;
3724 fprintf (dump_file
, "More than one likely target\n\n");
3728 likely_target
= targets
[i
];
/* Remember useless lists to short-circuit future lookups.  */
3732 bad_call_targets
.add (cache_token
);
3735 /* This is reached only when dumping; check if we agree or disagree
3736 with the speculation. */
3739 struct cgraph_edge
*e2
;
3740 struct ipa_ref
*ref
;
3741 e
->speculative_call_info (e2
, e
, ref
);
3742 if (e2
->callee
->ultimate_alias_target ()
3743 == likely_target
->ultimate_alias_target ())
3745 fprintf (dump_file
, "We agree with speculation\n\n");
3750 fprintf (dump_file
, "We disagree with speculation\n\n");
3755 if (!likely_target
->definition
)
3758 fprintf (dump_file
, "Target is not a definition\n\n");
3762 /* Do not introduce new references to external symbols. While we
3763 can handle these just well, it is common for programs to
3764 incorrectly with headers defining methods they are linked
3766 if (DECL_EXTERNAL (likely_target
->decl
))
3769 fprintf (dump_file
, "Target is external\n\n");
3773 /* Don't use an implicitly-declared destructor (c++/58678). */
3774 struct cgraph_node
*non_thunk_target
3775 = likely_target
->function_symbol ();
3776 if (DECL_ARTIFICIAL (non_thunk_target
->decl
))
3779 fprintf (dump_file
, "Target is artificial\n\n");
3783 if (likely_target
->get_availability () <= AVAIL_INTERPOSABLE
3784 && likely_target
->can_be_discarded_p ())
3787 fprintf (dump_file
, "Target is overwritable\n\n");
/* All checks passed: perform the speculative devirtualization
   (subject to the devirt debug counter).  */
3791 else if (dbg_cnt (devirt
))
3793 if (dump_enabled_p ())
3795 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, e
->call_stmt
,
3796 "speculatively devirtualizing call "
3799 likely_target
->dump_name ());
/* Use a noninterposable alias if the target itself could be
   discarded by the linker.  */
3801 if (!likely_target
->can_be_discarded_p ())
3804 alias
= dyn_cast
<cgraph_node
*> (likely_target
->noninterposable_alias ());
3806 likely_target
= alias
;
/* Attribute 80% of the edge count to the speculated direct call.  */
3811 (likely_target
, e
->count
.apply_scale (8, 10));
3815 ipa_update_overall_fn_summary (n
);
/* Emit the accumulated -Wsuggest-final-* warnings, most frequent
   records first.  */
3817 if (warn_suggest_final_methods
|| warn_suggest_final_types
)
3819 if (warn_suggest_final_types
)
3821 final_warning_records
->type_warnings
.qsort (type_warning_cmp
);
3822 for (unsigned int i
= 0;
3823 i
< final_warning_records
->type_warnings
.length (); i
++)
3824 if (final_warning_records
->type_warnings
[i
].count
)
3826 tree type
= final_warning_records
->type_warnings
[i
].type
;
3827 int count
= final_warning_records
->type_warnings
[i
].count
;
3828 profile_count dyn_count
3829 = final_warning_records
->type_warnings
[i
].dyn_count
;
/* Without profile data, emit the variant that omits execution
   counts.  */
3831 if (!(dyn_count
> 0))
3832 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type
)),
3833 OPT_Wsuggest_final_types
, count
,
3834 "Declaring type %qD final "
3835 "would enable devirtualization of %i call",
3836 "Declaring type %qD final "
3837 "would enable devirtualization of %i calls",
3841 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type
)),
3842 OPT_Wsuggest_final_types
, count
,
3843 "Declaring type %qD final "
3844 "would enable devirtualization of %i call "
3845 "executed %lli times",
3846 "Declaring type %qD final "
3847 "would enable devirtualization of %i calls "
3848 "executed %lli times",
3851 (long long) dyn_count
.to_gcov_type ());
3855 if (warn_suggest_final_methods
)
3857 auto_vec
<const decl_warn_count
*> decl_warnings_vec
;
/* Flatten the hash map into a vector (via add_decl_warning) so it
   can be sorted.  */
3859 final_warning_records
->decl_warnings
.traverse
3860 <vec
<const decl_warn_count
*> *, add_decl_warning
> (&decl_warnings_vec
);
3861 decl_warnings_vec
.qsort (decl_warning_cmp
);
3862 for (unsigned int i
= 0; i
< decl_warnings_vec
.length (); i
++)
3864 tree decl
= decl_warnings_vec
[i
]->decl
;
3865 int count
= decl_warnings_vec
[i
]->count
;
3866 profile_count dyn_count
3867 = decl_warnings_vec
[i
]->dyn_count
;
/* Four warning variants: destructor vs. plain method, crossed with
   with/without profile execution counts.  */
3869 if (!(dyn_count
> 0))
3870 if (DECL_CXX_DESTRUCTOR_P (decl
))
3871 warning_n (DECL_SOURCE_LOCATION (decl
),
3872 OPT_Wsuggest_final_methods
, count
,
3873 "Declaring virtual destructor of %qD final "
3874 "would enable devirtualization of %i call",
3875 "Declaring virtual destructor of %qD final "
3876 "would enable devirtualization of %i calls",
3877 DECL_CONTEXT (decl
), count
);
3879 warning_n (DECL_SOURCE_LOCATION (decl
),
3880 OPT_Wsuggest_final_methods
, count
,
3881 "Declaring method %qD final "
3882 "would enable devirtualization of %i call",
3883 "Declaring method %qD final "
3884 "would enable devirtualization of %i calls",
3886 else if (DECL_CXX_DESTRUCTOR_P (decl
))
3887 warning_n (DECL_SOURCE_LOCATION (decl
),
3888 OPT_Wsuggest_final_methods
, count
,
3889 "Declaring virtual destructor of %qD final "
3890 "would enable devirtualization of %i call "
3891 "executed %lli times",
3892 "Declaring virtual destructor of %qD final "
3893 "would enable devirtualization of %i calls "
3894 "executed %lli times",
3895 DECL_CONTEXT (decl
), count
,
3896 (long long)dyn_count
.to_gcov_type ());
3898 warning_n (DECL_SOURCE_LOCATION (decl
),
3899 OPT_Wsuggest_final_methods
, count
,
3900 "Declaring method %qD final "
3901 "would enable devirtualization of %i call "
3902 "executed %lli times",
3903 "Declaring method %qD final "
3904 "would enable devirtualization of %i calls "
3905 "executed %lli times",
3907 (long long)dyn_count
.to_gcov_type ());
/* Warning records are only needed during this pass.  */
3911 delete (final_warning_records
);
3912 final_warning_records
= 0;
/* Final statistics dump.  */
3917 "%i polymorphic calls, %i devirtualized,"
3918 " %i speculatively devirtualized, %i cold\n"
3919 "%i have multiple targets, %i overwritable,"
3920 " %i already speculated (%i agree, %i disagree),"
3921 " %i external, %i not defined, %i artificial, %i infos dropped\n",
3922 npolymorphic
, ndevirtualized
, nconverted
, ncold
,
3923 nmultiple
, noverwritable
, nspeculated
, nok
, nwrong
,
3924 nexternal
, nnotdefined
, nartificial
, ndropped
);
/* Request removal of now-unreachable functions if anything changed.  */
3925 return ndevirtualized
|| ndropped
? TODO_remove_functions
: 0;
/* Pass metadata for ipa-devirt: an IPA pass named "devirt", timed under
   TV_IPA_DEVIRT, dumping the symbol table when it finishes.  */
3930 const pass_data pass_data_ipa_devirt
=
3932 IPA_PASS
, /* type */
3933 "devirt", /* name */
3934 OPTGROUP_NONE
, /* optinfo_flags */
3935 TV_IPA_DEVIRT
, /* tv_id */
3936 0, /* properties_required */
3937 0, /* properties_provided */
3938 0, /* properties_destroyed */
3939 0, /* todo_flags_start */
3940 ( TODO_dump_symtab
), /* todo_flags_finish */
/* Pass-manager wrapper class for the ipa-devirt pass.  All IPA summary
   hooks are NULL — the pass keeps no streamed summaries.  */
3943 class pass_ipa_devirt
: public ipa_opt_pass_d
3946 pass_ipa_devirt (gcc::context
*ctxt
)
3947 : ipa_opt_pass_d (pass_data_ipa_devirt
, ctxt
,
3948 NULL
, /* generate_summary */
3949 NULL
, /* write_summary */
3950 NULL
, /* read_summary */
3951 NULL
, /* write_optimization_summary */
3952 NULL
, /* read_optimization_summary */
3953 NULL
, /* stmt_fixup */
3954 0, /* function_transform_todo_flags_start */
3955 NULL
, /* function_transform */
3956 NULL
) /* variable_transform */
3959 /* opt_pass methods: */
/* Gate: run when devirtualization is enabled and either speculative
   devirtualization or one of the -Wsuggest-final-* warnings is active.  */
3960 virtual bool gate (function
*)
3962 /* In LTO, always run the IPA passes and decide on function basis if the
3966 return (flag_devirtualize
3967 && (flag_devirtualize_speculatively
3968 || (warn_suggest_final_methods
3969 || warn_suggest_final_types
))
/* Delegate the actual work to ipa_devirt above.  */
3973 virtual unsigned int execute (function
*) { return ipa_devirt (); }
3975 }; // class pass_ipa_devirt
/* Factory used by the pass manager to instantiate the ipa-devirt pass.  */
3980 make_pass_ipa_devirt (gcc::context
*ctxt
)
3982 return new pass_ipa_devirt (ctxt
);
3985 /* Print ODR name of a TYPE if available.
3986 Use demangler when option DEMANGLE is used. */
/* Debug helper (intended for use from a debugger); writes to stderr.  */
3989 debug_tree_odr_name (tree type
, bool demangle
)
3991 const char *odr
= get_odr_name_for_type (type
);
/* Demangle with parameter, ANSI and type options when requested.  */
3994 const int opts
= DMGL_PARAMS
| DMGL_ANSI
| DMGL_TYPES
;
3995 odr
= cplus_demangle (odr
, opts
);
3998 fprintf (stderr
, "%s\n", odr
);
4001 #include "gt-ipa-devirt.h"