/* Basic IPA utilities for type inheritance graph construction and
   devirtualization.
   Copyright (C) 2013-2021 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Brief vocabulary:
     ODR = One Definition Rule
        In short, the ODR states that:
        1 In any translation unit, a template, type, function, or object can
          have no more than one definition.  Some of these can have any number
          of declarations.  A definition provides an instance.
        2 In the entire program, an object or non-inline function cannot have
          more than one definition; if an object or function is used, it must
          have exactly one definition.  You can declare an object or function
          that is never used, in which case you don't have to provide
          a definition.  In no event can there be more than one definition.
        3 Some things, like types, templates, and extern inline functions, can
          be defined in more than one translation unit.  For a given entity,
          each definition must be the same.  Non-extern objects and functions
          in different translation units are different entities, even if their
          names and types are the same.

     OTR = OBJ_TYPE_REF
       This is the GIMPLE representation of the type information of a
       polymorphic call.  It contains two parameters:
         otr_type is the type of the class whose method is called.
         otr_token is the index into the virtual table where the address
         is taken.

     BINFO
       This is the type inheritance information attached to each tree
       RECORD_TYPE by the C++ frontend.  It provides information about base
       types and virtual tables.

       BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
       BINFO also links to its type by BINFO_TYPE and to the virtual table by
       BINFO_VTABLE.

       Base types of a given type are enumerated by the BINFO_BASE_BINFO
       vector.  Members of this vector are not the BINFOs associated
       with the base types; rather, they are new copies of those BINFOs
       (base BINFOs).  Their virtual tables may differ from the
       virtual table of the base type.  Also BINFO_OFFSET specifies the
       offset of the base within the type.

       In the case of single inheritance, the virtual table is shared
       and the BINFO_VTABLE of the base BINFO is NULL.  In the case of
       multiple inheritance, the individual virtual tables are pointed to
       by the BINFO_VTABLE of the base BINFOs (which differs from the
       BINFO_VTABLE of the BINFO associated with the base type).

       BINFO lookup for a given base type and offset can be done by
       get_binfo_at_offset.  It returns the proper BINFO whose virtual table
       can be used for lookup of virtual methods associated with the
       base type.

     token
       This is the index of a virtual method in the virtual table associated
       with the type defining it.  The token can be looked up from an
       OBJ_TYPE_REF or from the DECL_VINDEX of a given virtual method.

     polymorphic (indirect) call
       This is the callgraph representation of a virtual method call.  Every
       polymorphic call contains otr_type and otr_token taken from the
       original OBJ_TYPE_REF at callgraph construction time.

   What we do here:

   build_type_inheritance_graph triggers a construction of the type
   inheritance graph.

     We reconstruct it based on the types of methods we see in the unit.
     This means that the graph is not complete.  Types with no methods are not
     inserted into the graph.  Also types without virtual methods are not
     represented at all, though it may be easy to add this.

     The inheritance graph is represented as follows:

       Vertices are structures odr_type.  Every odr_type may correspond
       to one or more tree type nodes that are equivalent by the ODR rule
       (multiple type nodes appear only with link-time optimization).

       Edges are represented by odr_type->base and odr_type->derived_types.
       At the moment we do not track offsets of types for multiple inheritance.
       Adding this is easy.

  possible_polymorphic_call_targets returns, given the parameters found on an
    indirect polymorphic edge, all possible targets of the call.

  pass_ipa_devirt performs simple speculative devirtualization.
*/
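
/* A minimal illustrative sketch (not part of GCC itself): the kind of
   ODR violation the machinery in this file diagnoses.  The names below
   are made up for the example.

     // tu1.C
     struct S { int a; virtual int f (); };
     // tu2.C
     struct S { long a; virtual int f (); };

   Both translation units define a type named S, but the definitions
   differ, so linking them with -flto may trigger a -Wodr warning.  */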

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "cgraph.h"
#include "lto-streamer.h"
#include "fold-const.h"
#include "print-tree.h"
#include "calls.h"
#include "ipa-utils.h"
#include "gimple-fold.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "demangle.h"
#include "dbgcnt.h"
#include "gimple-pretty-print.h"
#include "intl.h"
#include "stringpool.h"
#include "attribs.h"
#include "data-streamer.h"
#include "lto-streamer.h"
#include "streamer-hooks.h"

/* Hash based set of pairs of types.  */
struct type_pair
{
  tree first;
  tree second;
};

template <>
struct default_hash_traits <type_pair>
  : typed_noop_remove <type_pair>
{
  GTY((skip)) typedef type_pair value_type;
  GTY((skip)) typedef type_pair compare_type;
  static hashval_t
  hash (type_pair p)
  {
    return TYPE_UID (p.first) ^ TYPE_UID (p.second);
  }
  static const bool empty_zero_p = true;
  static bool
  is_empty (type_pair p)
  {
    return p.first == NULL;
  }
  static bool
  is_deleted (type_pair p ATTRIBUTE_UNUSED)
  {
    return false;
  }
  static bool
  equal (const type_pair &a, const type_pair &b)
  {
    return a.first == b.first && a.second == b.second;
  }
  static void
  mark_empty (type_pair &e)
  {
    e.first = NULL;
  }
};
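
/* Usage sketch (illustration only, mirroring odr_subtypes_equivalent_p
   below): hash () above is symmetric (XOR) while equal () compares the
   members in order, so callers canonicalize the pair by TYPE_UID before
   adding it to a visited set:

     type_pair pair = {TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2)};
     if (TYPE_UID (pair.first) > TYPE_UID (pair.second))
       std::swap (pair.first, pair.second);
     if (visited->add (pair))
       return true;  // Pair already being compared; assume equivalence.  */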

/* HACK alert: this is used to communicate with ipa-inline-transform that
   a thunk is being expanded and there is no need to clear the polymorphic
   call target cache.  */
bool thunk_expansion;

static bool odr_types_equivalent_p (tree, tree, bool, bool *,
                                    hash_set<type_pair> *,
                                    location_t, location_t);
static void warn_odr (tree t1, tree t2, tree st1, tree st2,
                      bool warn, bool *warned, const char *reason);

static bool odr_violation_reported = false;


/* Pointer set of all call targets appearing in the cache.  */
static hash_set<cgraph_node *> *cached_polymorphic_call_targets;

/* The node of the type inheritance graph.  For each type unique in
   the One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in the unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in the odr_types array.  */
  int id;
  /* Is it in an anonymous namespace? */
  bool anonymous_namespace;
  /* Do we know about all derivations of the given type? */
  bool all_derivations_known;
  /* Did we report an ODR violation here? */
  bool odr_violated;
  /* Set when a virtual table without RTTI prevailed over one with it.  */
  bool rtti_broken;
  /* Set when the canonical type is determined using the type name.  */
  bool tbaa_enabled;
};

/* Return TRUE if all derived types of T are known and thus
   we may consider the walk of derived types complete.

   This is typically true only for final anonymous namespace types and types
   defined within functions (that may be COMDAT and thus shared across units,
   but with the same set of derived types).  */

bool
type_all_derivations_known_p (const_tree t)
{
  if (TYPE_FINAL_P (t))
    return true;
  if (flag_ltrans)
    return false;
  /* Non-C++ types may have IDENTIFIER_NODE here, do not crash.  */
  if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
    return true;
  if (type_in_anonymous_namespace_p (t))
    return true;
  return (decl_function_context (TYPE_NAME (t)) != NULL);
}
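
/* Illustration (informal, not from the GCC sources): cases where
   type_all_derivations_known_p is true:

     struct A final { virtual void f (); };          // TYPE_FINAL_P
     namespace { struct B { virtual void f (); }; }  // anonymous namespace
     void g () { struct C { virtual void f (); }; }  // function-local

   None of A, B or C can gain new derived types in another translation
   unit (for C, a COMDAT copy of g may exist elsewhere, but it defines
   the same set of derived types).  */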

/* Return TRUE if the type's constructors are all visible.  */

static bool
type_all_ctors_visible_p (tree t)
{
  return !flag_ltrans
         && symtab->state >= CONSTRUCTION
         /* We cannot always use type_all_derivations_known_p.
            For function-local types we must assume the case where
            the function is COMDAT and shared in between units.

            TODO: These cases are quite easy to get, but we need
            to keep track of C++ privatizing via -Wno-weak
            as well as the IPA privatizing.  */
         && type_in_anonymous_namespace_p (t);
}

/* Return TRUE if the type may have an instance.  */

static bool
type_possibly_instantiated_p (tree t)
{
  tree vtable;
  varpool_node *vnode;

  /* TODO: Add abstract types here.  */
  if (!type_all_ctors_visible_p (t))
    return true;

  vtable = BINFO_VTABLE (TYPE_BINFO (t));
  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
  vnode = varpool_node::get (vtable);
  return vnode && vnode->definition;
}
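
/* Note (informal): BINFO_VTABLE is either the virtual table VAR_DECL
   itself or an address at an offset into it of the form
   &vtable_decl + cst (a POINTER_PLUS_EXPR of an ADDR_EXPR), which is
   why type_possibly_instantiated_p above strips two levels of operands
   to reach the declaration.  */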

/* Hash used to unify ODR types based on their mangled name and for anonymous
   namespace types.  */

struct odr_name_hasher : pointer_hash <odr_type_d>
{
  typedef union tree_node *compare_type;
  static inline hashval_t hash (const odr_type_d *);
  static inline bool equal (const odr_type_d *, const tree_node *);
  static inline void remove (odr_type_d *);
};

static bool
can_be_name_hashed_p (tree t)
{
  return (!in_lto_p || odr_type_p (t));
}

/* Hash type by its ODR name.  */

static hashval_t
hash_odr_name (const_tree t)
{
  gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  gcc_checking_assert (TYPE_NAME (t)
                       && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)));
  return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
}

/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_name_hasher::hash (const odr_type_d *odr_type)
{
  return hash_odr_name (odr_type->type);
}

/* For languages with the One Definition Rule, work out if
   types are the same based on their name.

   This is non-trivial for LTO where minor differences in
   the type representation may have prevented type merging
   from merging two copies of an otherwise equivalent type.

   Until we start streaming mangled type names, this function works
   only for polymorphic types.  */

bool
types_same_for_odr (const_tree type1, const_tree type2)
{
  gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));

  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  if (type1 == type2)
    return true;

  if (!in_lto_p)
    return false;

  /* Anonymous namespace types are never duplicated.  */
  if ((type_with_linkage_p (type1) && type_in_anonymous_namespace_p (type1))
      || (type_with_linkage_p (type2) && type_in_anonymous_namespace_p (type2)))
    return false;

  /* If both types have mangled names, check whether they are the same.
     Watch for anonymous types which are all mangled as "<anon>".  */
  if (!type_with_linkage_p (type1) || !type_with_linkage_p (type2))
    return false;
  if (type_in_anonymous_namespace_p (type1)
      || type_in_anonymous_namespace_p (type2))
    return false;
  return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
          == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
}

/* Return true if we can decide on ODR equivalency.

   In non-LTO this is always decided; in LTO it depends on whether the type
   has ODR info attached.  */

bool
types_odr_comparable (tree t1, tree t2)
{
  return (!in_lto_p
          || TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2)
          || (odr_type_p (TYPE_MAIN_VARIANT (t1))
              && odr_type_p (TYPE_MAIN_VARIANT (t2))));
}

/* Return true if T1 and T2 are ODR equivalent.  If ODR equivalency is not
   known, be conservative and return false.  */

bool
types_must_be_same_for_odr (tree t1, tree t2)
{
  if (types_odr_comparable (t1, t2))
    return types_same_for_odr (t1, t2);
  else
    return TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2);
}

/* If T is a compound type, return the type it is based on.  */

static tree
compound_type_base (const_tree t)
{
  if (TREE_CODE (t) == ARRAY_TYPE
      || POINTER_TYPE_P (t)
      || TREE_CODE (t) == COMPLEX_TYPE
      || VECTOR_TYPE_P (t))
    return TREE_TYPE (t);
  if (TREE_CODE (t) == METHOD_TYPE)
    return TYPE_METHOD_BASETYPE (t);
  if (TREE_CODE (t) == OFFSET_TYPE)
    return TYPE_OFFSET_BASETYPE (t);
  return NULL_TREE;
}
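
/* Examples (informal) of what compound_type_base returns:

     int *        -> int    (POINTER_TYPE)
     int[10]      -> int    (ARRAY_TYPE)
     _Complex int -> int    (COMPLEX_TYPE)
     int C::*     -> C      (OFFSET_TYPE, via TYPE_OFFSET_BASETYPE)
     method of C  -> C      (METHOD_TYPE, via TYPE_METHOD_BASETYPE)

   and NULL_TREE for a non-compound type such as plain int.  */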

/* Return true if T is either an ODR type or a compound type based on it.
   If the function returns true, we know that T is a type originating from
   C++ source even at link-time.  */

bool
odr_or_derived_type_p (const_tree t)
{
  do
    {
      if (odr_type_p (TYPE_MAIN_VARIANT (t)))
        return true;
      /* Function type is a tricky one.  Basically we can consider it
         ODR derived if the return type or any of the parameters is.
         We need to check all parameters because LTO streaming merges
         common types (such as void) and they are not considered ODR then.  */
      if (TREE_CODE (t) == FUNCTION_TYPE)
        {
          if (TYPE_METHOD_BASETYPE (t))
            t = TYPE_METHOD_BASETYPE (t);
          else
            {
              if (TREE_TYPE (t) && odr_or_derived_type_p (TREE_TYPE (t)))
                return true;
              for (t = TYPE_ARG_TYPES (t); t; t = TREE_CHAIN (t))
                if (odr_or_derived_type_p (TYPE_MAIN_VARIANT (TREE_VALUE (t))))
                  return true;
              return false;
            }
        }
      else
        t = compound_type_base (t);
    }
  while (t);
  /* T is NULL here, so no ODR type was found along the chain.  */
  return false;
}

/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_name_hasher::equal (const odr_type_d *o1, const tree_node *t2)
{
  tree t1 = o1->type;

  gcc_checking_assert (TYPE_MAIN_VARIANT (t2) == t2);
  gcc_checking_assert (TYPE_MAIN_VARIANT (t1) == t1);
  if (t1 == t2)
    return true;
  if (!in_lto_p)
    return false;
  /* Check for anonymous namespaces.  */
  if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
      || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
    return false;
  gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1)));
  gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
  return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
          == DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
}

/* Free ODR type V.  */

inline void
odr_name_hasher::remove (odr_type_d *v)
{
  v->bases.release ();
  v->derived_types.release ();
  if (v->types_set)
    delete v->types_set;
  ggc_free (v);
}

/* ODR type hash used to look up an ODR type based on a tree type node.  */

typedef hash_table<odr_name_hasher> odr_hash_type;
static odr_hash_type *odr_hash;

/* ODR types are also stored into the ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  The vector is garbage
   collected so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)

/* All enums defined and accessible for the unit.  */
static GTY(()) vec <tree, va_gc> *odr_enums;

/* Information we hold about a value defined by an enum type.  */
struct odr_enum_val
{
  const char *name;
  wide_int val;
  location_t locus;
};

/* Information about enum values.  */
struct odr_enum
{
  location_t locus;
  auto_vec<odr_enum_val, 0> vals;
  bool warned;
};

/* A table of all ODR enum definitions.  */
static hash_map <nofree_string_hash, odr_enum> *odr_enum_map = NULL;
static struct obstack odr_enum_obstack;

/* Set TYPE_BINFO of TYPE and its variants to BINFO.  */
void
set_type_binfo (tree type, tree binfo)
{
  for (; type; type = TYPE_NEXT_VARIANT (type))
    if (COMPLETE_TYPE_P (type))
      TYPE_BINFO (type) = binfo;
    else
      gcc_assert (!TYPE_BINFO (type));
}

/* Return true if type variants match.
   This assumes that we already verified that T1 and T2 are variants of the
   same type.  */

static bool
type_variants_equivalent_p (tree t1, tree t2)
{
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (comp_type_attributes (t1, t2) != 1)
    return false;

  if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
      && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
    return false;

  return true;
}

/* Compare T1 and T2 based on name or structure.  */

static bool
odr_subtypes_equivalent_p (tree t1, tree t2,
                           hash_set<type_pair> *visited,
                           location_t loc1, location_t loc2)
{
  /* This can happen in incomplete types that should be handled earlier.  */
  gcc_assert (t1 && t2);

  if (t1 == t2)
    return true;

  /* Anonymous namespace types must match exactly.  */
  if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1))
       && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1)))
      || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2))
          && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2))))
    return false;

  /* For ODR types be sure to compare their names.
     To support -Wno-odr-type-merging we allow one type to be non-ODR
     and the other ODR even though it is a violation.  */
  if (types_odr_comparable (t1, t2))
    {
      if (t1 != t2
          && odr_type_p (TYPE_MAIN_VARIANT (t1))
          && get_odr_type (TYPE_MAIN_VARIANT (t1), true)->odr_violated)
        return false;
      if (!types_same_for_odr (t1, t2))
        return false;
      if (!type_variants_equivalent_p (t1, t2))
        return false;
      /* Limit recursion: if subtypes are ODR types and we know
         that they are the same, be happy.  */
      if (odr_type_p (TYPE_MAIN_VARIANT (t1)))
        return true;
    }

  /* Component types, builtins and possibly ODR-violating types
     have to be compared structurally.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;
  if (AGGREGATE_TYPE_P (t1)
      && (TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
    return false;

  type_pair pair = {TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2)};
  if (TYPE_UID (TYPE_MAIN_VARIANT (t1)) > TYPE_UID (TYPE_MAIN_VARIANT (t2)))
    {
      pair.first = TYPE_MAIN_VARIANT (t2);
      pair.second = TYPE_MAIN_VARIANT (t1);
    }
  if (visited->add (pair))
    return true;
  if (!odr_types_equivalent_p (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2),
                               false, NULL, visited, loc1, loc2))
    return false;
  if (!type_variants_equivalent_p (t1, t2))
    return false;
  return true;
}

/* Return true if DECL1 and DECL2 are identical methods.  Consider
   name equivalent to name.localalias.xyz.  */

static bool
methods_equal_p (tree decl1, tree decl2)
{
  if (DECL_ASSEMBLER_NAME (decl1) == DECL_ASSEMBLER_NAME (decl2))
    return true;
  const char sep = symbol_table::symbol_suffix_separator ();

  const char *name1 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl1));
  const char *ptr1 = strchr (name1, sep);
  int len1 = ptr1 ? ptr1 - name1 : strlen (name1);

  const char *name2 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl2));
  const char *ptr2 = strchr (name2, sep);
  int len2 = ptr2 ? ptr2 - name2 : strlen (name2);

  if (len1 != len2)
    return false;
  return !strncmp (name1, name2, len1);
}
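
/* Example (informal): with the default suffix separator '.',
   methods_equal_p treats "_ZN1A1fEv" and "_ZN1A1fEv.localalias.3" as
   equal, while "_ZN1A1fEv" and "_ZN1A1gEv" remain distinct.  */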

/* Compare two virtual tables, PREVAILING and VTABLE, and output ODR
   violation warnings.  */

void
compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
{
  int n1, n2;

  if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
    {
      odr_violation_reported = true;
      if (DECL_VIRTUAL_P (prevailing->decl))
        {
          varpool_node *tmp = prevailing;
          prevailing = vtable;
          vtable = tmp;
        }
      auto_diagnostic_group d;
      if (warning_at (DECL_SOURCE_LOCATION
                        (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
                      OPT_Wodr,
                      "virtual table of type %qD violates one definition rule",
                      DECL_CONTEXT (vtable->decl)))
        inform (DECL_SOURCE_LOCATION (prevailing->decl),
                "variable of same assembler name as the virtual table is "
                "defined in another translation unit");
      return;
    }
  if (!prevailing->definition || !vtable->definition)
    return;

  /* If we do not stream ODR type info, do not bother to do useful compare.  */
  if (!TYPE_BINFO (DECL_CONTEXT (vtable->decl))
      || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable->decl))))
    return;

  odr_type class_type = get_odr_type (DECL_CONTEXT (vtable->decl), true);

  if (class_type->odr_violated)
    return;

  for (n1 = 0, n2 = 0; true; n1++, n2++)
    {
      struct ipa_ref *ref1, *ref2;
      bool end1, end2;

      end1 = !prevailing->iterate_reference (n1, ref1);
      end2 = !vtable->iterate_reference (n2, ref2);

      /* !DECL_VIRTUAL_P means RTTI entry;
         we warn when RTTI is lost because non-RTTI prevails; we silently
         accept the other case.  */
      while (!end2
             && (end1
                 || (methods_equal_p (ref1->referred->decl,
                                      ref2->referred->decl)
                     && TREE_CODE (ref1->referred->decl) == FUNCTION_DECL))
             && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
        {
          if (!class_type->rtti_broken)
            {
              auto_diagnostic_group d;
              if (warning_at (DECL_SOURCE_LOCATION
                                (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
                              OPT_Wodr,
                              "virtual table of type %qD contains RTTI "
                              "information",
                              DECL_CONTEXT (vtable->decl)))
                {
                  inform (DECL_SOURCE_LOCATION
                            (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
                          "but is prevailed by one without RTTI from another"
                          " translation unit");
                  inform (DECL_SOURCE_LOCATION
                            (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
                          "RTTI will not work on this type");
                  class_type->rtti_broken = true;
                }
            }
          n2++;
          end2 = !vtable->iterate_reference (n2, ref2);
        }
      while (!end1
             && (end2
                 || (methods_equal_p (ref2->referred->decl, ref1->referred->decl)
                     && TREE_CODE (ref2->referred->decl) == FUNCTION_DECL))
             && TREE_CODE (ref1->referred->decl) != FUNCTION_DECL)
        {
          n1++;
          end1 = !prevailing->iterate_reference (n1, ref1);
        }

      /* Finished?  */
      if (end1 && end2)
        {
          /* Extra paranoia; compare the sizes.  We do not have information
             about virtual inheritance offsets, so just be sure that these
             match.
             Do this as the very last check so the not very informative error
             is not output too often.  */
          if (DECL_SIZE (prevailing->decl) != DECL_SIZE (vtable->decl))
            {
              class_type->odr_violated = true;
              auto_diagnostic_group d;
              tree ctx = TYPE_NAME (DECL_CONTEXT (vtable->decl));
              if (warning_at (DECL_SOURCE_LOCATION (ctx), OPT_Wodr,
                              "virtual table of type %qD violates "
                              "one definition rule",
                              DECL_CONTEXT (vtable->decl)))
                {
                  ctx = TYPE_NAME (DECL_CONTEXT (prevailing->decl));
                  inform (DECL_SOURCE_LOCATION (ctx),
                          "the conflicting type defined in another translation"
                          " unit has virtual table of different size");
                }
            }
          return;
        }

      if (!end1 && !end2)
        {
          if (methods_equal_p (ref1->referred->decl, ref2->referred->decl))
            continue;

          class_type->odr_violated = true;

          /* If the loops above stopped on a non-virtual pointer, we have
             a mismatch in RTTI information mangling.  */
          if (TREE_CODE (ref1->referred->decl) != FUNCTION_DECL
              && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
            {
              auto_diagnostic_group d;
              if (warning_at (DECL_SOURCE_LOCATION
                                (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
                              OPT_Wodr,
                              "virtual table of type %qD violates "
                              "one definition rule",
                              DECL_CONTEXT (vtable->decl)))
                {
                  inform (DECL_SOURCE_LOCATION
                            (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
                          "the conflicting type defined in another translation "
                          "unit with different RTTI information");
                }
              return;
            }
          /* At this point both REF1 and REF2 point either to a virtual table
             or a virtual method.  If one points to a virtual table and the
             other to a method, we can complain the same way as if one table
             was shorter than the other, pointing out the extra method.  */
          if (TREE_CODE (ref1->referred->decl)
              != TREE_CODE (ref2->referred->decl))
            {
              if (VAR_P (ref1->referred->decl))
                end1 = true;
              else if (VAR_P (ref2->referred->decl))
                end2 = true;
            }
        }

      class_type->odr_violated = true;

      /* Complain about size mismatch.  Either we have too many virtual
         functions or too many virtual table pointers.  */
      if (end1 || end2)
        {
          if (end1)
            {
              varpool_node *tmp = prevailing;
              prevailing = vtable;
              vtable = tmp;
              ref1 = ref2;
            }
          auto_diagnostic_group d;
          if (warning_at (DECL_SOURCE_LOCATION
                            (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
                          OPT_Wodr,
                          "virtual table of type %qD violates "
                          "one definition rule",
                          DECL_CONTEXT (vtable->decl)))
            {
              if (TREE_CODE (ref1->referring->decl) == FUNCTION_DECL)
                {
                  inform (DECL_SOURCE_LOCATION
                            (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
                          "the conflicting type defined in another translation "
                          "unit");
                  inform (DECL_SOURCE_LOCATION
                            (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
                          "contains additional virtual method %qD",
                          ref1->referred->decl);
                }
              else
                {
                  inform (DECL_SOURCE_LOCATION
                            (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
                          "the conflicting type defined in another translation "
                          "unit has virtual table with more entries");
                }
            }
          return;
        }

      /* And in the last case we have either a mismatch between two virtual
         methods or two virtual table pointers.  */
      auto_diagnostic_group d;
      if (warning_at (DECL_SOURCE_LOCATION
                        (TYPE_NAME (DECL_CONTEXT (vtable->decl))), OPT_Wodr,
                      "virtual table of type %qD violates "
                      "one definition rule",
                      DECL_CONTEXT (vtable->decl)))
        {
          if (TREE_CODE (ref1->referred->decl) == FUNCTION_DECL)
            {
              inform (DECL_SOURCE_LOCATION
                        (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
                      "the conflicting type defined in another translation "
                      "unit");
              gcc_assert (TREE_CODE (ref2->referred->decl)
                          == FUNCTION_DECL);
              inform (DECL_SOURCE_LOCATION
                        (ref1->referred->ultimate_alias_target ()->decl),
                      "virtual method %qD",
                      ref1->referred->ultimate_alias_target ()->decl);
              inform (DECL_SOURCE_LOCATION
                        (ref2->referred->ultimate_alias_target ()->decl),
                      "ought to match virtual method %qD but does not",
                      ref2->referred->ultimate_alias_target ()->decl);
            }
          else
            inform (DECL_SOURCE_LOCATION
                      (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
                    "the conflicting type defined in another translation "
                    "unit has virtual table with different contents");
          return;
        }
    }
}
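
/* Note (informal): a typical way to hit the RTTI diagnostics above is
   linking, with -flto, translation units compiled with and without
   -fno-rtti; the vtable from the -fno-rtti unit carries no reference
   to the type info symbol, so if it prevails, RTTI for the class is
   lost.  */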

/* Output ODR violation warning about T1 and T2 with REASON.
   Display the locations of ST1 and ST2 if REASON speaks about a field or
   method of the type.
   If WARN is false, do nothing.  Set WARNED if a warning was indeed
   output.  */

static void
warn_odr (tree t1, tree t2, tree st1, tree st2,
          bool warn, bool *warned, const char *reason)
{
  tree decl2 = TYPE_NAME (TYPE_MAIN_VARIANT (t2));
  if (warned)
    *warned = false;

  if (!warn || !TYPE_NAME (TYPE_MAIN_VARIANT (t1)))
    return;

  /* ODR warnings are output during LTO streaming; we must apply the location
     cache for potential warnings to be output correctly.  */
  if (lto_location_cache::current_cache)
    lto_location_cache::current_cache->apply_location_cache ();

  auto_diagnostic_group d;
  if (t1 != TYPE_MAIN_VARIANT (t1)
      && TYPE_NAME (t1) != TYPE_NAME (TYPE_MAIN_VARIANT (t1)))
    {
      if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1))),
                       OPT_Wodr, "type %qT (typedef of %qT) violates the "
                       "C++ One Definition Rule",
                       t1, TYPE_MAIN_VARIANT (t1)))
        return;
    }
  else
    {
      if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1))),
                       OPT_Wodr, "type %qT violates the C++ One Definition Rule",
                       t1))
        return;
    }
  if (!st1 && !st2)
    ;
  /* For FIELD_DECL support also the case where one of the fields is
     NULL - this is used when the structures have a mismatching number of
     elements.  */
  else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
              "a different type is defined in another translation unit");
      if (!st1)
        {
          st1 = st2;
          st2 = NULL;
        }
      inform (DECL_SOURCE_LOCATION (st1),
              "the first difference of corresponding definitions is field %qD",
              st1);
      if (st2)
        decl2 = st2;
    }
  else if (TREE_CODE (st1) == FUNCTION_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
              "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
              "the first difference of corresponding definitions is method %qD",
              st1);
      decl2 = st2;
    }
  else
    return;
  inform (DECL_SOURCE_LOCATION (decl2), reason);

  if (warned)
    *warned = true;
}

/* Return true if T1 and T2 are incompatible and we want to recursively
   dive into them from warn_type_mismatch to give a sensible answer.  */

static bool
type_mismatch_p (tree t1, tree t2)
{
  if (odr_or_derived_type_p (t1) && odr_or_derived_type_p (t2)
      && !odr_types_equivalent_p (t1, t2))
    return true;
  return !types_compatible_p (t1, t2);
}


/* Types T1 and T2 were found to be incompatible in a context in which they
   can't be (either used to declare a symbol of the same assembler name or
   unified by the ODR rule).  We already output a warning about this, but if
   possible, output extra information on how the types mismatch.

   This is hard to do in general.  We basically handle the common cases.

   If LOC1 and LOC2 are meaningful locations, use them in the case the types
   themselves do not have one.  */

void
warn_types_mismatch (tree t1, tree t2, location_t loc1, location_t loc2)
{
  /* The location of a type is known only if it has a TYPE_NAME and the name
     is a TYPE_DECL.  */
  location_t loc_t1 = TYPE_NAME (t1) && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
                      ? DECL_SOURCE_LOCATION (TYPE_NAME (t1))
                      : UNKNOWN_LOCATION;
  location_t loc_t2 = TYPE_NAME (t2) && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
                      ? DECL_SOURCE_LOCATION (TYPE_NAME (t2))
                      : UNKNOWN_LOCATION;
  bool loc_t2_useful = false;

  /* With LTO it is a common case that the locations of both types match.
     See if T2 has a location that is different from T1.  If so, we will
     inform the user about the location.
     Do not consider the locations passed to us in LOC1/LOC2 as those are
     already output.  */
  if (loc_t2 > BUILTINS_LOCATION && loc_t2 != loc_t1)
    {
      if (loc_t1 <= BUILTINS_LOCATION)
        loc_t2_useful = true;
      else
        {
          expanded_location xloc1 = expand_location (loc_t1);
          expanded_location xloc2 = expand_location (loc_t2);

          if (strcmp (xloc1.file, xloc2.file)
              || xloc1.line != xloc2.line
              || xloc1.column != xloc2.column)
            loc_t2_useful = true;
        }
    }

  if (loc_t1 <= BUILTINS_LOCATION)
    loc_t1 = loc1;
  if (loc_t2 <= BUILTINS_LOCATION)
    loc_t2 = loc2;

  location_t loc = loc_t1 <= BUILTINS_LOCATION ? loc_t2 : loc_t1;

  /* It is quite a common bug to reference an anonymous namespace type in
     a non-anonymous namespace class.  */
  tree mt1 = TYPE_MAIN_VARIANT (t1);
  tree mt2 = TYPE_MAIN_VARIANT (t2);
  if ((type_with_linkage_p (mt1)
       && type_in_anonymous_namespace_p (mt1))
      || (type_with_linkage_p (mt2)
          && type_in_anonymous_namespace_p (mt2)))
    {
      if (!type_with_linkage_p (mt1)
          || !type_in_anonymous_namespace_p (mt1))
        {
          std::swap (t1, t2);
          std::swap (mt1, mt2);
          std::swap (loc_t1, loc_t2);
        }
      gcc_assert (TYPE_NAME (mt1)
                  && TREE_CODE (TYPE_NAME (mt1)) == TYPE_DECL);
      tree n1 = TYPE_NAME (mt1);
      tree n2 = TYPE_NAME (mt2) ? TYPE_NAME (mt2) : NULL;

      if (TREE_CODE (n1) == TYPE_DECL)
        n1 = DECL_NAME (n1);
      if (n2 && TREE_CODE (n2) == TYPE_DECL)
        n2 = DECL_NAME (n2);
      /* Most of the time, the type names will match, do not be unnecessarily
         verbose.  */
      if (n1 != n2)
        inform (loc_t1,
                "type %qT defined in anonymous namespace cannot match "
                "type %qT across the translation unit boundary",
                t1, t2);
      else
        inform (loc_t1,
                "type %qT defined in anonymous namespace cannot match "
                "across the translation unit boundary",
                t1);
      if (loc_t2_useful)
        inform (loc_t2,
                "the incompatible type defined in another translation unit");
      return;
    }
  /* If types have mangled ODR names and they are different, it is most
     informative to output those.
     This also covers types defined in different namespaces.  */
  const char *odr1 = get_odr_name_for_type (mt1);
  const char *odr2 = get_odr_name_for_type (mt2);
  if (odr1 != NULL && odr2 != NULL && odr1 != odr2)
    {
      const int opts = DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES;
      char *name1 = xstrdup (cplus_demangle (odr1, opts));
      char *name2 = cplus_demangle (odr2, opts);
      if (name1 && name2 && strcmp (name1, name2))
        {
          inform (loc_t1,
                  "type name %qs should match type name %qs",
                  name1, name2);
          if (loc_t2_useful)
            inform (loc_t2,
                    "the incompatible type is defined here");
          free (name1);
          return;
        }
      free (name1);
    }
  /* A tricky case are compound types.  Often they appear the same in source
     code and the mismatch is dragged in by the type they are built from.
     Look for those differences in subtypes and try to be informative.  In
     other cases just output nothing because the source code is probably
     different and in this case we already output all the necessary info.  */
  if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
    {
      if (TREE_CODE (t1) == TREE_CODE (t2))
        {
          if (TREE_CODE (t1) == ARRAY_TYPE
              && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
            {
              tree i1 = TYPE_DOMAIN (t1);
              tree i2 = TYPE_DOMAIN (t2);

              if (i1 && i2
                  && TYPE_MAX_VALUE (i1)
                  && TYPE_MAX_VALUE (i2)
                  && !operand_equal_p (TYPE_MAX_VALUE (i1),
                                       TYPE_MAX_VALUE (i2), 0))
                {
                  inform (loc,
                          "array types have different bounds");
                  return;
                }
            }
          if ((POINTER_TYPE_P (t1) || TREE_CODE (t1) == ARRAY_TYPE)
              && type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
            warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1, loc_t2);
          else if (TREE_CODE (t1) == METHOD_TYPE
                   || TREE_CODE (t1) == FUNCTION_TYPE)
            {
              tree parms1 = NULL, parms2 = NULL;
              int count = 1;

              if (type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
                {
                  inform (loc, "return value type mismatch");
                  warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1,
                                       loc_t2);
                  return;
                }
              if (prototype_p (t1) && prototype_p (t2))
                for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
                     parms1 && parms2;
                     parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2),
                     count++)
                  {
                    if (type_mismatch_p (TREE_VALUE (parms1), TREE_VALUE (parms2)))
                      {
                        if (count == 1 && TREE_CODE (t1) == METHOD_TYPE)
                          inform (loc,
                                  "implicit this pointer type mismatch");
                        else
                          inform (loc,
                                  "type mismatch in parameter %i",
                                  count - (TREE_CODE (t1) == METHOD_TYPE));
                        warn_types_mismatch (TREE_VALUE (parms1),
                                             TREE_VALUE (parms2),
                                             loc_t1, loc_t2);
                        return;
                      }
                  }
              if (parms1 || parms2)
                {
                  inform (loc,
                          "types have different parameter counts");
                  return;
                }
            }
        }
      return;
    }

  if (types_odr_comparable (t1, t2)
      /* We assign integer types mangled names to be able to handle
         signed/unsigned chars.  Accepting them here would however lead to
         a confusing message like
         "type ‘const int’ itself violates the C++ One Definition Rule".  */
      && TREE_CODE (t1) != INTEGER_TYPE
      && types_same_for_odr (t1, t2))
    inform (loc_t1,
            "type %qT itself violates the C++ One Definition Rule", t1);
  /* Prevent pointless warnings like "struct aa" should match "struct aa".  */
  else if (TYPE_NAME (t1) == TYPE_NAME (t2)
           && TREE_CODE (t1) == TREE_CODE (t2) && !loc_t2_useful)
    return;
  else
    inform (loc_t1, "type %qT should match type %qT",
            t1, t2);
  if (loc_t2_useful)
    inform (loc_t2, "the incompatible type is defined here");
}

/* Return true if T should be ignored in TYPE_FIELDS for ODR comparison.  */

static bool
skip_in_fields_list_p (tree t)
{
  if (TREE_CODE (t) != FIELD_DECL)
    return true;
  /* C++ FE introduces zero-sized fields depending on the -std setting, see
     PR89358.  */
  if (DECL_SIZE (t)
      && integer_zerop (DECL_SIZE (t))
      && DECL_ARTIFICIAL (t)
      && DECL_IGNORED_P (t)
      && !DECL_NAME (t))
    return true;
  return false;
}

/* Compare T1 and T2, report ODR violations if WARN is true and set
   WARNED to true if anything is reported.  Return true if types match.
   If true is returned, the types are also compatible in the sense of
   gimple_canonical_types_compatible_p.
   If LOC1 and LOC2 are not UNKNOWN_LOCATION, they may be used to output a
   warning about the type if the type itself does not have a location.  */

static bool
odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
                        hash_set<type_pair> *visited,
                        location_t loc1, location_t loc2)
{
  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
                G_("a different type is defined in another translation unit"));
      return false;
    }

  if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1))
       && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1)))
      || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2))
          && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2))))
    {
      /* We cannot trip this when comparing ODR types, only when trying to
         match different ODR derivations from different declarations.
         So WARN should always be false.  */
      gcc_assert (!warn);
      return false;
    }

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
        {
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("a type with different precision is defined "
                       "in another translation unit"));
          return false;
        }
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
        {
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("a type with different signedness is defined "
                       "in another translation unit"));
          return false;
        }

      if (TREE_CODE (t1) == INTEGER_TYPE
          && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
        {
          /* char WRT uint_8?  */
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("a different type is defined in another "
                       "translation unit"));
          return false;
        }

      /* For canonical type comparisons we do not want to build SCCs
         so we cannot compare pointed-to types.  But we can, for now,
         require the same pointed-to type kind and match what
         useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
        {
          if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
              != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("it is defined as a pointer in different address "
                           "space in another translation unit"));
              return false;
            }

          if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
                                          visited, loc1, loc2))
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("it is defined as a pointer to different type "
                           "in another translation unit"));
              if (warn && warned)
                warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2),
                                     loc1, loc2);
              return false;
            }
        }

      if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
          && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
                                         visited, loc1, loc2))
        {
          /* Probably specific enough.  */
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("a different type is defined "
                       "in another translation unit"));
          if (warn && warned)
            warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
          return false;
        }
    }
  /* Do type-specific comparisons.  */
  else switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      {
        /* Array types are the same if the element types are the same and
           the numbers of elements are the same.  */
        if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
                                        visited, loc1, loc2))
          {
            warn_odr (t1, t2, NULL, NULL, warn, warned,
                      G_("a different type is defined in another "
                         "translation unit"));
            if (warn && warned)
              warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
          }
        gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
        gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
                    == TYPE_NONALIASED_COMPONENT (t2));

        tree i1 = TYPE_DOMAIN (t1);
        tree i2 = TYPE_DOMAIN (t2);

        /* For an incomplete external array, the type domain can be
           NULL_TREE.  Check this condition also.  */
        if (i1 == NULL_TREE || i2 == NULL_TREE)
          return type_variants_equivalent_p (t1, t2);

        tree min1 = TYPE_MIN_VALUE (i1);
        tree min2 = TYPE_MIN_VALUE (i2);
        tree max1 = TYPE_MAX_VALUE (i1);
        tree max2 = TYPE_MAX_VALUE (i2);

        /* In C++, minimums should always be 0.  */
        gcc_assert (min1 == min2);
        if (!operand_equal_p (max1, max2, 0))
          {
            warn_odr (t1, t2, NULL, NULL, warn, warned,
                      G_("an array of different size is defined "
                         "in another translation unit"));
            return false;
          }
      }
      break;

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and argument types
         are the same.  */
      if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
                                      visited, loc1, loc2))
        {
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("has different return value "
                       "in another translation unit"));
          if (warn && warned)
            warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
          return false;
        }

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
          || !prototype_p (t1) || !prototype_p (t2))
        return type_variants_equivalent_p (t1, t2);
      else
        {
          tree parms1, parms2;

          for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
               parms1 && parms2;
               parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
            {
              if (!odr_subtypes_equivalent_p
                     (TREE_VALUE (parms1), TREE_VALUE (parms2),
                      visited, loc1, loc2))
                {
                  warn_odr (t1, t2, NULL, NULL, warn, warned,
                            G_("has different parameters in another "
                               "translation unit"));
                  if (warn && warned)
                    warn_types_mismatch (TREE_VALUE (parms1),
                                         TREE_VALUE (parms2), loc1, loc2);
                  return false;
                }
            }

          if (parms1 || parms2)
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("has different parameters "
                           "in another translation unit"));
              return false;
            }

          return type_variants_equivalent_p (t1, t2);
        }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f1, f2;

        /* For aggregate types, all the fields must be the same.  */
        if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
          {
            if (TYPE_BINFO (t1) && TYPE_BINFO (t2)
                && polymorphic_type_binfo_p (TYPE_BINFO (t1))
                   != polymorphic_type_binfo_p (TYPE_BINFO (t2)))
              {
                if (polymorphic_type_binfo_p (TYPE_BINFO (t1)))
                  warn_odr (t1, t2, NULL, NULL, warn, warned,
                            G_("a type defined in another translation unit "
                               "is not polymorphic"));
                else
                  warn_odr (t1, t2, NULL, NULL, warn, warned,
                            G_("a type defined in another translation unit "
                               "is polymorphic"));
                return false;
              }
            for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
                 f1 || f2;
                 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
              {
                /* Skip non-fields.  */
                while (f1 && skip_in_fields_list_p (f1))
                  f1 = TREE_CHAIN (f1);
                while (f2 && skip_in_fields_list_p (f2))
                  f2 = TREE_CHAIN (f2);
                if (!f1 || !f2)
                  break;
                if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
                  {
                    warn_odr (t1, t2, NULL, NULL, warn, warned,
                              G_("a type with different virtual table pointers"
                                 " is defined in another translation unit"));
                    return false;
                  }
                if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
                  {
                    warn_odr (t1, t2, NULL, NULL, warn, warned,
                              G_("a type with different bases is defined "
                                 "in another translation unit"));
                    return false;
                  }
                if (DECL_NAME (f1) != DECL_NAME (f2)
                    && !DECL_ARTIFICIAL (f1))
                  {
                    warn_odr (t1, t2, f1, f2, warn, warned,
                              G_("a field with different name is defined "
                                 "in another translation unit"));
                    return false;
                  }
                if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
                                                TREE_TYPE (f2),
                                                visited, loc1, loc2))
                  {
                    /* Do not warn about artificial fields and just go into
                       the generic field mismatch warning.  */
                    if (DECL_ARTIFICIAL (f1))
                      break;

                    warn_odr (t1, t2, f1, f2, warn, warned,
                              G_("a field of same name but different type "
                                 "is defined in another translation unit"));
                    if (warn && warned)
                      warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2),
                                           loc1, loc2);
                    return false;
                  }
                if (!gimple_compare_field_offset (f1, f2))
                  {
                    /* Do not warn about artificial fields and just go into
                       the generic field mismatch warning.  */
                    if (DECL_ARTIFICIAL (f1))
                      break;
                    warn_odr (t1, t2, f1, f2, warn, warned,
                              G_("fields have different layout "
                                 "in another translation unit"));
                    return false;
                  }
                if (DECL_BIT_FIELD (f1) != DECL_BIT_FIELD (f2))
                  {
                    warn_odr (t1, t2, f1, f2, warn, warned,
                              G_("one field is a bitfield while the other "
                                 "is not"));
                    return false;
                  }
                else
                  gcc_assert (DECL_NONADDRESSABLE_P (f1)
                              == DECL_NONADDRESSABLE_P (f2));
              }

            /* If one aggregate has more fields than the other, they
               are not the same.  */
            if (f1 || f2)
              {
                if ((f1 && DECL_VIRTUAL_P (f1)) || (f2 && DECL_VIRTUAL_P (f2)))
                  warn_odr (t1, t2, NULL, NULL, warn, warned,
                            G_("a type with different virtual table pointers"
                               " is defined in another translation unit"));
                else if ((f1 && DECL_ARTIFICIAL (f1))
                         || (f2 && DECL_ARTIFICIAL (f2)))
                  warn_odr (t1, t2, NULL, NULL, warn, warned,
                            G_("a type with different bases is defined "
                               "in another translation unit"));
                else
                  warn_odr (t1, t2, f1, f2, warn, warned,
                            G_("a type with different number of fields "
                               "is defined in another translation unit"));

                return false;
              }
          }
        break;
      }
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case NULLPTR_TYPE:
      break;

    default:
      debug_tree (t1);
      gcc_unreachable ();
    }

  /* Those are better to come last as they are utterly uninformative.  */
  if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
      && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
                G_("a type with different size "
                   "is defined in another translation unit"));
      return false;
    }

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2)
      && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
                G_("one type needs to be constructed while the other does not"));
      gcc_checking_assert (RECORD_OR_UNION_TYPE_P (t1));
      return false;
    }
  /* There is no really good user-facing warning for this.
     Either the original reason for the modes being different is lost during
     streaming or we should catch earlier warnings.  We however must detect
     the mismatch to keep the type verifier from complaining about mismatched
     types between the type and the canonical type.  See PR91576.  */
  if (TYPE_MODE (t1) != TYPE_MODE (t2)
      && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
                G_("memory layout mismatch"));
      return false;
    }

  gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
              || operand_equal_p (TYPE_SIZE_UNIT (t1),
                                  TYPE_SIZE_UNIT (t2), 0));
  return type_variants_equivalent_p (t1, t2);
}

/* Return true if TYPE1 and TYPE2 are equivalent for the One Definition
   Rule.  */

bool
odr_types_equivalent_p (tree type1, tree type2)
{
  gcc_checking_assert (odr_or_derived_type_p (type1)
                       && odr_or_derived_type_p (type2));

  hash_set<type_pair> visited;
  return odr_types_equivalent_p (type1, type2, false, NULL,
                                 &visited, UNKNOWN_LOCATION, UNKNOWN_LOCATION);
}
1582
1583 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1584 from VAL->type. This may happen in LTO where tree merging did not merge
1585 all variants of the same type or due to ODR violation.
1586
1587 Analyze and report ODR violations and add type to duplicate list.
1588 If TYPE is more specified than VAL->type, prevail VAL->type. Also if
1589 this is first time we see definition of a class return true so the
1590 base types are analyzed. */
1591
1592 static bool
1593 add_type_duplicate (odr_type val, tree type)
1594 {
1595 bool build_bases = false;
1596 bool prevail = false;
1597 bool odr_must_violate = false;
1598
1599 if (!val->types_set)
1600 val->types_set = new hash_set<tree>;
1601
1602 /* Chose polymorphic type as leader (this happens only in case of ODR
1603 violations. */
1604 if ((TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1605 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1606 && (TREE_CODE (val->type) != RECORD_TYPE || !TYPE_BINFO (val->type)
1607 || !polymorphic_type_binfo_p (TYPE_BINFO (val->type))))
1608 {
1609 prevail = true;
1610 build_bases = true;
1611 }
1612 /* Always prefer complete type to be the leader. */
1613 else if (!COMPLETE_TYPE_P (val->type) && COMPLETE_TYPE_P (type))
1614 {
1615 prevail = true;
1616 if (TREE_CODE (type) == RECORD_TYPE)
1617 build_bases = TYPE_BINFO (type);
1618 }
1619 else if (COMPLETE_TYPE_P (val->type) && !COMPLETE_TYPE_P (type))
1620 ;
1621 else if (TREE_CODE (val->type) == RECORD_TYPE
1622 && TREE_CODE (type) == RECORD_TYPE
1623 && TYPE_BINFO (type) && !TYPE_BINFO (val->type))
1624 {
1625 gcc_assert (!val->bases.length ());
1626 build_bases = true;
1627 prevail = true;
1628 }
1629
1630 if (prevail)
1631 std::swap (val->type, type);
1632
1633 val->types_set->add (type);
1634
1635 if (!odr_hash)
1636 return false;
1637
1638 gcc_checking_assert (can_be_name_hashed_p (type)
1639 && can_be_name_hashed_p (val->type));
1640
1641 bool merge = true;
1642 bool base_mismatch = false;
1643 unsigned int i;
1644 bool warned = false;
1645 hash_set<type_pair> visited;
1646
1647 gcc_assert (in_lto_p);
1648 vec_safe_push (val->types, type);
1649
1650 /* If both are class types, compare the bases. */
1651 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1652 && TREE_CODE (val->type) == RECORD_TYPE
1653 && TREE_CODE (type) == RECORD_TYPE
1654 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
1655 {
1656 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1657 != BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1658 {
1659 if (!flag_ltrans && !warned && !val->odr_violated)
1660 {
1661 tree extra_base;
1662 warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1663 "a type with the same name but different "
1664 "number of polymorphic bases is "
1665 "defined in another translation unit");
1666 if (warned)
1667 {
1668 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1669 > BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1670 extra_base = BINFO_BASE_BINFO
1671 (TYPE_BINFO (type),
1672 BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)));
1673 else
1674 extra_base = BINFO_BASE_BINFO
1675 (TYPE_BINFO (val->type),
1676 BINFO_N_BASE_BINFOS (TYPE_BINFO (type)));
1677 tree extra_base_type = BINFO_TYPE (extra_base);
1678 inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type)),
1679 "the extra base is defined here");
1680 }
1681 }
1682 base_mismatch = true;
1683 }
1684 else
1685 for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1686 {
1687 tree base1 = BINFO_BASE_BINFO (TYPE_BINFO (type), i);
1688 tree base2 = BINFO_BASE_BINFO (TYPE_BINFO (val->type), i);
1689 tree type1 = BINFO_TYPE (base1);
1690 tree type2 = BINFO_TYPE (base2);
1691
1692 if (types_odr_comparable (type1, type2))
1693 {
1694 if (!types_same_for_odr (type1, type2))
1695 base_mismatch = true;
1696 }
1697 else
1698 if (!odr_types_equivalent_p (type1, type2))
1699 base_mismatch = true;
1700 if (base_mismatch)
1701 {
1702 if (!warned && !val->odr_violated)
1703 {
1704 warn_odr (type, val->type, NULL, NULL,
1705 !warned, &warned,
1706 "a type with the same name but different base "
1707 "type is defined in another translation unit");
1708 if (warned)
1709 warn_types_mismatch (type1, type2,
1710 UNKNOWN_LOCATION, UNKNOWN_LOCATION);
1711 }
1712 break;
1713 }
1714 if (BINFO_OFFSET (base1) != BINFO_OFFSET (base2))
1715 {
1716 base_mismatch = true;
1717 if (!warned && !val->odr_violated)
1718 warn_odr (type, val->type, NULL, NULL,
1719 !warned, &warned,
1720 "a type with the same name but different base "
1721 "layout is defined in another translation unit");
1722 break;
1723 }
1724 /* One of bases is not of complete type. */
1725 if (!TYPE_BINFO (type1) != !TYPE_BINFO (type2))
1726 {
1727 /* If we have a polymorphic type info specified for TYPE1
1728 but not for TYPE2 we possibly missed a base when recording
1729 VAL->type earlier.
1730 Be sure this does not happen. */
1731 if (TYPE_BINFO (type1)
1732 && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1733 && !build_bases)
1734 odr_must_violate = true;
1735 break;
1736 }
1737 /* One base is polymorphic and the other not.
1738 This ought to be diagnosed earlier, but do not ICE in the
1739 checking bellow. */
1740 else if (TYPE_BINFO (type1)
1741 && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1742 != polymorphic_type_binfo_p (TYPE_BINFO (type2)))
1743 {
1744 if (!warned && !val->odr_violated)
1745 warn_odr (type, val->type, NULL, NULL,
1746 !warned, &warned,
1747 "a base of the type is polymorphic only in one "
1748 "translation unit");
1749 base_mismatch = true;
1750 break;
1751 }
1752 }
1753 if (base_mismatch)
1754 {
1755 merge = false;
1756 odr_violation_reported = true;
1757 val->odr_violated = true;
1758
1759 if (symtab->dump_file)
1760 {
1761 fprintf (symtab->dump_file, "ODR base violation\n");
1762
1763 print_node (symtab->dump_file, "", val->type, 0);
1764 putc ('\n',symtab->dump_file);
1765 print_node (symtab->dump_file, "", type, 0);
1766 putc ('\n',symtab->dump_file);
1767 }
1768 }
1769 }
1770
1771 /* Next compare memory layout.
1772 The DECL_SOURCE_LOCATIONs in this invocation came from LTO streaming.
1773 We must apply the location cache to ensure that they are valid
1774 before we can pass them to odr_types_equivalent_p (PR lto/83121). */
1775 if (lto_location_cache::current_cache)
1776 lto_location_cache::current_cache->apply_location_cache ();
1777 /* As a special case we stream mangled names of integer types so we can see
1778 if they are believed to be the same even though they have different
1779 representations. Avoid bogus warnings on mismatches in these. */
1780 if (TREE_CODE (type) != INTEGER_TYPE
1781 && TREE_CODE (val->type) != INTEGER_TYPE
1782 && !odr_types_equivalent_p (val->type, type,
1783 !flag_ltrans && !val->odr_violated && !warned,
1784 &warned, &visited,
1785 DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
1786 DECL_SOURCE_LOCATION (TYPE_NAME (type))))
1787 {
1788 merge = false;
1789 odr_violation_reported = true;
1790 val->odr_violated = true;
1791 }
1792 gcc_assert (val->odr_violated || !odr_must_violate);
1793 /* Sanity check that all bases will be built the same way again. */
1794 if (flag_checking
1795 && COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1796 && TREE_CODE (val->type) == RECORD_TYPE
1797 && TREE_CODE (type) == RECORD_TYPE
1798 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1799 && !val->odr_violated
1800 && !base_mismatch && val->bases.length ())
1801 {
1802 unsigned int num_poly_bases = 0;
1803 unsigned int j;
1804
1805 for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1806 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1807 (TYPE_BINFO (type), i)))
1808 num_poly_bases++;
1809 gcc_assert (num_poly_bases == val->bases.length ());
1810 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type));
1811 i++)
1812 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1813 (TYPE_BINFO (type), i)))
1814 {
1815 odr_type base = get_odr_type
1816 (BINFO_TYPE
1817 (BINFO_BASE_BINFO (TYPE_BINFO (type),
1818 i)),
1819 true);
1820 gcc_assert (val->bases[j] == base);
1821 j++;
1822 }
1823 }
1824
1825
1826 /* Regularize things a little. During LTO the same types may come with
1827 different BINFOs: either because their virtual table was
1828 not merged by tree merging and only later at decl merging, or
1829 because one type comes with an external vtable while the other
1830 comes with an internal one. We want to merge equivalent binfos to
1831 conserve memory and streaming overhead.
1832
1833 The external vtables are more harmful: they contain references
1834 to external declarations of methods that may be defined in the
1835 merged LTO unit. For this reason we absolutely need to remove
1836 them and replace them by internal variants. Not doing so will lead
1837 to incomplete answers from possible_polymorphic_call_targets.
1838
1839 FIXME: disabled for now; because ODR types are now built during
1840 streaming in, the variants do not need to be linked to the type
1841 yet. We need to do the merging in a cleanup pass, to be implemented
1842 soon. */
1843 if (!flag_ltrans && merge
1844 && 0
1845 && TREE_CODE (val->type) == RECORD_TYPE
1846 && TREE_CODE (type) == RECORD_TYPE
1847 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1848 && TYPE_MAIN_VARIANT (type) == type
1849 && TYPE_MAIN_VARIANT (val->type) == val->type
1850 && BINFO_VTABLE (TYPE_BINFO (val->type))
1851 && BINFO_VTABLE (TYPE_BINFO (type)))
1852 {
1853 tree master_binfo = TYPE_BINFO (val->type);
1854 tree v1 = BINFO_VTABLE (master_binfo);
1855 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
1856
1857 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
1858 {
1859 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
1860 && operand_equal_p (TREE_OPERAND (v1, 1),
1861 TREE_OPERAND (v2, 1), 0));
1862 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
1863 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
1864 }
1865 gcc_assert (DECL_ASSEMBLER_NAME (v1)
1866 == DECL_ASSEMBLER_NAME (v2));
1867
1868 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
1869 {
1870 unsigned int i;
1871
1872 set_type_binfo (val->type, TYPE_BINFO (type));
1873 for (i = 0; i < val->types->length (); i++)
1874 {
1875 if (TYPE_BINFO ((*val->types)[i])
1876 == master_binfo)
1877 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
1878 }
1879 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
1880 }
1881 else
1882 set_type_binfo (type, master_binfo);
1883 }
1884 return build_bases;
1885 }
1886
1887 /* REF is an OBJ_TYPE_REF; return the class the ref corresponds to.
1888 FOR_DUMP_P is true when being called from the dump routines. */
1889
1890 tree
1891 obj_type_ref_class (const_tree ref, bool for_dump_p)
1892 {
1893 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
1894 ref = TREE_TYPE (ref);
1895 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
1896 ref = TREE_TYPE (ref);
1897 /* We look for the type THIS points to. ObjC also builds
1898 OBJ_TYPE_REF with non-method calls; their first parameter
1899 ID, however, also corresponds to the class type. */
1900 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
1901 || TREE_CODE (ref) == FUNCTION_TYPE);
1902 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
1903 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
1904 tree ret = TREE_TYPE (ref);
1905 if (!in_lto_p && !TYPE_STRUCTURAL_EQUALITY_P (ret))
1906 ret = TYPE_CANONICAL (ret);
1907 else if (odr_type ot = get_odr_type (ret, !for_dump_p))
1908 ret = ot->type;
1909 else
1910 gcc_assert (for_dump_p);
1911 return ret;
1912 }
1913
1914 /* Get the ODR type hash entry for TYPE. If INSERT is true, create
1915 a new entry if none exists yet. */
1916
1917 odr_type
1918 get_odr_type (tree type, bool insert)
1919 {
1920 odr_type_d **slot = NULL;
1921 odr_type val = NULL;
1922 hashval_t hash;
1923 bool build_bases = false;
1924 bool insert_to_odr_array = false;
1925 int base_id = -1;
1926
1927 type = TYPE_MAIN_VARIANT (type);
1928 if (!in_lto_p && !TYPE_STRUCTURAL_EQUALITY_P (type))
1929 type = TYPE_CANONICAL (type);
1930
1931 gcc_checking_assert (can_be_name_hashed_p (type));
1932
1933 hash = hash_odr_name (type);
1934 slot = odr_hash->find_slot_with_hash (type, hash,
1935 insert ? INSERT : NO_INSERT);
1936
1937 if (!slot)
1938 return NULL;
1939
1940 /* See if we already have entry for type. */
1941 if (*slot)
1942 {
1943 val = *slot;
1944
1945 if (val->type != type && insert
1946 && (!val->types_set || !val->types_set->add (type)))
1947 build_bases = add_type_duplicate (val, type);
1948 }
1949 else
1950 {
1951 val = ggc_cleared_alloc<odr_type_d> ();
1952 val->type = type;
1953 val->bases = vNULL;
1954 val->derived_types = vNULL;
1955 if (type_with_linkage_p (type))
1956 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
1957 else
1958 val->anonymous_namespace = 0;
1959 build_bases = COMPLETE_TYPE_P (val->type);
1960 insert_to_odr_array = true;
1961 *slot = val;
1962 }
1963
1964 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1965 && type_with_linkage_p (type)
1966 && type == TYPE_MAIN_VARIANT (type))
1967 {
1968 tree binfo = TYPE_BINFO (type);
1969 unsigned int i;
1970
1971 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);
1972
1973 val->all_derivations_known = type_all_derivations_known_p (type);
1974 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
1975 /* For now record only polymorphic types; others are
1976 pointless for devirtualization and we cannot precisely
1977 determine ODR equivalency of them during LTO. */
1978 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
1979 {
1980 tree base_type = BINFO_TYPE (BINFO_BASE_BINFO (binfo, i));
1981 odr_type base = get_odr_type (base_type, true);
1982 gcc_assert (TYPE_MAIN_VARIANT (base_type) == base_type);
1983 base->derived_types.safe_push (val);
1984 val->bases.safe_push (base);
1985 if (base->id > base_id)
1986 base_id = base->id;
1987 }
1988 }
1989 /* Ensure that the type always appears after its bases. */
1990 if (insert_to_odr_array)
1991 {
1992 if (odr_types_ptr)
1993 val->id = odr_types.length ();
1994 vec_safe_push (odr_types_ptr, val);
1995 }
1996 else if (base_id > val->id)
1997 {
1998 odr_types[val->id] = 0;
1999 /* Be sure we did not record any derived types; these may need
2000 renumbering too. */
2001 gcc_assert (val->derived_types.length() == 0);
2002 val->id = odr_types.length ();
2003 vec_safe_push (odr_types_ptr, val);
2004 }
2005 return val;
2006 }
2007
2008 /* Return the type that prevailed over TYPE in the ODR type hash.
2009 Be careful and punt on ODR violations. */
2010
2011 tree
2012 prevailing_odr_type (tree type)
2013 {
2014 odr_type t = get_odr_type (type, false);
2015 if (!t || t->odr_violated)
2016 return type;
2017 return t->type;
2018 }
2019
2020 /* Set tbaa_enabled flag for TYPE. */
2021
2022 void
2023 enable_odr_based_tbaa (tree type)
2024 {
2025 odr_type t = get_odr_type (type, true);
2026 t->tbaa_enabled = true;
2027 }
2028
2029 /* Return true if the canonical type of TYPE is determined using its ODR name. */
2030
2031 bool
2032 odr_based_tbaa_p (const_tree type)
2033 {
2034 if (!RECORD_OR_UNION_TYPE_P (type))
2035 return false;
2036 if (!odr_hash)
2037 return false;
2038 odr_type t = get_odr_type (const_cast <tree> (type), false);
2039 if (!t || !t->tbaa_enabled)
2040 return false;
2041 return true;
2042 }
2043
2044 /* Set TYPE_CANONICAL of type and all its variants and duplicates
2045 to CANONICAL. */
2046
2047 void
2048 set_type_canonical_for_odr_type (tree type, tree canonical)
2049 {
2050 odr_type t = get_odr_type (type, false);
2051 unsigned int i;
2052 tree tt;
2053
2054 for (tree t2 = t->type; t2; t2 = TYPE_NEXT_VARIANT (t2))
2055 TYPE_CANONICAL (t2) = canonical;
2056 if (t->types)
2057 FOR_EACH_VEC_ELT (*t->types, i, tt)
2058 for (tree t2 = tt; t2; t2 = TYPE_NEXT_VARIANT (t2))
2059 TYPE_CANONICAL (t2) = canonical;
2060 }
2061
2062 /* Return true if we reported some ODR violation on TYPE. */
2063
2064 bool
2065 odr_type_violation_reported_p (tree type)
2066 {
2067 return get_odr_type (type, false)->odr_violated;
2068 }
2069
2070 /* Add TYPE to the ODR type hash. */
2071
2072 void
2073 register_odr_type (tree type)
2074 {
2075 if (!odr_hash)
2076 odr_hash = new odr_hash_type (23);
2077 if (type == TYPE_MAIN_VARIANT (type))
2078 {
2079 /* To get ODR warnings right, first register all sub-types. */
2080 if (RECORD_OR_UNION_TYPE_P (type)
2081 && COMPLETE_TYPE_P (type))
2082 {
2083 /* Limit recursion on types which are already registered. */
2084 odr_type ot = get_odr_type (type, false);
2085 if (ot
2086 && (ot->type == type
2087 || (ot->types_set
2088 && ot->types_set->contains (type))))
2089 return;
2090 for (tree f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
2091 if (TREE_CODE (f) == FIELD_DECL)
2092 {
2093 tree subtype = TREE_TYPE (f);
2094
2095 while (TREE_CODE (subtype) == ARRAY_TYPE)
2096 subtype = TREE_TYPE (subtype);
2097 if (type_with_linkage_p (TYPE_MAIN_VARIANT (subtype)))
2098 register_odr_type (TYPE_MAIN_VARIANT (subtype));
2099 }
2100 if (TYPE_BINFO (type))
2101 for (unsigned int i = 0;
2102 i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
2103 register_odr_type (BINFO_TYPE (BINFO_BASE_BINFO
2104 (TYPE_BINFO (type), i)));
2105 }
2106 get_odr_type (type, true);
2107 }
2108 }
2109
2110 /* Return true if type is known to have no derivations. */
2111
2112 bool
2113 type_known_to_have_no_derivations_p (tree t)
2114 {
2115 return (type_all_derivations_known_p (t)
2116 && (TYPE_FINAL_P (t)
2117 || (odr_hash
2118 && !get_odr_type (t, true)->derived_types.length())));
2119 }
2120
2121 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
2122 recursive printing. */
2123
2124 static void
2125 dump_odr_type (FILE *f, odr_type t, int indent=0)
2126 {
2127 unsigned int i;
2128 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
2129 print_generic_expr (f, t->type, TDF_SLIM);
2130 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
2131 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
2132 if (TYPE_NAME (t->type))
2133 {
2134 if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t->type)))
2135 fprintf (f, "%*s mangled name: %s\n", indent * 2, "",
2136 IDENTIFIER_POINTER
2137 (DECL_ASSEMBLER_NAME (TYPE_NAME (t->type))));
2138 }
2139 if (t->bases.length ())
2140 {
2141 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
2142 for (i = 0; i < t->bases.length (); i++)
2143 fprintf (f, " %i", t->bases[i]->id);
2144 fprintf (f, "\n");
2145 }
2146 if (t->derived_types.length ())
2147 {
2148 fprintf (f, "%*s derived types:\n", indent * 2, "");
2149 for (i = 0; i < t->derived_types.length (); i++)
2150 dump_odr_type (f, t->derived_types[i], indent + 1);
2151 }
2152 fprintf (f, "\n");
2153 }
2154
2155 /* Dump the type inheritance graph. */
2156
2157 static void
2158 dump_type_inheritance_graph (FILE *f)
2159 {
2160 unsigned int i;
2161 unsigned int num_all_types = 0, num_types = 0, num_duplicates = 0;
2162 if (!odr_types_ptr)
2163 return;
2164 fprintf (f, "\n\nType inheritance graph:\n");
2165 for (i = 0; i < odr_types.length (); i++)
2166 {
2167 if (odr_types[i] && odr_types[i]->bases.length () == 0)
2168 dump_odr_type (f, odr_types[i]);
2169 }
2170 for (i = 0; i < odr_types.length (); i++)
2171 {
2172 if (!odr_types[i])
2173 continue;
2174
2175 num_all_types++;
2176 if (!odr_types[i]->types || !odr_types[i]->types->length ())
2177 continue;
2178
2179 /* To aid ODR warnings we also mangle integer types but do
2180 not consider duplicates there. */
2181 if (TREE_CODE (odr_types[i]->type) == INTEGER_TYPE)
2182 continue;
2183
2184 /* It is normal to have one duplicate and one normal variant. */
2185 if (odr_types[i]->types->length () == 1
2186 && COMPLETE_TYPE_P (odr_types[i]->type)
2187 && !COMPLETE_TYPE_P ((*odr_types[i]->types)[0]))
2188 continue;
2189
2190 num_types ++;
2191
2192 unsigned int j;
2193 fprintf (f, "Duplicate tree types for odr type %i\n", i);
2194 print_node (f, "", odr_types[i]->type, 0);
2195 print_node (f, "", TYPE_NAME (odr_types[i]->type), 0);
2196 putc ('\n',f);
2197 for (j = 0; j < odr_types[i]->types->length (); j++)
2198 {
2199 tree t;
2200 num_duplicates ++;
2201 fprintf (f, "duplicate #%i\n", j);
2202 print_node (f, "", (*odr_types[i]->types)[j], 0);
2203 t = (*odr_types[i]->types)[j];
2204 while (TYPE_P (t) && TYPE_CONTEXT (t))
2205 {
2206 t = TYPE_CONTEXT (t);
2207 print_node (f, "", t, 0);
2208 }
2209 print_node (f, "", TYPE_NAME ((*odr_types[i]->types)[j]), 0);
2210 putc ('\n',f);
2211 }
2212 }
2213 fprintf (f, "Out of %i types there are %i types with duplicates; "
2214 "%i duplicates overall\n", num_all_types, num_types, num_duplicates);
2215 }
2216
2217 /* Save some WPA->ltrans streaming by freeing stuff needed only for good
2218 ODR warnings.
2219 We make TYPE_DECLs no longer point back
2220 to the type (pointing back is needed to keep them in the same SCC and to
2221 preserve location information for output warnings) and subsequently we
2222 make all TYPE_DECLs of the same assembler name equivalent. */
2223
2224 static void
2225 free_odr_warning_data ()
2226 {
2227 static bool odr_data_freed = false;
2228
2229 if (odr_data_freed || !flag_wpa || !odr_types_ptr)
2230 return;
2231
2232 odr_data_freed = true;
2233
2234 for (unsigned int i = 0; i < odr_types.length (); i++)
2235 if (odr_types[i])
2236 {
2237 tree t = odr_types[i]->type;
2238
2239 TREE_TYPE (TYPE_NAME (t)) = void_type_node;
2240
2241 if (odr_types[i]->types)
2242 for (unsigned int j = 0; j < odr_types[i]->types->length (); j++)
2243 {
2244 tree td = (*odr_types[i]->types)[j];
2245
2246 TYPE_NAME (td) = TYPE_NAME (t);
2247 }
2248 }
2250 }
2251
2252 /* Initialize IPA devirt and build inheritance tree graph. */
2253
2254 void
2255 build_type_inheritance_graph (void)
2256 {
2257 struct symtab_node *n;
2258 FILE *inheritance_dump_file;
2259 dump_flags_t flags;
2260
2261 if (odr_hash)
2262 {
2263 free_odr_warning_data ();
2264 return;
2265 }
2266 timevar_push (TV_IPA_INHERITANCE);
2267 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
2268 odr_hash = new odr_hash_type (23);
2269
2270 /* We reconstruct the graph starting from types of all methods seen in the
2271 unit. */
2272 FOR_EACH_SYMBOL (n)
2273 if (is_a <cgraph_node *> (n)
2274 && DECL_VIRTUAL_P (n->decl)
2275 && n->real_symbol_p ())
2276 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
2277
2278 /* Look also for virtual tables of types that do not define any methods.
2279
2280 We need it in a case where class B has virtual base of class A
2281 re-defining its virtual method and there is class C with no virtual
2282 methods with B as virtual base.
2283
2284 Here we output B's virtual method in two variants: one for non-virtual
2285 and one for virtual inheritance. B's virtual table has the non-virtual
2286 version, while C's has the virtual one.
2287
2288 For this reason we need to know about C in order to include both
2289 variants of B. More correctly, record_target_from_binfo should
2290 add both variants of the method when walking B, but we have no
2291 link between them.
2292
2293 We rely on the fact that either the method is exported and thus we
2294 assume it is called externally, or C is in an anonymous namespace and
2295 thus we will see the vtable. */
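 /* A sketch of the scenario described above (illustrative only):

	struct A { virtual void f (); };
	struct B : virtual A { void f (); };
	struct C : virtual B { };

    B::f is output in a non-virtual-inheritance variant used by B's own
    virtual table and in a virtual-inheritance variant reachable only
    through C's virtual table; walking C's vtable records the latter.  */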
2296
2297 else if (is_a <varpool_node *> (n)
2298 && DECL_VIRTUAL_P (n->decl)
2299 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
2300 && TYPE_BINFO (DECL_CONTEXT (n->decl))
2301 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
2302 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
2303 if (inheritance_dump_file)
2304 {
2305 dump_type_inheritance_graph (inheritance_dump_file);
2306 dump_end (TDI_inheritance, inheritance_dump_file);
2307 }
2308 free_odr_warning_data ();
2309 timevar_pop (TV_IPA_INHERITANCE);
2310 }
2311
2312 /* Return true if N has a reference from a live virtual table
2313 (and thus can be a destination of a polymorphic call).
2314 Be conservatively correct when the callgraph is not built or
2315 if the method may be referred to externally. */
2316
2317 static bool
2318 referenced_from_vtable_p (struct cgraph_node *node)
2319 {
2320 int i;
2321 struct ipa_ref *ref;
2322 bool found = false;
2323
2324 if (node->externally_visible
2325 || DECL_EXTERNAL (node->decl)
2326 || node->used_from_other_partition)
2327 return true;
2328
2329 /* Keep this test constant time.
2330 It is unlikely this can happen except for the case where speculative
2331 devirtualization introduced many speculative edges to this node.
2332 In this case the target is very likely alive anyway. */
2333 if (node->ref_list.referring.length () > 100)
2334 return true;
2335
2336 /* We need references built. */
2337 if (symtab->state <= CONSTRUCTION)
2338 return true;
2339
2340 for (i = 0; node->iterate_referring (i, ref); i++)
2341 if ((ref->use == IPA_REF_ALIAS
2342 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
2343 || (ref->use == IPA_REF_ADDR
2344 && VAR_P (ref->referring->decl)
2345 && DECL_VIRTUAL_P (ref->referring->decl)))
2346 {
2347 found = true;
2348 break;
2349 }
2350 return found;
2351 }
2352
2353 /* Return true if TARGET is cxa_pure_virtual. */
2354
2355 static bool
2356 is_cxa_pure_virtual_p (tree target)
2357 {
2358 return target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE
2359 && DECL_NAME (target)
2360 && id_equal (DECL_NAME (target),
2361 "__cxa_pure_virtual");
2362 }
2363
2364 /* If TARGET has an associated node, record it in the NODES array.
2365 CAN_REFER specifies whether the program can refer to the target directly.
2366 If TARGET is unknown (NULL) or it cannot be inserted (for example because
2367 its body was already removed and there is no way to refer to it), clear
2368 COMPLETEP. */
2369
2370 static void
2371 maybe_record_node (vec <cgraph_node *> &nodes,
2372 tree target, hash_set<tree> *inserted,
2373 bool can_refer,
2374 bool *completep)
2375 {
2376 struct cgraph_node *target_node, *alias_target;
2377 enum availability avail;
2378 bool pure_virtual = is_cxa_pure_virtual_p (target);
2379
2380 /* __builtin_unreachable does not need to be added into the
2381 list of targets; the runtime effect of calling it is undefined.
2382 Only "real" virtual methods should be accounted for. */
2383 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE && !pure_virtual)
2384 return;
2385
2386 if (!can_refer)
2387 {
2388 /* The only case when a method of an anonymous namespace becomes unreferable
2389 is when we completely optimized it out. */
2390 if (flag_ltrans
2391 || !target
2392 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
2393 *completep = false;
2394 return;
2395 }
2396
2397 if (!target)
2398 return;
2399
2400 target_node = cgraph_node::get (target);
2401
2402 /* Prefer alias target over aliases, so we do not get confused by
2403 fake duplicates. */
2404 if (target_node)
2405 {
2406 alias_target = target_node->ultimate_alias_target (&avail);
2407 if (target_node != alias_target
2408 && avail >= AVAIL_AVAILABLE
2409 && target_node->get_availability ())
2410 target_node = alias_target;
2411 }
2412
2413 /* A method can only be called by a polymorphic call if any
2414 of the vtables referring to it are alive.
2415
2416 While this holds for non-anonymous functions, too, there are
2417 cases where we want to keep them in the list; for example
2418 inline functions with -fno-weak are static, but we still
2419 may devirtualize them when the instance comes from another unit.
2420 The same holds for LTO.
2421
2422 Currently we ignore these functions in speculative devirtualization.
2423 ??? Maybe it would make sense to be more aggressive for LTO even
2424 elsewhere. */
2425 if (!flag_ltrans
2426 && !pure_virtual
2427 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
2428 && (!target_node
2429 || !referenced_from_vtable_p (target_node)))
2430 ;
2431 /* See if TARGET is a useful function we can deal with. */
2432 else if (target_node != NULL
2433 && (TREE_PUBLIC (target)
2434 || DECL_EXTERNAL (target)
2435 || target_node->definition)
2436 && target_node->real_symbol_p ())
2437 {
2438 gcc_assert (!target_node->inlined_to);
2439 gcc_assert (target_node->real_symbol_p ());
2440 /* When sanitizing, do not assume that __cxa_pure_virtual is not called
2441 by a valid program. */
2442 if (flag_sanitize & SANITIZE_UNREACHABLE)
2443 ;
2444 /* Only add a pure virtual if it is the only possible target. This way
2445 we will preserve the diagnostics about pure virtuals called in many
2446 cases without disabling optimization in others. */
2447 else if (pure_virtual)
2448 {
2449 if (nodes.length ())
2450 return;
2451 }
2452 /* If we found a real target, take away cxa_pure_virtual. */
2453 else if (!pure_virtual && nodes.length () == 1
2454 && is_cxa_pure_virtual_p (nodes[0]->decl))
2455 nodes.pop ();
2456 if (pure_virtual && nodes.length ())
2457 return;
2458 if (!inserted->add (target))
2459 {
2460 cached_polymorphic_call_targets->add (target_node);
2461 nodes.safe_push (target_node);
2462 }
2463 }
2464 else if (!completep)
2465 ;
2466 /* We have a definition of __cxa_pure_virtual that is not accessible (it is
2467 optimized out or partitioned to another unit) so we cannot add it. When
2468 not sanitizing, there is nothing to do.
2469 Otherwise declare the list incomplete. */
2470 else if (pure_virtual)
2471 {
2472 if (flag_sanitize & SANITIZE_UNREACHABLE)
2473 *completep = false;
2474 }
2475 else if (flag_ltrans
2476 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
2477 *completep = false;
2478 }
2479
2480 /* See if BINFO's type matches OUTER_TYPE. If so, look up the
2481 BINFO of the subtype of OTR_TYPE at OFFSET and in that BINFO find
2482 the method in the vtable and insert the method into the NODES array,
2483 or into BASES_TO_CONSIDER if this array is non-NULL.
2484 Otherwise recurse to base BINFOs.
2485 This matches what get_binfo_at_offset does, but with the offset
2486 being unknown.
2487
2488 TYPE_BINFOS is a stack of BINFOs of types with a defined
2489 virtual table seen on the way from the class type to BINFO.
2490
2491 MATCHED_VTABLES tracks virtual tables we already did a lookup
2492 of the virtual function in. INSERTED tracks nodes we already
2493 inserted.
2494
2495 ANONYMOUS is true if BINFO is part of an anonymous namespace.
2496
2497 Clear COMPLETEP when we hit an unreferable target.
2498 */
2499
2500 static void
2501 record_target_from_binfo (vec <cgraph_node *> &nodes,
2502 vec <tree> *bases_to_consider,
2503 tree binfo,
2504 tree otr_type,
2505 vec <tree> &type_binfos,
2506 HOST_WIDE_INT otr_token,
2507 tree outer_type,
2508 HOST_WIDE_INT offset,
2509 hash_set<tree> *inserted,
2510 hash_set<tree> *matched_vtables,
2511 bool anonymous,
2512 bool *completep)
2513 {
2514 tree type = BINFO_TYPE (binfo);
2515 int i;
2516 tree base_binfo;
2517
2518
2519 if (BINFO_VTABLE (binfo))
2520 type_binfos.safe_push (binfo);
2521 if (types_same_for_odr (type, outer_type))
2522 {
2523 int i;
2524 tree type_binfo = NULL;
2525
2526 /* Look up the BINFO with virtual table. For normal types it is always
2527 the last binfo on the stack. */
2528 for (i = type_binfos.length () - 1; i >= 0; i--)
2529 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
2530 {
2531 type_binfo = type_binfos[i];
2532 break;
2533 }
2534 if (BINFO_VTABLE (binfo))
2535 type_binfos.pop ();
2536 /* If this is a duplicated BINFO for a base shared by virtual inheritance,
2537 we may not have its associated vtable. This is not a problem, since
2538 we will walk it on the other path. */
2539 if (!type_binfo)
2540 return;
2541 tree inner_binfo = get_binfo_at_offset (type_binfo,
2542 offset, otr_type);
2543 if (!inner_binfo)
2544 {
2545 gcc_assert (odr_violation_reported);
2546 return;
2547 }
2548 /* For types in an anonymous namespace first check if the respective vtable
2549 is alive. If not, we know the type's methods can't be called. */
2550 if (!flag_ltrans && anonymous)
2551 {
2552 tree vtable = BINFO_VTABLE (inner_binfo);
2553 varpool_node *vnode;
2554
2555 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
2556 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
2557 vnode = varpool_node::get (vtable);
2558 if (!vnode || !vnode->definition)
2559 return;
2560 }
2561 gcc_assert (inner_binfo);
2562 if (bases_to_consider
2563 ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
2564 : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
2565 {
2566 bool can_refer;
2567 tree target = gimple_get_virt_method_for_binfo (otr_token,
2568 inner_binfo,
2569 &can_refer);
2570 if (!bases_to_consider)
2571 maybe_record_node (nodes, target, inserted, can_refer, completep);
2572 /* Destructors are never called via construction vtables. */
2573 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
2574 bases_to_consider->safe_push (target);
2575 }
2576 return;
2577 }
2578
2579 /* Walk bases. */
2580 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2581 /* Walking bases that have no virtual method is a pointless exercise. */
2582 if (polymorphic_type_binfo_p (base_binfo))
2583 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
2584 type_binfos,
2585 otr_token, outer_type, offset, inserted,
2586 matched_vtables, anonymous, completep);
2587 if (BINFO_VTABLE (binfo))
2588 type_binfos.pop ();
2589 }
2590
2591 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
2592 of TYPE, insert them into NODES, and recurse into derived types.
2593 INSERTED is used to avoid duplicate insertions of methods into NODES.
2594 MATCHED_VTABLES is used to avoid walking vtables more than once.
2595 Clear COMPLETEP if an unreferable target is found.
2596
2597 If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
2598 all cases where BASE_SKIPPED is true (because the base is an abstract
2599 class). */
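 /* An illustrative (hypothetical) case for BASES_TO_CONSIDER:

	struct A { virtual void f () = 0; virtual void g (); };
	struct B : A { void f (); };

    A is abstract and never instantiated on its own, yet while B's
    constructor runs the A subobject's constructor, the vptr temporarily
    points to A's vtable, so A::g is still a possible call target. Such
    bases are recorded and revisited when maybe_in_construction is set.  */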
2600
2601 static void
2602 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
2603 hash_set<tree> *inserted,
2604 hash_set<tree> *matched_vtables,
2605 tree otr_type,
2606 odr_type type,
2607 HOST_WIDE_INT otr_token,
2608 tree outer_type,
2609 HOST_WIDE_INT offset,
2610 bool *completep,
2611 vec <tree> &bases_to_consider,
2612 bool consider_construction)
2613 {
2614 tree binfo = TYPE_BINFO (type->type);
2615 unsigned int i;
2616 auto_vec <tree, 8> type_binfos;
2617 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
2618
2619 /* We may need to consider types w/o instances because of possible derived
2620 types using their methods either directly or via construction vtables.
2621 We are safe to skip them when all derivations are known, since we will
2622 handle them later.
2623 This is done by recording them into the BASES_TO_CONSIDER array. */
2624 if (possibly_instantiated || consider_construction)
2625 {
2626 record_target_from_binfo (nodes,
2627 (!possibly_instantiated
2628 && type_all_derivations_known_p (type->type))
2629 ? &bases_to_consider : NULL,
2630 binfo, otr_type, type_binfos, otr_token,
2631 outer_type, offset,
2632 inserted, matched_vtables,
2633 type->anonymous_namespace, completep);
2634 }
2635 for (i = 0; i < type->derived_types.length (); i++)
2636 possible_polymorphic_call_targets_1 (nodes, inserted,
2637 matched_vtables,
2638 otr_type,
2639 type->derived_types[i],
2640 otr_token, outer_type, offset, completep,
2641 bases_to_consider, consider_construction);
2642 }
2643
2644 /* Cache of queries for polymorphic call targets.
2645
2646 Enumerating all call targets may get expensive when there are many
2647 polymorphic calls in the program, so we memoize all the previous
2648 queries and avoid duplicated work. */
2649
2650 class polymorphic_call_target_d
2651 {
2652 public:
2653 HOST_WIDE_INT otr_token;
2654 ipa_polymorphic_call_context context;
2655 odr_type type;
2656 vec <cgraph_node *> targets;
2657 tree decl_warning;
2658 int type_warning;
2659 unsigned int n_odr_types;
2660 bool complete;
2661 bool speculative;
2662 };
2663
2664 /* Polymorphic call target cache helpers. */
2665
2666 struct polymorphic_call_target_hasher
2667 : pointer_hash <polymorphic_call_target_d>
2668 {
2669 static inline hashval_t hash (const polymorphic_call_target_d *);
2670 static inline bool equal (const polymorphic_call_target_d *,
2671 const polymorphic_call_target_d *);
2672 static inline void remove (polymorphic_call_target_d *);
2673 };
2674
2675 /* Return the computed hashcode for ODR_QUERY. */
2676
2677 inline hashval_t
2678 polymorphic_call_target_hasher::hash (const polymorphic_call_target_d *odr_query)
2679 {
2680 inchash::hash hstate (odr_query->otr_token);
2681
2682 hstate.add_hwi (odr_query->type->id);
2683 hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
2684 hstate.add_hwi (odr_query->context.offset);
2685 hstate.add_hwi (odr_query->n_odr_types);
2686
2687 if (odr_query->context.speculative_outer_type)
2688 {
2689 hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
2690 hstate.add_hwi (odr_query->context.speculative_offset);
2691 }
2692 hstate.add_flag (odr_query->speculative);
2693 hstate.add_flag (odr_query->context.maybe_in_construction);
2694 hstate.add_flag (odr_query->context.maybe_derived_type);
2695 hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
2696 hstate.commit_flag ();
2697 return hstate.end ();
2698 }
2699
2700 /* Compare cache entries T1 and T2. */
2701
2702 inline bool
2703 polymorphic_call_target_hasher::equal (const polymorphic_call_target_d *t1,
2704 const polymorphic_call_target_d *t2)
2705 {
2706 return (t1->type == t2->type && t1->otr_token == t2->otr_token
2707 && t1->speculative == t2->speculative
2708 && t1->context.offset == t2->context.offset
2709 && t1->context.speculative_offset == t2->context.speculative_offset
2710 && t1->context.outer_type == t2->context.outer_type
2711 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
2712 && t1->context.maybe_in_construction
2713 == t2->context.maybe_in_construction
2714 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
2715 && (t1->context.speculative_maybe_derived_type
2716 == t2->context.speculative_maybe_derived_type)
2717 /* Adding new type may affect outcome of target search. */
2718 && t1->n_odr_types == t2->n_odr_types);
2719 }
2720
2721 /* Remove entry in polymorphic call target cache hash. */
2722
2723 inline void
2724 polymorphic_call_target_hasher::remove (polymorphic_call_target_d *v)
2725 {
2726 v->targets.release ();
2727 free (v);
2728 }
2729
2730 /* Polymorphic call target query cache. */
2731
2732 typedef hash_table<polymorphic_call_target_hasher>
2733 polymorphic_call_target_hash_type;
2734 static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
2735
2736 /* Destroy polymorphic call target query cache. */
2737
2738 static void
2739 free_polymorphic_call_targets_hash ()
2740 {
2741 if (cached_polymorphic_call_targets)
2742 {
2743 delete polymorphic_call_target_hash;
2744 polymorphic_call_target_hash = NULL;
2745 delete cached_polymorphic_call_targets;
2746 cached_polymorphic_call_targets = NULL;
2747 }
2748 }
2749
2750 /* Force rebuilding the type inheritance graph from scratch.
2751 This is used to make sure that we do not keep references to types
2752 which were not visible to free_lang_data. */
2753
2754 void
2755 rebuild_type_inheritance_graph ()
2756 {
2757 if (!odr_hash)
2758 return;
2759 delete odr_hash;
2760 odr_hash = NULL;
2761 odr_types_ptr = NULL;
2762 free_polymorphic_call_targets_hash ();
2763 }
2764
2765 /* When a virtual function is removed, we may need to flush the cache. */
2766
2767 static void
2768 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
2769 {
2770 if (cached_polymorphic_call_targets
2771 && !thunk_expansion
2772 && cached_polymorphic_call_targets->contains (n))
2773 free_polymorphic_call_targets_hash ();
2774 }
2775
2776 /* Look up the base of BINFO that has virtual table VTABLE at OFFSET. */
2777
2778 tree
2779 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2780 tree vtable)
2781 {
2782 tree v = BINFO_VTABLE (binfo);
2783 int i;
2784 tree base_binfo;
2785 unsigned HOST_WIDE_INT this_offset;
2786
2787 if (v)
2788 {
2789 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2790 gcc_unreachable ();
2791
2792 if (offset == this_offset
2793 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2794 return binfo;
2795 }
2796
2797 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2798 if (polymorphic_type_binfo_p (base_binfo))
2799 {
2800 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2801 if (base_binfo)
2802 return base_binfo;
2803 }
2804 return NULL;
2805 }
2806
2807 /* T is a known constant value of a virtual table pointer.
2808 Store the virtual table to V and its offset to OFFSET.
2809 Return false if T does not look like a virtual table reference. */
2810
2811 bool
2812 vtable_pointer_value_to_vtable (const_tree t, tree *v,
2813 unsigned HOST_WIDE_INT *offset)
2814 {
2815 /* We expect &MEM[(void *)&virtual_table + 16B].
2816 We obtain the object's BINFO from the context of the virtual table.
2817 This one contains a pointer to the virtual table represented via
2818 POINTER_PLUS_EXPR. Verify that this pointer matches what
2819 we propagated through.
2820
2821 In the case of virtual inheritance, the virtual tables may
2822 be nested, i.e. the offset may be different from 16 and we may
2823 need to dive into the type representation. */
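 /* A sketch of where such a constant typically comes from (assuming an
    LP64 Itanium C++ ABI target, where the two pointer-sized slots before
    the first virtual method are the offset-to-top and RTTI entries):

	struct A { virtual void f (); } a;

    A's constructor stores

	a._vptr.A = &MEM[(void *)&_ZTV1A + 16B];

    and propagating that constant is what brings us here.  */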
2824 if (TREE_CODE (t) == ADDR_EXPR
2825 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2826 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2827 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2828 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2829 == VAR_DECL)
2830 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2831 (TREE_OPERAND (t, 0), 0), 0)))
2832 {
2833 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2834 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2835 return true;
2836 }
2837
2838 /* An alternative representation, used by the C++ frontend, is
2839 POINTER_PLUS_EXPR. We need to handle it when T comes from a static
2840 variable initializer or a BINFO. */
2841 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2842 {
2843 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2844 t = TREE_OPERAND (t, 0);
2845 }
2846 else
2847 *offset = 0;
2848
2849 if (TREE_CODE (t) != ADDR_EXPR)
2850 return false;
2851 *v = TREE_OPERAND (t, 0);
2852 return true;
2853 }
2854
2855 /* T is a known constant value of a virtual table pointer. Return the BINFO
2856 of the instance type. */
2857
2858 tree
2859 vtable_pointer_value_to_binfo (const_tree t)
2860 {
2861 tree vtable;
2862 unsigned HOST_WIDE_INT offset;
2863
2864 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2865 return NULL_TREE;
2866
2867 /* FIXME: for stores of construction vtables we return NULL,
2868 because we do not have a BINFO for those. Eventually we should fix
2869 our representation to allow this case to be handled, too.
2870 In the case we see a store of a BINFO, we may however assume
2871 that standard folding will be able to cope with it. */
2872 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2873 offset, vtable);
2874 }
2875
2876 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2877 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2878 and insert them in NODES.
2879
2880 MATCHED_VTABLES and INSERTED are used to avoid duplicated work. */
2881
2882 static void
2883 record_targets_from_bases (tree otr_type,
2884 HOST_WIDE_INT otr_token,
2885 tree outer_type,
2886 HOST_WIDE_INT offset,
2887 vec <cgraph_node *> &nodes,
2888 hash_set<tree> *inserted,
2889 hash_set<tree> *matched_vtables,
2890 bool *completep)
2891 {
2892 while (true)
2893 {
2894 HOST_WIDE_INT pos, size;
2895 tree base_binfo;
2896 tree fld;
2897
2898 if (types_same_for_odr (outer_type, otr_type))
2899 return;
2900
2901 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2902 {
2903 if (TREE_CODE (fld) != FIELD_DECL)
2904 continue;
2905
2906 pos = int_bit_position (fld);
2907 size = tree_to_shwi (DECL_SIZE (fld));
2908 if (pos <= offset && (pos + size) > offset
2909 /* Do not get confused by zero sized bases. */
2910 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2911 break;
2912 }
2913 /* Within a class type we should always find the corresponding field. */
2914 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2915
2916 /* Nonbase types should have been stripped by outer_class_type. */
2917 gcc_assert (DECL_ARTIFICIAL (fld));
2918
2919 outer_type = TREE_TYPE (fld);
2920 offset -= pos;
2921
2922 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2923 offset, otr_type);
2924 if (!base_binfo)
2925 {
2926 gcc_assert (odr_violation_reported);
2927 return;
2928 }
2929 gcc_assert (base_binfo);
2930 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2931 {
2932 bool can_refer;
2933 tree target = gimple_get_virt_method_for_binfo (otr_token,
2934 base_binfo,
2935 &can_refer);
2936 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2937 maybe_record_node (nodes, target, inserted, can_refer, completep);
2938 matched_vtables->add (BINFO_VTABLE (base_binfo));
2939 }
2940 }
2941 }
2942
2943 /* When a virtual table is removed, we may need to flush the cache. */
2944
2945 static void
2946 devirt_variable_node_removal_hook (varpool_node *n,
2947 void *d ATTRIBUTE_UNUSED)
2948 {
2949 if (cached_polymorphic_call_targets
2950 && DECL_VIRTUAL_P (n->decl)
2951 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2952 free_polymorphic_call_targets_hash ();
2953 }
2954
2955 /* Record of how many calls would benefit from a given type being final. */
2956
2957 struct odr_type_warn_count
2958 {
2959 tree type;
2960 int count;
2961 profile_count dyn_count;
2962 };
2963
2964 /* Record of how many calls would benefit from a given method being final. */
2965
2966 struct decl_warn_count
2967 {
2968 tree decl;
2969 int count;
2970 profile_count dyn_count;
2971 };
2972
2973 /* Information about type and decl warnings. */
2974
2975 class final_warning_record
2976 {
2977 public:
2978 /* If needed, grow the type_warnings vector and initialize new entries
2979 to have dyn_count set to profile_count::zero (). */
2980 void grow_type_warnings (unsigned newlen);
2981
2982 profile_count dyn_count;
2983 auto_vec<odr_type_warn_count> type_warnings;
2984 hash_map<tree, decl_warn_count> decl_warnings;
2985 };
2986
2987 void
2988 final_warning_record::grow_type_warnings (unsigned newlen)
2989 {
2990 unsigned len = type_warnings.length ();
2991 if (newlen > len)
2992 {
2993 type_warnings.safe_grow_cleared (newlen, true);
2994 for (unsigned i = len; i < newlen; i++)
2995 type_warnings[i].dyn_count = profile_count::zero ();
2996 }
2997 }
2998
2999 class final_warning_record *final_warning_records;
3000
3001 /* Return a vector containing possible targets of a polymorphic call of type
3002 OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
3003 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
3004 OTR_TYPE and include their virtual methods. This is useful for types
3005 possibly in construction or destruction where the virtual table may
3006 temporarily change to one of the base types. INCLUDE_DERIVED_TYPES makes
3007 us walk the inheritance graph for all derivations.
3008
3009 If COMPLETEP is non-NULL, store true if the list is complete.
3010 CACHE_TOKEN (if non-NULL) will get stored to a unique ID of the entry
3011 in the target cache. If the user needs to visit every target list
3012 just once, it can memoize them.
3013
3014 If SPECULATIVE is set, the list will not contain targets that
3015 are not speculatively taken.
3016
3017 The returned vector is placed into the cache. It is NOT the caller's
3018 responsibility to free it. The vector can be freed on a cgraph_remove_node
3019 call if the particular node is a virtual function present in the cache. */
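 /* A minimal usage sketch (illustrative only; real callers live in the
    devirtualization and inlining code):

	bool final;
	vec <cgraph_node *> targets
	  = possible_polymorphic_call_targets (otr_type, otr_token,
					       context, &final);
	if (final && targets.length () == 1)
	  ; // safe to devirtualize to targets[0]->decl

    The vector is owned by the cache and must not be freed by the
    caller.  */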
3020
3021 vec <cgraph_node *>
3022 possible_polymorphic_call_targets (tree otr_type,
3023 HOST_WIDE_INT otr_token,
3024 ipa_polymorphic_call_context context,
3025 bool *completep,
3026 void **cache_token,
3027 bool speculative)
3028 {
3029 static struct cgraph_node_hook_list *node_removal_hook_holder;
3030 vec <cgraph_node *> nodes = vNULL;
3031 auto_vec <tree, 8> bases_to_consider;
3032 odr_type type, outer_type;
3033 polymorphic_call_target_d key;
3034 polymorphic_call_target_d **slot;
3035 unsigned int i;
3036 tree binfo, target;
3037 bool complete;
3038 bool can_refer = false;
3039 bool skipped = false;
3040
3041 otr_type = TYPE_MAIN_VARIANT (otr_type);
3042
3043 /* If ODR is not initialized or the context is invalid, return an empty,
3044 incomplete list. */
3045 if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
3046 {
3047 if (completep)
3048 *completep = context.invalid;
3049 if (cache_token)
3050 *cache_token = NULL;
3051 return nodes;
3052 }
3053
3054 /* Do not bother to compute speculative info when the user does not ask for it. */
3055 if (!speculative || !context.speculative_outer_type)
3056 context.clear_speculation ();
3057
3058 type = get_odr_type (otr_type, true);
3059
3060 /* Recording type variants would waste the results cache. */
3061 gcc_assert (!context.outer_type
3062 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3063
3064 /* Look up the outer class type we want to walk.
3065 If we fail to do so, the context is invalid. */
3066 if ((context.outer_type || context.speculative_outer_type)
3067 && !context.restrict_to_inner_class (otr_type))
3068 {
3069 if (completep)
3070 *completep = true;
3071 if (cache_token)
3072 *cache_token = NULL;
3073 return nodes;
3074 }
3075 gcc_assert (!context.invalid);
3076
3077 /* Check that restrict_to_inner_class kept the main variant. */
3078 gcc_assert (!context.outer_type
3079 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3080
3081 /* We canonicalize our query, so we do not need extra hashtable entries. */
3082
3083 /* Without an outer type, we have no use for the offset. Just do the
3084 basic search from the inner type. */
3085 if (!context.outer_type)
3086 context.clear_outer_type (otr_type);
3087 /* We need to update our hierarchy if the type does not exist. */
3088 outer_type = get_odr_type (context.outer_type, true);
3089 /* If the type is final, there are no derivations. */
3090 if (TYPE_FINAL_P (outer_type->type))
3091 context.maybe_derived_type = false;
3092
3093 /* Initialize query cache. */
3094 if (!cached_polymorphic_call_targets)
3095 {
3096 cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
3097 polymorphic_call_target_hash
3098 = new polymorphic_call_target_hash_type (23);
3099 if (!node_removal_hook_holder)
3100 {
3101 node_removal_hook_holder =
3102 symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
3103 symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
3104 NULL);
3105 }
3106 }
3107
3108 if (in_lto_p)
3109 {
3110 if (context.outer_type != otr_type)
3111 context.outer_type
3112 = get_odr_type (context.outer_type, true)->type;
3113 if (context.speculative_outer_type)
3114 context.speculative_outer_type
3115 = get_odr_type (context.speculative_outer_type, true)->type;
3116 }
3117
3118 /* Look up cached answer. */
3119 key.type = type;
3120 key.otr_token = otr_token;
3121 key.speculative = speculative;
3122 key.context = context;
3123 key.n_odr_types = odr_types.length ();
3124 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
3125 if (cache_token)
3126 *cache_token = (void *)*slot;
3127 if (*slot)
3128 {
3129 if (completep)
3130 *completep = (*slot)->complete;
3131 if ((*slot)->type_warning && final_warning_records)
3132 {
3133 final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
3134 if (!final_warning_records->type_warnings
3135 [(*slot)->type_warning - 1].dyn_count.initialized_p ())
3136 final_warning_records->type_warnings
3137 [(*slot)->type_warning - 1].dyn_count = profile_count::zero ();
3138 if (final_warning_records->dyn_count > 0)
3139 final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
3140 = final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
3141 + final_warning_records->dyn_count;
3142 }
3143 if (!speculative && (*slot)->decl_warning && final_warning_records)
3144 {
3145 struct decl_warn_count *c =
3146 final_warning_records->decl_warnings.get ((*slot)->decl_warning);
3147 c->count++;
3148 if (final_warning_records->dyn_count > 0)
3149 c->dyn_count += final_warning_records->dyn_count;
3150 }
3151 return (*slot)->targets;
3152 }
3153
3154 complete = true;
3155
3156 /* Do actual search. */
3157 timevar_push (TV_IPA_VIRTUAL_CALL);
3158 *slot = XCNEW (polymorphic_call_target_d);
3159 if (cache_token)
3160 *cache_token = (void *)*slot;
3161 (*slot)->type = type;
3162 (*slot)->otr_token = otr_token;
3163 (*slot)->context = context;
3164 (*slot)->speculative = speculative;
3165
3166 hash_set<tree> inserted;
3167 hash_set<tree> matched_vtables;
3168
3169 /* First insert targets we speculatively identified as likely. */
3170 if (context.speculative_outer_type)
3171 {
3172 odr_type speculative_outer_type;
3173 bool speculation_complete = true;
3174
3175 /* First insert the target from the type itself and check if it may have
3176 derived types. */
3177 speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
3178 if (TYPE_FINAL_P (speculative_outer_type->type))
3179 context.speculative_maybe_derived_type = false;
3180 binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
3181 context.speculative_offset, otr_type);
3182 if (binfo)
3183 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3184 &can_refer);
3185 else
3186 target = NULL;
3187
3188 /* In case we get a final method, we don't need
3189 to walk derivations. */
3190 if (target && DECL_FINAL_P (target))
3191 context.speculative_maybe_derived_type = false;
3192 if (type_possibly_instantiated_p (speculative_outer_type->type))
3193 maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
3194 if (binfo)
3195 matched_vtables.add (BINFO_VTABLE (binfo));
3196
3197
3198 /* Next walk recursively all derived types. */
3199 if (context.speculative_maybe_derived_type)
3200 for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
3201 possible_polymorphic_call_targets_1 (nodes, &inserted,
3202 &matched_vtables,
3203 otr_type,
3204 speculative_outer_type->derived_types[i],
3205 otr_token, speculative_outer_type->type,
3206 context.speculative_offset,
3207 &speculation_complete,
3208 bases_to_consider,
3209 false);
3210 }
3211
3212 if (!speculative || !nodes.length ())
3213 {
3214 /* First see virtual method of type itself. */
3215 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
3216 context.offset, otr_type);
3217 if (binfo)
3218 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3219 &can_refer);
3220 else
3221 {
3222 gcc_assert (odr_violation_reported);
3223 target = NULL;
3224 }
3225
3226 /* Destructors are never called through construction virtual tables,
3227 because the type is always known. */
3228 if (target && DECL_CXX_DESTRUCTOR_P (target))
3229 context.maybe_in_construction = false;
3230
3231 if (target)
3232 {
3233 /* In case we get a final method, we don't need
3234 to walk derivations. */
3235 if (DECL_FINAL_P (target))
3236 context.maybe_derived_type = false;
3237 }
3238
3239 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
3240 if (type_possibly_instantiated_p (outer_type->type))
3241 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3242 else
3243 skipped = true;
3244
3245 if (binfo)
3246 matched_vtables.add (BINFO_VTABLE (binfo));
3247
3248 /* Next walk recursively all derived types. */
3249 if (context.maybe_derived_type)
3250 {
3251 for (i = 0; i < outer_type->derived_types.length(); i++)
3252 possible_polymorphic_call_targets_1 (nodes, &inserted,
3253 &matched_vtables,
3254 otr_type,
3255 outer_type->derived_types[i],
3256 otr_token, outer_type->type,
3257 context.offset, &complete,
3258 bases_to_consider,
3259 context.maybe_in_construction);
3260
3261 if (!outer_type->all_derivations_known)
3262 {
3263 if (!speculative && final_warning_records
3264 && nodes.length () == 1
3265 && TREE_CODE (TREE_TYPE (nodes[0]->decl)) == METHOD_TYPE)
3266 {
3267 if (complete
3268 && warn_suggest_final_types
3269 && !outer_type->derived_types.length ())
3270 {
3271 final_warning_records->grow_type_warnings
3272 (outer_type->id);
3273 final_warning_records->type_warnings[outer_type->id].count++;
3274 if (!final_warning_records->type_warnings
3275 [outer_type->id].dyn_count.initialized_p ())
3276 final_warning_records->type_warnings
3277 [outer_type->id].dyn_count = profile_count::zero ();
3278 final_warning_records->type_warnings[outer_type->id].dyn_count
3279 += final_warning_records->dyn_count;
3280 final_warning_records->type_warnings[outer_type->id].type
3281 = outer_type->type;
3282 (*slot)->type_warning = outer_type->id + 1;
3283 }
3284 if (complete
3285 && warn_suggest_final_methods
3286 && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
3287 outer_type->type))
3288 {
3289 bool existed;
3290 struct decl_warn_count &c =
3291 final_warning_records->decl_warnings.get_or_insert
3292 (nodes[0]->decl, &existed);
3293
3294 if (existed)
3295 {
3296 c.count++;
3297 c.dyn_count += final_warning_records->dyn_count;
3298 }
3299 else
3300 {
3301 c.count = 1;
3302 c.dyn_count = final_warning_records->dyn_count;
3303 c.decl = nodes[0]->decl;
3304 }
3305 (*slot)->decl_warning = nodes[0]->decl;
3306 }
3307 }
3308 complete = false;
3309 }
3310 }
3311
3312 if (!speculative)
3313 {
3314 /* Destructors are never called through construction virtual tables,
3315 because the type is always known. One of the entries may be
3316 cxa_pure_virtual, so check at least two of them. */
3317 if (context.maybe_in_construction)
3318 for (i = 0; i < MIN (nodes.length (), 2); i++)
3319 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
3320 context.maybe_in_construction = false;
3321 if (context.maybe_in_construction)
3322 {
3323 if (type != outer_type
3324 && (!skipped
3325 || (context.maybe_derived_type
3326 && !type_all_derivations_known_p (outer_type->type))))
3327 record_targets_from_bases (otr_type, otr_token, outer_type->type,
3328 context.offset, nodes, &inserted,
3329 &matched_vtables, &complete);
3330 if (skipped)
3331 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3332 for (i = 0; i < bases_to_consider.length(); i++)
3333 maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
3334 }
3335 }
3336 }
3337
3338 (*slot)->targets = nodes;
3339 (*slot)->complete = complete;
3340 (*slot)->n_odr_types = odr_types.length ();
3341 if (completep)
3342 *completep = complete;
3343
3344 timevar_pop (TV_IPA_VIRTUAL_CALL);
3345 return nodes;
3346 }
3347
3348 bool
3349 add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
3350 vec<const decl_warn_count*> *vec)
3351 {
3352 vec->safe_push (&value);
3353 return true;
3354 }
3355
3356 /* Dump target list TARGETS into FILE. */
3357
3358 static void
3359 dump_targets (FILE *f, vec <cgraph_node *> targets, bool verbose)
3360 {
3361 unsigned int i;
3362
3363 for (i = 0; i < targets.length (); i++)
3364 {
3365 char *name = NULL;
3366 if (in_lto_p)
3367 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
3368 fprintf (f, " %s", name ? name : targets[i]->dump_name ());
3369 if (in_lto_p)
3370 free (name);
3371 if (!targets[i]->definition)
3372 fprintf (f, " (no definition%s)",
3373 DECL_DECLARED_INLINE_P (targets[i]->decl)
3374 ? " inline" : "");
3375 /* With many targets for every polymorphic call, dumps are going to
3376 be quadratic in size. */
3377 if (i > 10 && !verbose)
3378 {
3379 fprintf (f, " ... and %i more targets\n", targets.length () - i);
3380 return;
3381 }
3382 }
3383 fprintf (f, "\n");
3384 }
3385
3386 /* Dump all possible targets of a polymorphic call. */
3387
3388 void
3389 dump_possible_polymorphic_call_targets (FILE *f,
3390 tree otr_type,
3391 HOST_WIDE_INT otr_token,
3392 const ipa_polymorphic_call_context &ctx,
3393 bool verbose)
3394 {
3395 vec <cgraph_node *> targets;
3396 bool final;
3397 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
3398 unsigned int len;
3399
3400 if (!type)
3401 return;
3402 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3403 ctx,
3404 &final, NULL, false);
3405 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
3406 print_generic_expr (f, type->type, TDF_SLIM);
3407 fprintf (f, " token %i\n", (int)otr_token);
3408
3409 ctx.dump (f);
3410
3411 fprintf (f, " %s%s%s%s\n ",
3412 final ? "This is a complete list." :
3413 "This is partial list; extra targets may be defined in other units.",
3414 ctx.maybe_in_construction ? " (base types included)" : "",
3415 ctx.maybe_derived_type ? " (derived types included)" : "",
3416 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
3417 len = targets.length ();
3418 dump_targets (f, targets, verbose);
3419
3420 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3421 ctx,
3422 &final, NULL, true);
3423 if (targets.length () != len)
3424 {
3425 fprintf (f, " Speculative targets:");
3426 dump_targets (f, targets, verbose);
3427 }
3428 /* Ugly: during callgraph construction the target cache may get populated
3429 before all targets are found. While this is harmless (because all local
3430 types are discovered and only in those cases do we devirtualize fully, and
3431 we don't do speculative devirtualization before the IPA stage) it triggers
3432 the assert below when dumping at that stage also populates the cache with
3433 speculative targets. Quietly ignore this. */
3434 gcc_assert (symtab->state < IPA_SSA || targets.length () <= len);
3435 fprintf (f, "\n");
3436 }
3437
3438
3439 /* Return true if N can possibly be a target of a polymorphic call of
3440 OTR_TYPE/OTR_TOKEN. */
3441
3442 bool
3443 possible_polymorphic_call_target_p (tree otr_type,
3444 HOST_WIDE_INT otr_token,
3445 const ipa_polymorphic_call_context &ctx,
3446 struct cgraph_node *n)
3447 {
3448 vec <cgraph_node *> targets;
3449 unsigned int i;
3450 bool final;
3451
3452 if (fndecl_built_in_p (n->decl, BUILT_IN_UNREACHABLE)
3453 || fndecl_built_in_p (n->decl, BUILT_IN_TRAP))
3454 return true;
3455
3456 if (is_cxa_pure_virtual_p (n->decl))
3457 return true;
3458
3459 if (!odr_hash)
3460 return true;
3461 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
3462 for (i = 0; i < targets.length (); i++)
3463 if (n->semantically_equivalent_p (targets[i]))
3464 return true;
3465
3466 /* At the moment we allow the middle end to dig out new external declarations
3467 as targets of polymorphic calls. */
3468 if (!final && !n->definition)
3469 return true;
3470 return false;
3471 }
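#if 0
/* Illustrative sketch only (not compiled): a checker could use the
   predicate above to validate that a direct call introduced by
   devirtualization is still among the possible targets recorded for the
   polymorphic call on edge E.  The context constructor taking an edge is
   assumed to be available here.  */
static bool
verify_devirtualized_call (struct cgraph_edge *e, struct cgraph_node *target)
{
  ipa_polymorphic_call_context ctx (e);
  return possible_polymorphic_call_target_p (e->indirect_info->otr_type,
					     e->indirect_info->otr_token,
					     ctx, target);
}
#endif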
3472
3473
3474
3475 /* Return true if N can possibly be a target of a polymorphic call of
3476 OBJ_TYPE_REF expression REF in STMT. */
3477
3478 bool
3479 possible_polymorphic_call_target_p (tree ref,
3480 gimple *stmt,
3481 struct cgraph_node *n)
3482 {
3483 ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
3484 tree call_fn = gimple_call_fn (stmt);
3485
3486 return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
3487 tree_to_uhwi
3488 (OBJ_TYPE_REF_TOKEN (call_fn)),
3489 context,
3490 n);
3491 }
3492
3493
3494 /* After callgraph construction new external nodes may appear.
3495 Add them into the graph. */
3496
3497 void
3498 update_type_inheritance_graph (void)
3499 {
3500 struct cgraph_node *n;
3501
3502 if (!odr_hash)
3503 return;
3504 free_polymorphic_call_targets_hash ();
3505 timevar_push (TV_IPA_INHERITANCE);
3506 /* We reconstruct the graph starting from the types of all methods seen in the
3507 unit. */
3508 FOR_EACH_FUNCTION (n)
3509 if (DECL_VIRTUAL_P (n->decl)
3510 && !n->definition
3511 && n->real_symbol_p ())
3512 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
3513 timevar_pop (TV_IPA_INHERITANCE);
3514 }
3515
3516
3517 /* Return true if N looks like a likely target of a polymorphic call.
3518 Rule out cxa_pure_virtual, noreturns, functions declared cold and
3519 other obvious cases. */
3520
3521 bool
3522 likely_target_p (struct cgraph_node *n)
3523 {
3524 int flags;
3525 /* cxa_pure_virtual and similar things are not likely. */
3526 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
3527 return false;
3528 flags = flags_from_decl_or_type (n->decl);
3529 if (flags & ECF_NORETURN)
3530 return false;
3531 if (lookup_attribute ("cold",
3532 DECL_ATTRIBUTES (n->decl)))
3533 return false;
3534 if (n->frequency < NODE_FREQUENCY_NORMAL)
3535 return false;
3536 /* If there are no live virtual tables referring to the target,
3537 the only way the target can be called is via an instance coming from
3538 another compilation unit; speculative devirtualization is built around
3539 the assumption that this won't happen. */
3540 if (!referenced_from_vtable_p (n))
3541 return false;
3542 return true;
3543 }
3544
3545 /* Compare type warning records P1 and P2 and choose the one with the
3546 larger count; helper for qsort. */
3547
3548 static int
3549 type_warning_cmp (const void *p1, const void *p2)
3550 {
3551 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3552 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3553
3554 if (t1->dyn_count < t2->dyn_count)
3555 return 1;
3556 if (t1->dyn_count > t2->dyn_count)
3557 return -1;
3558 return t2->count - t1->count;
3559 }
3560
3561 /* Compare decl warning records P1 and P2 and choose the one with the
3562 larger count; helper for qsort. */
3563
3564 static int
3565 decl_warning_cmp (const void *p1, const void *p2)
3566 {
3567 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3568 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3569
3570 if (t1->dyn_count < t2->dyn_count)
3571 return 1;
3572 if (t1->dyn_count > t2->dyn_count)
3573 return -1;
3574 return t2->count - t1->count;
3575 }
3576
3577
3578 /* Try to speculatively devirtualize a call to OTR_TYPE with OTR_TOKEN in
3579 context CTX. */
3580
3581 struct cgraph_node *
3582 try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
3583 ipa_polymorphic_call_context ctx)
3584 {
3585 vec <cgraph_node *> targets
3586 = possible_polymorphic_call_targets
3587 (otr_type, otr_token, ctx, NULL, NULL, true);
3588 unsigned int i;
3589 struct cgraph_node *likely_target = NULL;
3590
3591 for (i = 0; i < targets.length (); i++)
3592 if (likely_target_p (targets[i]))
3593 {
3594 if (likely_target)
3595 return NULL;
3596 likely_target = targets[i];
3597 }
3598 if (!likely_target
3599 || !likely_target->definition
3600 || DECL_EXTERNAL (likely_target->decl))
3601 return NULL;
3602
3603 /* Don't use an implicitly-declared destructor (c++/58678). */
3604 struct cgraph_node *non_thunk_target
3605 = likely_target->function_symbol ();
3606 if (DECL_ARTIFICIAL (non_thunk_target->decl))
3607 return NULL;
3608 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3609 && likely_target->can_be_discarded_p ())
3610 return NULL;
3611 return likely_target;
3612 }
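#if 0
/* Illustrative sketch only (not compiled): speculating a polymorphic edge E
   using the helper above, mirroring what ipa_devirt below does; the 8/10
   scale matches the probability used there.  */
static void
maybe_speculate_edge (struct cgraph_edge *e)
{
  ipa_polymorphic_call_context ctx (e);
  struct cgraph_node *target
    = try_speculative_devirtualization (e->indirect_info->otr_type,
					e->indirect_info->otr_token, ctx);
  if (target)
    e->make_speculative (target, e->count.apply_scale (8, 10));
}
#endif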
3613
3614 /* The ipa-devirt pass.
3615 When a polymorphic call has only one likely target in the unit,
3616 turn it into a speculative call. */
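/* For illustration only (hand-written C++/pseudo-GIMPLE, not authoritative):
   a call

     obj->foo ();

   whose single likely target is A::foo is roughly rewritten as

     fn = obj->vptr[token];
     if (fn == A::foo)
       A::foo (obj);	// direct call; later inlinable
     else
       fn (obj);	// original indirect call

   so the expected hot path becomes a direct call guarded by a cheap
   pointer comparison.  */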
3617
3618 static unsigned int
3619 ipa_devirt (void)
3620 {
3621 struct cgraph_node *n;
3622 hash_set<void *> bad_call_targets;
3623 struct cgraph_edge *e;
3624
3625 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
3626 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
3627 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
3628 int ndropped = 0;
3629
3630 if (!odr_types_ptr)
3631 return 0;
3632
3633 if (dump_file)
3634 dump_type_inheritance_graph (dump_file);
3635
3636 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
3637 This is implemented by setting up final_warning_records that are updated
3638 by get_polymorphic_call_targets.
3639 We need to clear the cache in this case to trigger recomputation of all
3640 entries. */
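/* For example (illustrative only), compiling

     struct S { virtual int f () { return 0; } };
     int g (S *s) { return s->f (); }

   with -O2 -Wsuggest-final-types suggests declaring S 'final' when no
   derived type is seen, since that would allow s->f () to be
   devirtualized.  */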
3641 if (warn_suggest_final_methods || warn_suggest_final_types)
3642 {
3643 final_warning_records = new (final_warning_record);
3644 final_warning_records->dyn_count = profile_count::zero ();
3645 final_warning_records->grow_type_warnings (odr_types.length ());
3646 free_polymorphic_call_targets_hash ();
3647 }
3648
3649 FOR_EACH_DEFINED_FUNCTION (n)
3650 {
3651 bool update = false;
3652 if (!opt_for_fn (n->decl, flag_devirtualize))
3653 continue;
3654 if (dump_file && n->indirect_calls)
3655 fprintf (dump_file, "\n\nProcessing function %s\n",
3656 n->dump_name ());
3657 for (e = n->indirect_calls; e; e = e->next_callee)
3658 if (e->indirect_info->polymorphic)
3659 {
3660 struct cgraph_node *likely_target = NULL;
3661 void *cache_token;
3662 bool final;
3663
3664 if (final_warning_records)
3665 final_warning_records->dyn_count = e->count.ipa ();
3666
3667 vec <cgraph_node *> targets
3668 = possible_polymorphic_call_targets
3669 (e, &final, &cache_token, true);
3670 unsigned int i;
3671
3672 /* Trigger warnings by calculating non-speculative targets. */
3673 if (warn_suggest_final_methods || warn_suggest_final_types)
3674 possible_polymorphic_call_targets (e);
3675
3676 if (dump_file)
3677 dump_possible_polymorphic_call_targets
3678 (dump_file, e, (dump_flags & TDF_DETAILS));
3679
3680 npolymorphic++;
3681
3682 /* See if the call can be devirtualized by means of ipa-prop's
3683 polymorphic call context propagation. If not, we can just
3684 forget about this call being polymorphic and avoid some heavy
3685 lifting in remove_unreachable_nodes that will otherwise try to
3686 keep all possible targets alive until inlining and in the inliner
3687 itself.
3688
3689 This may need to be revisited once we add further ways to use
3690 the may edges, but it is a reasonable thing to do right now. */
3691
3692 if ((e->indirect_info->param_index == -1
3693 || (!opt_for_fn (n->decl, flag_devirtualize_speculatively)
3694 && e->indirect_info->vptr_changed))
3695 && !flag_ltrans_devirtualize)
3696 {
3697 e->indirect_info->polymorphic = false;
3698 ndropped++;
3699 if (dump_file)
3700 fprintf (dump_file, "Dropping polymorphic call info;"
3701 " it cannot be used by ipa-prop\n");
3702 }
3703
3704 if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
3705 continue;
3706
3707 if (!e->maybe_hot_p ())
3708 {
3709 if (dump_file)
3710 fprintf (dump_file, "Call is cold\n\n");
3711 ncold++;
3712 continue;
3713 }
3714 if (e->speculative)
3715 {
3716 if (dump_file)
3717 fprintf (dump_file, "Call is already speculated\n\n");
3718 nspeculated++;
3719
3720 /* When dumping, see if we agree with the speculation. */
3721 if (!dump_file)
3722 continue;
3723 }
3724 if (bad_call_targets.contains (cache_token))
3725 {
3726 if (dump_file)
3727 fprintf (dump_file, "Target list is known to be useless\n\n");
3728 nmultiple++;
3729 continue;
3730 }
3731 for (i = 0; i < targets.length (); i++)
3732 if (likely_target_p (targets[i]))
3733 {
3734 if (likely_target)
3735 {
3736 likely_target = NULL;
3737 if (dump_file)
3738 fprintf (dump_file, "More than one likely target\n\n");
3739 nmultiple++;
3740 break;
3741 }
3742 likely_target = targets[i];
3743 }
3744 if (!likely_target)
3745 {
3746 bad_call_targets.add (cache_token);
3747 continue;
3748 }
3749 /* This is reached only when dumping; check if we agree or disagree
3750 with the speculation. */
3751 if (e->speculative)
3752 {
3753 bool found = e->speculative_call_for_target (likely_target);
3754 if (found)
3755 {
3756 fprintf (dump_file, "We agree with speculation\n\n");
3757 nok++;
3758 }
3759 else
3760 {
3761 fprintf (dump_file, "We disagree with speculation\n\n");
3762 nwrong++;
3763 }
3764 continue;
3765 }
3766 if (!likely_target->definition)
3767 {
3768 if (dump_file)
3769 fprintf (dump_file, "Target is not a definition\n\n");
3770 nnotdefined++;
3771 continue;
3772 }
3773 /* Do not introduce new references to external symbols. While we
3774 can handle these just fine, it is common for programs to be built
3775 incorrectly, with headers declaring methods that differ from the
3776 ones they are actually linked with. */
3777 if (DECL_EXTERNAL (likely_target->decl))
3778 {
3779 if (dump_file)
3780 fprintf (dump_file, "Target is external\n\n");
3781 nexternal++;
3782 continue;
3783 }
3784 /* Don't use an implicitly-declared destructor (c++/58678). */
3785 struct cgraph_node *non_thunk_target
3786 = likely_target->function_symbol ();
3787 if (DECL_ARTIFICIAL (non_thunk_target->decl))
3788 {
3789 if (dump_file)
3790 fprintf (dump_file, "Target is artificial\n\n");
3791 nartificial++;
3792 continue;
3793 }
3794 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3795 && likely_target->can_be_discarded_p ())
3796 {
3797 if (dump_file)
3798 fprintf (dump_file, "Target is overwritable\n\n");
3799 noverwritable++;
3800 continue;
3801 }
3802 else if (dbg_cnt (devirt))
3803 {
3804 if (dump_enabled_p ())
3805 {
3806 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, e->call_stmt,
3807 "speculatively devirtualizing call "
3808 "in %s to %s\n",
3809 n->dump_name (),
3810 likely_target->dump_name ());
3811 }
3812 if (!likely_target->can_be_discarded_p ())
3813 {
3814 cgraph_node *alias;
3815 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
3816 if (alias)
3817 likely_target = alias;
3818 }
3819 nconverted++;
3820 update = true;
3821 e->make_speculative
3822 (likely_target, e->count.apply_scale (8, 10));
3823 }
3824 }
3825 if (update)
3826 ipa_update_overall_fn_summary (n);
3827 }
3828 if (warn_suggest_final_methods || warn_suggest_final_types)
3829 {
3830 if (warn_suggest_final_types)
3831 {
3832 final_warning_records->type_warnings.qsort (type_warning_cmp);
3833 for (unsigned int i = 0;
3834 i < final_warning_records->type_warnings.length (); i++)
3835 if (final_warning_records->type_warnings[i].count)
3836 {
3837 tree type = final_warning_records->type_warnings[i].type;
3838 int count = final_warning_records->type_warnings[i].count;
3839 profile_count dyn_count
3840 = final_warning_records->type_warnings[i].dyn_count;
3841
3842 if (!(dyn_count > 0))
3843 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3844 OPT_Wsuggest_final_types, count,
3845 "Declaring type %qD final "
3846 "would enable devirtualization of %i call",
3847 "Declaring type %qD final "
3848 "would enable devirtualization of %i calls",
3849 type,
3850 count);
3851 else
3852 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3853 OPT_Wsuggest_final_types, count,
3854 "Declaring type %qD final "
3855 "would enable devirtualization of %i call "
3856 "executed %lli times",
3857 "Declaring type %qD final "
3858 "would enable devirtualization of %i calls "
3859 "executed %lli times",
3860 type,
3861 count,
3862 (long long) dyn_count.to_gcov_type ());
3863 }
3864 }
3865
3866 if (warn_suggest_final_methods)
3867 {
3868 auto_vec<const decl_warn_count*> decl_warnings_vec;
3869
3870 final_warning_records->decl_warnings.traverse
3871 <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
3872 decl_warnings_vec.qsort (decl_warning_cmp);
3873 for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
3874 {
3875 tree decl = decl_warnings_vec[i]->decl;
3876 int count = decl_warnings_vec[i]->count;
3877 profile_count dyn_count
3878 = decl_warnings_vec[i]->dyn_count;
3879
3880 if (!(dyn_count > 0))
3881 if (DECL_CXX_DESTRUCTOR_P (decl))
3882 warning_n (DECL_SOURCE_LOCATION (decl),
3883 OPT_Wsuggest_final_methods, count,
3884 "Declaring virtual destructor of %qD final "
3885 "would enable devirtualization of %i call",
3886 "Declaring virtual destructor of %qD final "
3887 "would enable devirtualization of %i calls",
3888 DECL_CONTEXT (decl), count);
3889 else
3890 warning_n (DECL_SOURCE_LOCATION (decl),
3891 OPT_Wsuggest_final_methods, count,
3892 "Declaring method %qD final "
3893 "would enable devirtualization of %i call",
3894 "Declaring method %qD final "
3895 "would enable devirtualization of %i calls",
3896 decl, count);
3897 else if (DECL_CXX_DESTRUCTOR_P (decl))
3898 warning_n (DECL_SOURCE_LOCATION (decl),
3899 OPT_Wsuggest_final_methods, count,
3900 "Declaring virtual destructor of %qD final "
3901 "would enable devirtualization of %i call "
3902 "executed %lli times",
3903 "Declaring virtual destructor of %qD final "
3904 "would enable devirtualization of %i calls "
3905 "executed %lli times",
3906 DECL_CONTEXT (decl), count,
3907 (long long) dyn_count.to_gcov_type ());
3908 else
3909 warning_n (DECL_SOURCE_LOCATION (decl),
3910 OPT_Wsuggest_final_methods, count,
3911 "Declaring method %qD final "
3912 "would enable devirtualization of %i call "
3913 "executed %lli times",
3914 "Declaring method %qD final "
3915 "would enable devirtualization of %i calls "
3916 "executed %lli times",
3917 decl, count,
3918 (long long) dyn_count.to_gcov_type ());
3919 }
3920 }
3921
3922 delete (final_warning_records);
3923 final_warning_records = 0;
3924 }
3925
3926 if (dump_file)
3927 fprintf (dump_file,
3928 "%i polymorphic calls, %i devirtualized,"
3929 " %i speculatively devirtualized, %i cold\n"
3930 "%i have multiple targets, %i overwritable,"
3931 " %i already speculated (%i agree, %i disagree),"
3932 " %i external, %i not defined, %i artificial, %i infos dropped\n",
3933 npolymorphic, ndevirtualized, nconverted, ncold,
3934 nmultiple, noverwritable, nspeculated, nok, nwrong,
3935 nexternal, nnotdefined, nartificial, ndropped);
3936 return ndevirtualized || ndropped ? TODO_remove_functions : 0;
3937 }
3938
3939 namespace {
3940
3941 const pass_data pass_data_ipa_devirt =
3942 {
3943 IPA_PASS, /* type */
3944 "devirt", /* name */
3945 OPTGROUP_NONE, /* optinfo_flags */
3946 TV_IPA_DEVIRT, /* tv_id */
3947 0, /* properties_required */
3948 0, /* properties_provided */
3949 0, /* properties_destroyed */
3950 0, /* todo_flags_start */
3951 ( TODO_dump_symtab ), /* todo_flags_finish */
3952 };
3953
3954 class pass_ipa_devirt : public ipa_opt_pass_d
3955 {
3956 public:
3957 pass_ipa_devirt (gcc::context *ctxt)
3958 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3959 NULL, /* generate_summary */
3960 NULL, /* write_summary */
3961 NULL, /* read_summary */
3962 NULL, /* write_optimization_summary */
3963 NULL, /* read_optimization_summary */
3964 NULL, /* stmt_fixup */
3965 0, /* function_transform_todo_flags_start */
3966 NULL, /* function_transform */
3967 NULL) /* variable_transform */
3968 {}
3969
3970 /* opt_pass methods: */
3971 virtual bool gate (function *)
3972 {
3973 /* In LTO, always run the IPA passes and decide on a per-function basis
3974 whether the pass is enabled. */
3975 if (in_lto_p)
3976 return true;
3977 return (flag_devirtualize
3978 && (flag_devirtualize_speculatively
3979 || (warn_suggest_final_methods
3980 || warn_suggest_final_types))
3981 && optimize);
3982 }
3983
3984 virtual unsigned int execute (function *) { return ipa_devirt (); }
3985
3986 }; // class pass_ipa_devirt
3987
3988 } // anon namespace
3989
3990 ipa_opt_pass_d *
3991 make_pass_ipa_devirt (gcc::context *ctxt)
3992 {
3993 return new pass_ipa_devirt (ctxt);
3994 }
3995
3996 /* Print the ODR name of TYPE if available.
3997 Demangle it when the DEMANGLE option is set. */
3998
3999 DEBUG_FUNCTION void
4000 debug_tree_odr_name (tree type, bool demangle)
4001 {
4002 const char *odr = get_odr_name_for_type (type);
4003 if (demangle)
4004 {
4005 const int opts = DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES;
4006 odr = cplus_demangle (odr, opts);
4007 }
4008
4009 fprintf (stderr, "%s\n", odr);
4010 }
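/* Illustrative use from a debugger (assumed gdb session; the output shown
   is made up):

     (gdb) call debug_tree_odr_name (type, true)
     myns::mytype

   With DEMANGLE false, the raw mangled ODR name is printed instead.  */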
4011
4012 /* Register an ODR enum so we can later stream a record about its values. */
4013
4014 void
4015 register_odr_enum (tree t)
4016 {
4017 if (flag_lto)
4018 vec_safe_push (odr_enums, t);
4019 }
4020
4021 /* Write ODR enums to LTO stream file. */
4022
4023 static void
4024 ipa_odr_summary_write (void)
4025 {
4026 if (!odr_enums && !odr_enum_map)
4027 return;
4028 struct output_block *ob = create_output_block (LTO_section_odr_types);
4029 unsigned int i;
4030 tree t;
4031
4032 if (odr_enums)
4033 {
4034 streamer_write_uhwi (ob, odr_enums->length ());
4035
4036 /* For every ODR enum stream out
4037 - its ODR name
4038 - number of values,
4039 - value names and the constants they represent
4040 - bitpack of locations so we can do good diagnostics. */
4041 FOR_EACH_VEC_ELT (*odr_enums, i, t)
4042 {
4043 streamer_write_string (ob, ob->main_stream,
4044 IDENTIFIER_POINTER
4045 (DECL_ASSEMBLER_NAME (TYPE_NAME (t))),
4046 true);
4047
4048 int n = 0;
4049 for (tree e = TYPE_VALUES (t); e; e = TREE_CHAIN (e))
4050 n++;
4051 streamer_write_uhwi (ob, n);
4052 for (tree e = TYPE_VALUES (t); e; e = TREE_CHAIN (e))
4053 {
4054 streamer_write_string (ob, ob->main_stream,
4055 IDENTIFIER_POINTER (TREE_PURPOSE (e)),
4056 true);
4057 streamer_write_wide_int (ob,
4058 wi::to_wide (DECL_INITIAL
4059 (TREE_VALUE (e))));
4060 }
4061
4062 bitpack_d bp = bitpack_create (ob->main_stream);
4063 lto_output_location (ob, &bp, DECL_SOURCE_LOCATION (TYPE_NAME (t)));
4064 for (tree e = TYPE_VALUES (t); e; e = TREE_CHAIN (e))
4065 lto_output_location (ob, &bp,
4066 DECL_SOURCE_LOCATION (TREE_VALUE (e)));
4067 streamer_write_bitpack (&bp);
4068 }
4069 vec_free (odr_enums);
4070 odr_enums = NULL;
4071 }
4072 /* During LTO incremental linking we have already streamed in the types. */
4073 else if (odr_enum_map)
4074 {
4075 gcc_checking_assert (!odr_enums);
4076 streamer_write_uhwi (ob, odr_enum_map->elements ());
4077
4078 hash_map<nofree_string_hash, odr_enum>::iterator iter
4079 = odr_enum_map->begin ();
4080 for (; iter != odr_enum_map->end (); ++iter)
4081 {
4082 odr_enum &this_enum = (*iter).second;
4083 streamer_write_string (ob, ob->main_stream, (*iter).first, true);
4084
4085 streamer_write_uhwi (ob, this_enum.vals.length ());
4086 for (unsigned j = 0; j < this_enum.vals.length (); j++)
4087 {
4088 streamer_write_string (ob, ob->main_stream,
4089 this_enum.vals[j].name, true);
4090 streamer_write_wide_int (ob, this_enum.vals[j].val);
4091 }
4092
4093 bitpack_d bp = bitpack_create (ob->main_stream);
4094 lto_output_location (ob, &bp, this_enum.locus);
4095 for (unsigned j = 0; j < this_enum.vals.length (); j++)
4096 lto_output_location (ob, &bp, this_enum.vals[j].locus);
4097 streamer_write_bitpack (&bp);
4098 }
4099
4100 delete odr_enum_map;
4101 obstack_free (&odr_enum_obstack, NULL);
4102 odr_enum_map = NULL;
4103 }
4104
4105 produce_asm (ob, NULL);
4106 destroy_output_block (ob);
4107 }
4108
4109 /* Read ODR enums from an LTO stream section and warn on mismatches. */
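/* For example (illustrative only), linking two units with -flto where

     enum E { FIRST = 0 };	// a.C
     enum E { FIRST = 1 };	// b.C

   produces "type 'E' violates the C++ One Definition Rule" followed by
   notes pointing at the mismatching values, matching the diagnostics
   emitted below.  */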
4110
4111 static void
4112 ipa_odr_read_section (struct lto_file_decl_data *file_data, const char *data,
4113 size_t len)
4114 {
4115 const struct lto_function_header *header
4116 = (const struct lto_function_header *) data;
4117 const int cfg_offset = sizeof (struct lto_function_header);
4118 const int main_offset = cfg_offset + header->cfg_size;
4119 const int string_offset = main_offset + header->main_size;
4120 class data_in *data_in;
4121
4122 lto_input_block ib ((const char *) data + main_offset, header->main_size,
4123 file_data->mode_table);
4124
4125 data_in
4126 = lto_data_in_create (file_data, (const char *) data + string_offset,
4127 header->string_size, vNULL);
4128 unsigned int n = streamer_read_uhwi (&ib);
4129
4130 if (!odr_enum_map)
4131 {
4132 gcc_obstack_init (&odr_enum_obstack);
4133 odr_enum_map = new (hash_map <nofree_string_hash, odr_enum>);
4134 }
4135
4136 for (unsigned i = 0; i < n; i++)
4137 {
4138 const char *rname = streamer_read_string (data_in, &ib);
4139 unsigned int nvals = streamer_read_uhwi (&ib);
4140 char *name;
4141
4142 obstack_grow (&odr_enum_obstack, rname, strlen (rname) + 1);
4143 name = XOBFINISH (&odr_enum_obstack, char *);
4144
4145 bool existed_p;
4146 class odr_enum &this_enum
4147 = odr_enum_map->get_or_insert (xstrdup (name), &existed_p);
4148
4149 /* If this is the first time we see the enum, remember its definition. */
4150 if (!existed_p)
4151 {
4152 this_enum.vals.safe_grow_cleared (nvals, true);
4153 this_enum.warned = false;
4154 if (dump_file)
4155 fprintf (dump_file, "enum %s\n{\n", name);
4156 for (unsigned j = 0; j < nvals; j++)
4157 {
4158 const char *val_name = streamer_read_string (data_in, &ib);
4159 obstack_grow (&odr_enum_obstack, val_name, strlen (val_name) + 1);
4160 this_enum.vals[j].name = XOBFINISH (&odr_enum_obstack, char *);
4161 this_enum.vals[j].val = streamer_read_wide_int (&ib);
4162 if (dump_file)
4163 fprintf (dump_file, " %s = " HOST_WIDE_INT_PRINT_DEC ",\n",
4164 val_name, wi::fits_shwi_p (this_enum.vals[j].val)
4165 ? this_enum.vals[j].val.to_shwi () : -1);
4166 }
4167 bitpack_d bp = streamer_read_bitpack (&ib);
4168 stream_input_location (&this_enum.locus, &bp, data_in);
4169 for (unsigned j = 0; j < nvals; j++)
4170 stream_input_location (&this_enum.vals[j].locus, &bp, data_in);
4171 data_in->location_cache.apply_location_cache ();
4172 if (dump_file)
4173 fprintf (dump_file, "}\n");
4174 }
4175 /* If we already have a definition, compare it with the new one and output
4176 warnings if they differ. */
4177 else
4178 {
4179 int do_warning = -1;
4180 char *warn_name = NULL;
4181 wide_int warn_value = wi::zero (1);
4182
4183 if (dump_file)
4184 fprintf (dump_file, "Comparing enum %s\n", name);
4185
4186 /* Look for differences which we will warn about later once locations
4187 are streamed. */
4188 for (unsigned j = 0; j < nvals; j++)
4189 {
4190 const char *id = streamer_read_string (data_in, &ib);
4191 wide_int val = streamer_read_wide_int (&ib);
4192
4193 if (do_warning != -1 || j >= this_enum.vals.length ())
4194 continue;
4195 if (strcmp (id, this_enum.vals[j].name)
4196 || (val.get_precision () !=
4197 this_enum.vals[j].val.get_precision ())
4198 || val != this_enum.vals[j].val)
4199 {
4200 warn_name = xstrdup (id);
4201 warn_value = val;
4202 do_warning = j;
4203 if (dump_file)
4204 fprintf (dump_file, " Different on entry %i\n", j);
4205 }
4206 }
4207
4208 /* Stream in locations, but do not apply them unless we are going
4209 to warn. */
4210 bitpack_d bp = streamer_read_bitpack (&ib);
4211 location_t locus;
4212
4213 stream_input_location (&locus, &bp, data_in);
4214
4215 /* Did we find a difference? */
4216 if (do_warning != -1 || nvals != this_enum.vals.length ())
4217 {
4218 data_in->location_cache.apply_location_cache ();
4219
4220 const int opts = DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES;
4221 char *dmgname = cplus_demangle (name, opts);
4222 if (this_enum.warned
4223 || !warning_at (this_enum.locus,
4224 OPT_Wodr, "type %qs violates the "
4225 "C++ One Definition Rule",
4226 dmgname))
4227 do_warning = -1;
4228 else
4229 {
4230 this_enum.warned = true;
4231 if (do_warning == -1)
4232 inform (locus,
4233 "an enum with different number of values is defined"
4234 " in another translation unit");
4235 else if (warn_name)
4236 inform (locus,
4237 "an enum with different value name"
4238 " is defined in another translation unit");
4239 else
4240 inform (locus,
4241 "an enum with different values"
4242 " is defined in another translation unit");
4243 }
4244 }
4245 else
4246 data_in->location_cache.revert_location_cache ();
4247
4248 /* Finally look up the location of the actual value that diverged. */
4249 for (unsigned j = 0; j < nvals; j++)
4250 {
4251 location_t id_locus;
4252
4253 data_in->location_cache.revert_location_cache ();
4254 stream_input_location (&id_locus, &bp, data_in);
4255
4256 if ((int) j == do_warning)
4257 {
4258 data_in->location_cache.apply_location_cache ();
4259
4260 if (strcmp (warn_name, this_enum.vals[j].name))
4261 inform (this_enum.vals[j].locus,
4262 "name %qs differs from name %qs defined"
4263 " in another translation unit",
4264 this_enum.vals[j].name, warn_name);
4265 else if (this_enum.vals[j].val.get_precision () !=
4266 warn_value.get_precision ())
4267 inform (this_enum.vals[j].locus,
4268 "name %qs is defined as %u-bit while another "
4269 "translation unit defines it as %u-bit",
4270 warn_name, this_enum.vals[j].val.get_precision (),
4271 warn_value.get_precision ());
4272 /* FIXME: In case there is easy way to print wide_ints,
4273 perhaps we could do it here instead of overflow check. */
4274 else if (wi::fits_shwi_p (this_enum.vals[j].val)
4275 && wi::fits_shwi_p (warn_value))
4276 inform (this_enum.vals[j].locus,
4277 "name %qs is defined to %wd while another "
4278 "translation unit defines it as %wd",
4279 warn_name, this_enum.vals[j].val.to_shwi (),
4280 warn_value.to_shwi ());
4281 else
4282 inform (this_enum.vals[j].locus,
4283 "name %qs is defined to different value "
4284 "in another translation unit",
4285 warn_name);
4286
4287 inform (id_locus,
4288 "mismatching definition");
4289 }
4290 else
4291 data_in->location_cache.revert_location_cache ();
4292 }
4293 if (warn_name)
4294 free (warn_name);
4295 obstack_free (&odr_enum_obstack, name);
4296 }
4297 }
4298 lto_free_section_data (file_data, LTO_section_odr_types, NULL, data,
4299 len);
4300 lto_data_in_delete (data_in);
4301 }
4302
4303 /* Read all ODR type sections. */
4304
4305 static void
4306 ipa_odr_summary_read (void)
4307 {
4308 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4309 struct lto_file_decl_data *file_data;
4310 unsigned int j = 0;
4311
4312 while ((file_data = file_data_vec[j++]))
4313 {
4314 size_t len;
4315 const char *data
4316 = lto_get_summary_section_data (file_data, LTO_section_odr_types,
4317 &len);
4318 if (data)
4319 ipa_odr_read_section (file_data, data, len);
4320 }
4321 /* Enum info is used only to produce warnings. The only case where we
4322 will need it again is streaming for incremental LTO. */
4323 if (flag_incremental_link != INCREMENTAL_LINK_LTO)
4324 {
4325 delete odr_enum_map;
4326 obstack_free (&odr_enum_obstack, NULL);
4327 odr_enum_map = NULL;
4328 }
4329 }
4330
4331 namespace {
4332
4333 const pass_data pass_data_ipa_odr =
4334 {
4335 IPA_PASS, /* type */
4336 "odr", /* name */
4337 OPTGROUP_NONE, /* optinfo_flags */
4338 TV_IPA_ODR, /* tv_id */
4339 0, /* properties_required */
4340 0, /* properties_provided */
4341 0, /* properties_destroyed */
4342 0, /* todo_flags_start */
4343 0, /* todo_flags_finish */
4344 };
4345
4346 class pass_ipa_odr : public ipa_opt_pass_d
4347 {
4348 public:
4349 pass_ipa_odr (gcc::context *ctxt)
4350 : ipa_opt_pass_d (pass_data_ipa_odr, ctxt,
4351 NULL, /* generate_summary */
4352 ipa_odr_summary_write, /* write_summary */
4353 ipa_odr_summary_read, /* read_summary */
4354 NULL, /* write_optimization_summary */
4355 NULL, /* read_optimization_summary */
4356 NULL, /* stmt_fixup */
4357 0, /* function_transform_todo_flags_start */
4358 NULL, /* function_transform */
4359 NULL) /* variable_transform */
4360 {}
4361
4362 /* opt_pass methods: */
4363 virtual bool gate (function *)
4364 {
4365 return (in_lto_p || flag_lto);
4366 }
4367
4368 virtual unsigned int execute (function *)
4369 {
4370 return 0;
4371 }
4372
4373 }; // class pass_ipa_odr
4374
4375 } // anon namespace
4376
4377 ipa_opt_pass_d *
4378 make_pass_ipa_odr (gcc::context *ctxt)
4379 {
4380 return new pass_ipa_odr (ctxt);
4381 }
4382
4383
4384 #include "gt-ipa-devirt.h"