1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2020 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
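     As an illustration, a minimal (hypothetical) ODR violation of the kind
     this file diagnoses with -Wodr:

	// a.C
	struct S { int i; virtual void f (); };
	// b.C
	struct S { long i; virtual void f (); };  // same name, other layout

     Both definitions mangle to the same name, so with LTO the two types
     are matched by name and the mismatch in their fields is reported.
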
39 OTR = OBJ_TYPE_REF
40 This is the GIMPLE representation of the type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is the type of the class whose method is called.
43 otr_token is the index into the virtual table where the address is taken.
44
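     For example, in a GIMPLE dump a polymorphic call may be rendered
     roughly as

	OBJ_TYPE_REF(_3;obj_2->1) (obj_2);

     (the exact syntax varies between releases); here otr_type is the
     static type of *obj_2 and otr_token is 1, i.e. the second slot of
     the virtual table.
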
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by the BINFO_BASE_BINFO
55 vector. Members of this vector are not the BINFOs associated
56 with the base types. Rather they are new copies of those BINFOs
57 (base BINFOs). Their virtual tables may differ from the
58 virtual table of the base type. Also BINFO_OFFSET specifies the
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of the base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by the
64 BINFO_VTABLE of the base BINFOs (which differs from the BINFO_VTABLE
65 of the BINFO associated with the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns the proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
73 This is the index of a virtual method in the virtual table associated
74 with the type defining it. The token can be looked up from an
75 OBJ_TYPE_REF or from the DECL_VINDEX of a given virtual method.
76
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains the otr_type and otr_token taken from the
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers the construction of the type inheritance
85 graph.
86
87 We reconstruct it based on the types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are odr_type structures. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by the ODR rule.
96 (Multiple type nodes appear only with link-time optimization.)
97
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given the parameters found in
103 an indirect polymorphic edge, all possible polymorphic call targets of the call.
104
105 pass_ipa_devirt performs simple speculative devirtualization.
106 */
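
/* As an illustration, speculative devirtualization conceptually rewrites
   a polymorphic call

     obj->foo ();

   into a guarded direct call

     if (obj has the virtual table of class A)
       A::foo (obj);	// direct call; can be inlined
     else
       obj->foo ();	// fallback polymorphic call

   when the type inheritance graph shows A::foo to be the only or the
   most likely target.  */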
107
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "backend.h"
112 #include "rtl.h"
113 #include "tree.h"
114 #include "gimple.h"
115 #include "alloc-pool.h"
116 #include "tree-pass.h"
117 #include "cgraph.h"
118 #include "lto-streamer.h"
119 #include "fold-const.h"
120 #include "print-tree.h"
121 #include "calls.h"
122 #include "ipa-utils.h"
123 #include "gimple-fold.h"
124 #include "symbol-summary.h"
125 #include "tree-vrp.h"
126 #include "ipa-prop.h"
127 #include "ipa-fnsummary.h"
128 #include "demangle.h"
129 #include "dbgcnt.h"
130 #include "gimple-pretty-print.h"
131 #include "intl.h"
132 #include "stringpool.h"
133 #include "attribs.h"
134
135 /* Hash based set of pairs of types. */
136 struct type_pair
137 {
138 tree first;
139 tree second;
140 };
141
142 template <>
143 struct default_hash_traits <type_pair>
144 : typed_noop_remove <type_pair>
145 {
146 GTY((skip)) typedef type_pair value_type;
147 GTY((skip)) typedef type_pair compare_type;
148 static hashval_t
149 hash (type_pair p)
150 {
151 return TYPE_UID (p.first) ^ TYPE_UID (p.second);
152 }
153 static bool
154 is_empty (type_pair p)
155 {
156 return p.first == NULL;
157 }
158 static bool
159 is_deleted (type_pair p ATTRIBUTE_UNUSED)
160 {
161 return false;
162 }
163 static bool
164 equal (const type_pair &a, const type_pair &b)
165 {
166 return a.first == b.first && a.second == b.second;
167 }
168 static void
169 mark_empty (type_pair &e)
170 {
171 e.first = NULL;
172 }
173 };
174
175 /* HACK alert: this is used to communicate with ipa-inline-transform that
176 a thunk is being expanded and there is no need to clear the polymorphic
177 call target cache. */
178 bool thunk_expansion;
179
180 static bool odr_types_equivalent_p (tree, tree, bool, bool *,
181 hash_set<type_pair> *,
182 location_t, location_t);
183 static void warn_odr (tree t1, tree t2, tree st1, tree st2,
184 bool warn, bool *warned, const char *reason);
185
186 static bool odr_violation_reported = false;
187
188
189 /* Pointer set of all call targets appearing in the cache. */
190 static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
191
192 /* The node of the type inheritance graph. For each type unique in
193 the One Definition Rule (ODR) sense, we produce one node linking all
194 main variants of types equivalent to it, its bases and derived types. */
195
196 struct GTY(()) odr_type_d
197 {
198 /* leader type. */
199 tree type;
200 /* All bases; built only for main variants of types. */
201 vec<odr_type> GTY((skip)) bases;
202 /* All derived types with virtual methods seen in the unit;
203 built only for main variants of types. */
204 vec<odr_type> GTY((skip)) derived_types;
205
206 /* All equivalent types, if more than one. */
207 vec<tree, va_gc> *types;
208 /* Set of all equivalent types, if NON-NULL. */
209 hash_set<tree> * GTY((skip)) types_set;
210
211 /* Unique ID indexing the type in odr_types array. */
212 int id;
213 /* Is it in anonymous namespace? */
214 bool anonymous_namespace;
215 /* Do we know about all derivations of given type? */
216 bool all_derivations_known;
217 /* Did we report ODR violation here? */
218 bool odr_violated;
219 /* Set when a virtual table without RTTI prevailed over one with it. */
220 bool rtti_broken;
221 /* Set when the canonical type is determined using the type name. */
222 bool tbaa_enabled;
223 };
224
225 /* Return TRUE if all derived types of T are known and thus
226 we may consider the walk of derived types complete.
227
228 This is typically true only for final anonymous namespace types and types
229 defined within functions (that may be COMDAT and thus shared across units,
230 but with the same set of derived types). */
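
/* For example (an illustrative sketch):

     struct A final { virtual void f (); };	      // final type
     namespace { struct B { virtual void f (); }; }   // anonymous namespace

   both satisfy the predicate below, while an ordinary exported polymorphic
   class does not, since another translation unit may still derive from
   it.  */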
231
232 bool
233 type_all_derivations_known_p (const_tree t)
234 {
235 if (TYPE_FINAL_P (t))
236 return true;
237 if (flag_ltrans)
238 return false;
239 /* Non-C++ types may have IDENTIFIER_NODE here, do not crash. */
240 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
241 return true;
242 if (type_in_anonymous_namespace_p (t))
243 return true;
244 return (decl_function_context (TYPE_NAME (t)) != NULL);
245 }
246
247 /* Return TRUE if the type's constructors are all visible. */
248
249 static bool
250 type_all_ctors_visible_p (tree t)
251 {
252 return !flag_ltrans
253 && symtab->state >= CONSTRUCTION
254 /* We cannot always use type_all_derivations_known_p.
255 For function local types we must assume the case where
256 the function is COMDAT and shared between units.
257
258 TODO: These cases are quite easy to get, but we need
259 to keep track of C++ privatizing via -Wno-weak
260 as well as the IPA privatizing. */
261 && type_in_anonymous_namespace_p (t);
262 }
263
264 /* Return TRUE if the type may have an instance. */
265
266 static bool
267 type_possibly_instantiated_p (tree t)
268 {
269 tree vtable;
270 varpool_node *vnode;
271
272 /* TODO: Add abstract types here. */
273 if (!type_all_ctors_visible_p (t))
274 return true;
275
276 vtable = BINFO_VTABLE (TYPE_BINFO (t));
277 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
278 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
279 vnode = varpool_node::get (vtable);
280 return vnode && vnode->definition;
281 }
282
283 /* Hash used to unify ODR types based on their mangled name and for anonymous
284 namespace types. */
285
286 struct odr_name_hasher : pointer_hash <odr_type_d>
287 {
288 typedef union tree_node *compare_type;
289 static inline hashval_t hash (const odr_type_d *);
290 static inline bool equal (const odr_type_d *, const tree_node *);
291 static inline void remove (odr_type_d *);
292 };
293
294 static bool
295 can_be_name_hashed_p (tree t)
296 {
297 return (!in_lto_p || odr_type_p (t));
298 }
299
300 /* Hash type by its ODR name. */
301
302 static hashval_t
303 hash_odr_name (const_tree t)
304 {
305 gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
306
307 /* If not in LTO, all main variants are unique, so we can do
308 pointer hash. */
309 if (!in_lto_p)
310 return htab_hash_pointer (t);
311
312 /* Anonymous types are unique. */
313 if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
314 return htab_hash_pointer (t);
315
316 gcc_checking_assert (TYPE_NAME (t)
317 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)));
318 return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
319 }
320
321 /* Return the computed hashcode for ODR_TYPE. */
322
323 inline hashval_t
324 odr_name_hasher::hash (const odr_type_d *odr_type)
325 {
326 return hash_odr_name (odr_type->type);
327 }
328
329 /* For languages with One Definition Rule, work out if
330 types are the same based on their name.
331
332 This is non-trivial for LTO where minor differences in
333 the type representation may have prevented type merging
334 from merging two copies of an otherwise equivalent type.
335
336 Until we start streaming mangled type names, this function works
337 only for polymorphic types.
338 */
339
340 bool
341 types_same_for_odr (const_tree type1, const_tree type2)
342 {
343 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
344
345 type1 = TYPE_MAIN_VARIANT (type1);
346 type2 = TYPE_MAIN_VARIANT (type2);
347
348 if (type1 == type2)
349 return true;
350
351 if (!in_lto_p)
352 return false;
353
354 /* Anonymous namespace types are never duplicated. */
355 if ((type_with_linkage_p (type1) && type_in_anonymous_namespace_p (type1))
356 || (type_with_linkage_p (type2) && type_in_anonymous_namespace_p (type2)))
357 return false;
358
359 /* If both types have mangled names defined, check if they are the same.
360 Watch for anonymous types, which are all mangled as "<anon>". */
361 if (!type_with_linkage_p (type1) || !type_with_linkage_p (type2))
362 return false;
363 if (type_in_anonymous_namespace_p (type1)
364 || type_in_anonymous_namespace_p (type2))
365 return false;
366 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
367 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
368 }
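
/* For illustration: with -flto, two translation units each defining
   struct S { virtual void f (); }; may stream in as two distinct tree
   nodes.  Because both TYPE_DECLs carry the same mangled assembler name,
   types_same_for_odr still recognizes them as the same ODR type even
   though the pointers differ.  */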
369
370 /* Return true if we can decide on ODR equivalency.
371
372 In non-LTO it is always decidable; in LTO however it depends on whether the
373 types have ODR info attached. */
374
375 bool
376 types_odr_comparable (tree t1, tree t2)
377 {
378 return (!in_lto_p
379 || TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2)
380 || (odr_type_p (TYPE_MAIN_VARIANT (t1))
381 && odr_type_p (TYPE_MAIN_VARIANT (t2))));
382 }
383
384 /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
385 known, be conservative and return false. */
386
387 bool
388 types_must_be_same_for_odr (tree t1, tree t2)
389 {
390 if (types_odr_comparable (t1, t2))
391 return types_same_for_odr (t1, t2);
392 else
393 return TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2);
394 }
395
396 /* If T is a compound type, return the type it is based on. */
397
398 static tree
399 compound_type_base (const_tree t)
400 {
401 if (TREE_CODE (t) == ARRAY_TYPE
402 || POINTER_TYPE_P (t)
403 || TREE_CODE (t) == COMPLEX_TYPE
404 || VECTOR_TYPE_P (t))
405 return TREE_TYPE (t);
406 if (TREE_CODE (t) == METHOD_TYPE)
407 return TYPE_METHOD_BASETYPE (t);
408 if (TREE_CODE (t) == OFFSET_TYPE)
409 return TYPE_OFFSET_BASETYPE (t);
410 return NULL_TREE;
411 }
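
/* For example: compound_type_base maps "T *", "T[N]", "complex T" and
   vector-of-T types to T, a method type to its class type, an offset
   type to its base type, and a non-compound type such as "int" to
   NULL_TREE.  */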
412
413 /* Return true if T is either an ODR type or a compound type based on one.
414 If the function returns true, we know that T is a type originating from C++
415 source even at link-time. */
416
417 bool
418 odr_or_derived_type_p (const_tree t)
419 {
420 do
421 {
422 if (odr_type_p (TYPE_MAIN_VARIANT (t)))
423 return true;
424 /* Function type is a tricky one. Basically we can consider it
425 ODR derived if return type or any of the parameters is.
426 We need to check all parameters because LTO streaming merges
427 common types (such as void) and they are not considered ODR then. */
428 if (TREE_CODE (t) == FUNCTION_TYPE)
429 {
430 if (TYPE_METHOD_BASETYPE (t))
431 t = TYPE_METHOD_BASETYPE (t);
432 else
433 {
434 if (TREE_TYPE (t) && odr_or_derived_type_p (TREE_TYPE (t)))
435 return true;
436 for (t = TYPE_ARG_TYPES (t); t; t = TREE_CHAIN (t))
437 if (odr_or_derived_type_p (TYPE_MAIN_VARIANT (TREE_VALUE (t))))
438 return true;
439 return false;
440 }
441 }
442 else
443 t = compound_type_base (t);
444 }
445 while (t);
446 return t;
447 }
448
449 /* Compare types T1 and T2 and return true if they are
450 equivalent. */
451
452 inline bool
453 odr_name_hasher::equal (const odr_type_d *o1, const tree_node *t2)
454 {
455 tree t1 = o1->type;
456
457 gcc_checking_assert (TYPE_MAIN_VARIANT (t2) == t2);
458 gcc_checking_assert (TYPE_MAIN_VARIANT (t1) == t1);
459 if (t1 == t2)
460 return true;
461 if (!in_lto_p)
462 return false;
463 /* Check for anonymous namespaces. */
464 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
465 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
466 return false;
467 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1)));
468 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
469 return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
470 == DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
471 }
472
473 /* Free ODR type V. */
474
475 inline void
476 odr_name_hasher::remove (odr_type_d *v)
477 {
478 v->bases.release ();
479 v->derived_types.release ();
480 if (v->types_set)
481 delete v->types_set;
482 ggc_free (v);
483 }
484
485 /* ODR type hash used to look up ODR type based on tree type node. */
486
487 typedef hash_table<odr_name_hasher> odr_hash_type;
488 static odr_hash_type *odr_hash;
489
490 /* ODR types are also stored into the ODR_TYPE vector to allow consistent
491 walking. Bases appear before derived types. The vector is garbage collected
492 so we won't end up visiting empty types. */
493
494 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
495 #define odr_types (*odr_types_ptr)
496
497 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
498 void
499 set_type_binfo (tree type, tree binfo)
500 {
501 for (; type; type = TYPE_NEXT_VARIANT (type))
502 if (COMPLETE_TYPE_P (type))
503 TYPE_BINFO (type) = binfo;
504 else
505 gcc_assert (!TYPE_BINFO (type));
506 }
507
508 /* Return true if type variants match.
509 This assumes that we already verified that T1 and T2 are variants of the
510 same type. */
511
512 static bool
513 type_variants_equivalent_p (tree t1, tree t2)
514 {
515 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
516 return false;
517
518 if (comp_type_attributes (t1, t2) != 1)
519 return false;
520
521 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
522 && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
523 return false;
524
525 return true;
526 }
527
528 /* Compare T1 and T2 based on name or structure. */
529
530 static bool
531 odr_subtypes_equivalent_p (tree t1, tree t2,
532 hash_set<type_pair> *visited,
533 location_t loc1, location_t loc2)
534 {
535
536 /* This can happen in incomplete types that should be handled earlier. */
537 gcc_assert (t1 && t2);
538
539 if (t1 == t2)
540 return true;
541
542 /* Anonymous namespace types must match exactly. */
543 if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1))
544 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1)))
545 || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2))
546 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2))))
547 return false;
548
549 /* For ODR types be sure to compare their names.
550 To support -Wno-odr-type-merging we allow one type to be non-ODR
551 and the other ODR even though it is a violation. */
552 if (types_odr_comparable (t1, t2))
553 {
554 if (t1 != t2
555 && odr_type_p (TYPE_MAIN_VARIANT (t1))
556 && get_odr_type (TYPE_MAIN_VARIANT (t1), true)->odr_violated)
557 return false;
558 if (!types_same_for_odr (t1, t2))
559 return false;
560 if (!type_variants_equivalent_p (t1, t2))
561 return false;
562 /* Limit recursion: If subtypes are ODR types and we know
563 that they are the same, be happy. */
564 if (odr_type_p (TYPE_MAIN_VARIANT (t1)))
565 return true;
566 }
567
568 /* Component types, builtins and possibly violating ODR types
569 have to be compared structurally. */
570 if (TREE_CODE (t1) != TREE_CODE (t2))
571 return false;
572 if (AGGREGATE_TYPE_P (t1)
573 && (TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
574 return false;
575
576 type_pair pair = {TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2)};
577 if (TYPE_UID (TYPE_MAIN_VARIANT (t1)) > TYPE_UID (TYPE_MAIN_VARIANT (t2)))
578 {
579 pair.first = TYPE_MAIN_VARIANT (t2);
580 pair.second = TYPE_MAIN_VARIANT (t1);
581 }
582 if (visited->add (pair))
583 return true;
584 if (!odr_types_equivalent_p (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2),
585 false, NULL, visited, loc1, loc2))
586 return false;
587 if (!type_variants_equivalent_p (t1, t2))
588 return false;
589 return true;
590 }
591
592 /* Return true if DECL1 and DECL2 are identical methods. Consider
593 name equivalent to name.localalias.xyz. */
594
595 static bool
596 methods_equal_p (tree decl1, tree decl2)
597 {
598 if (DECL_ASSEMBLER_NAME (decl1) == DECL_ASSEMBLER_NAME (decl2))
599 return true;
600 const char sep = symbol_table::symbol_suffix_separator ();
601
602 const char *name1 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl1));
603 const char *ptr1 = strchr (name1, sep);
604 int len1 = ptr1 ? ptr1 - name1 : strlen (name1);
605
606 const char *name2 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl2));
607 const char *ptr2 = strchr (name2, sep);
608 int len2 = ptr2 ? ptr2 - name2 : strlen (name2);
609
610 if (len1 != len2)
611 return false;
612 return !strncmp (name1, name2, len1);
613 }
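
/* For instance (illustrative mangled names): "_ZN1A3fooEv" and
   "_ZN1A3fooEv.localalias.3" compare equal here, because only the part
   before the symbol suffix separator is significant.  */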
614
615 /* Compare two virtual tables, PREVAILING and VTABLE, and output ODR
616 violation warnings. */
617
618 void
619 compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
620 {
621 int n1, n2;
622
623 if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
624 {
625 odr_violation_reported = true;
626 if (DECL_VIRTUAL_P (prevailing->decl))
627 {
628 varpool_node *tmp = prevailing;
629 prevailing = vtable;
630 vtable = tmp;
631 }
632 auto_diagnostic_group d;
633 if (warning_at (DECL_SOURCE_LOCATION
634 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
635 OPT_Wodr,
636 "virtual table of type %qD violates one definition rule",
637 DECL_CONTEXT (vtable->decl)))
638 inform (DECL_SOURCE_LOCATION (prevailing->decl),
639 "variable of same assembler name as the virtual table is "
640 "defined in another translation unit");
641 return;
642 }
643 if (!prevailing->definition || !vtable->definition)
644 return;
645
646 /* If we do not stream ODR type info, do not bother to do useful compare. */
647 if (!TYPE_BINFO (DECL_CONTEXT (vtable->decl))
648 || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable->decl))))
649 return;
650
651 odr_type class_type = get_odr_type (DECL_CONTEXT (vtable->decl), true);
652
653 if (class_type->odr_violated)
654 return;
655
656 for (n1 = 0, n2 = 0; true; n1++, n2++)
657 {
658 struct ipa_ref *ref1, *ref2;
659 bool end1, end2;
660
661 end1 = !prevailing->iterate_reference (n1, ref1);
662 end2 = !vtable->iterate_reference (n2, ref2);
663
664 /* !DECL_VIRTUAL_P means RTTI entry;
665 we warn when RTTI is lost because non-RTTI prevails; we silently
666 accept the other case. */
667 while (!end2
668 && (end1
669 || (methods_equal_p (ref1->referred->decl,
670 ref2->referred->decl)
671 && TREE_CODE (ref1->referred->decl) == FUNCTION_DECL))
672 && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
673 {
674 if (!class_type->rtti_broken)
675 {
676 auto_diagnostic_group d;
677 if (warning_at (DECL_SOURCE_LOCATION
678 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
679 OPT_Wodr,
680 "virtual table of type %qD contains RTTI "
681 "information",
682 DECL_CONTEXT (vtable->decl)))
683 {
684 inform (DECL_SOURCE_LOCATION
685 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
686 "but is prevailed by one without from other"
687 " translation unit");
688 inform (DECL_SOURCE_LOCATION
689 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
690 "RTTI will not work on this type");
691 class_type->rtti_broken = true;
692 }
693 }
694 n2++;
695 end2 = !vtable->iterate_reference (n2, ref2);
696 }
697 while (!end1
698 && (end2
699 || (methods_equal_p (ref2->referred->decl, ref1->referred->decl)
700 && TREE_CODE (ref2->referred->decl) == FUNCTION_DECL))
701 && TREE_CODE (ref1->referred->decl) != FUNCTION_DECL)
702 {
703 n1++;
704 end1 = !prevailing->iterate_reference (n1, ref1);
705 }
706
707 /* Finished? */
708 if (end1 && end2)
709 {
710 /* Extra paranoia; compare the sizes. We do not have information
711 about virtual inheritance offsets, so just be sure that these
712 match.
713 Do this as the very last check so the not very informative error
714 is not output too often. */
715 if (DECL_SIZE (prevailing->decl) != DECL_SIZE (vtable->decl))
716 {
717 class_type->odr_violated = true;
718 auto_diagnostic_group d;
719 tree ctx = TYPE_NAME (DECL_CONTEXT (vtable->decl));
720 if (warning_at (DECL_SOURCE_LOCATION (ctx), OPT_Wodr,
721 "virtual table of type %qD violates "
722 "one definition rule",
723 DECL_CONTEXT (vtable->decl)))
724 {
725 ctx = TYPE_NAME (DECL_CONTEXT (prevailing->decl));
726 inform (DECL_SOURCE_LOCATION (ctx),
727 "the conflicting type defined in another translation"
728 " unit has virtual table of different size");
729 }
730 }
731 return;
732 }
733
734 if (!end1 && !end2)
735 {
736 if (methods_equal_p (ref1->referred->decl, ref2->referred->decl))
737 continue;
738
739 class_type->odr_violated = true;
740
741 /* If the loops above stopped on a non-virtual pointer, we have
742 a mismatch in RTTI information mangling. */
743 if (TREE_CODE (ref1->referred->decl) != FUNCTION_DECL
744 && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
745 {
746 auto_diagnostic_group d;
747 if (warning_at (DECL_SOURCE_LOCATION
748 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
749 OPT_Wodr,
750 "virtual table of type %qD violates "
751 "one definition rule",
752 DECL_CONTEXT (vtable->decl)))
753 {
754 inform (DECL_SOURCE_LOCATION
755 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
756 "the conflicting type defined in another translation "
757 "unit with different RTTI information");
758 }
759 return;
760 }
761 /* At this point both REF1 and REF2 point either to a virtual table
762 or a virtual method. If one points to a virtual table and the other to
763 a method, we can complain the same way as if one table was shorter
764 than the other, pointing out the extra method. */
765 if (TREE_CODE (ref1->referred->decl)
766 != TREE_CODE (ref2->referred->decl))
767 {
768 if (VAR_P (ref1->referred->decl))
769 end1 = true;
770 else if (VAR_P (ref2->referred->decl))
771 end2 = true;
772 }
773 }
774
775 class_type->odr_violated = true;
776
777 /* Complain about size mismatch. Either we have too many virtual
778 functions or too many virtual table pointers. */
779 if (end1 || end2)
780 {
781 if (end1)
782 {
783 varpool_node *tmp = prevailing;
784 prevailing = vtable;
785 vtable = tmp;
786 ref1 = ref2;
787 }
788 auto_diagnostic_group d;
789 if (warning_at (DECL_SOURCE_LOCATION
790 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
791 OPT_Wodr,
792 "virtual table of type %qD violates "
793 "one definition rule",
794 DECL_CONTEXT (vtable->decl)))
795 {
796 if (TREE_CODE (ref1->referring->decl) == FUNCTION_DECL)
797 {
798 inform (DECL_SOURCE_LOCATION
799 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
800 "the conflicting type defined in another translation "
801 "unit");
802 inform (DECL_SOURCE_LOCATION
803 (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
804 "contains additional virtual method %qD",
805 ref1->referred->decl);
806 }
807 else
808 {
809 inform (DECL_SOURCE_LOCATION
810 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
811 "the conflicting type defined in another translation "
812 "unit has virtual table with more entries");
813 }
814 }
815 return;
816 }
817
818 /* And in the last case we have either a mismatch between two virtual
819 methods or two virtual table pointers. */
820 auto_diagnostic_group d;
821 if (warning_at (DECL_SOURCE_LOCATION
822 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), OPT_Wodr,
823 "virtual table of type %qD violates "
824 "one definition rule",
825 DECL_CONTEXT (vtable->decl)))
826 {
827 if (TREE_CODE (ref1->referred->decl) == FUNCTION_DECL)
828 {
829 inform (DECL_SOURCE_LOCATION
830 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
831 "the conflicting type defined in another translation "
832 "unit");
833 gcc_assert (TREE_CODE (ref2->referred->decl)
834 == FUNCTION_DECL);
835 inform (DECL_SOURCE_LOCATION
836 (ref1->referred->ultimate_alias_target ()->decl),
837 "virtual method %qD",
838 ref1->referred->ultimate_alias_target ()->decl);
839 inform (DECL_SOURCE_LOCATION
840 (ref2->referred->ultimate_alias_target ()->decl),
841 "ought to match virtual method %qD but does not",
842 ref2->referred->ultimate_alias_target ()->decl);
843 }
844 else
845 inform (DECL_SOURCE_LOCATION
846 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
847 "the conflicting type defined in another translation "
848 "unit has virtual table with different contents");
849 return;
850 }
851 }
852 }
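
/* An illustrative mismatch the function above diagnoses: one unit defines

     struct S { virtual void f (); };

   while another defines

     struct S { virtual void f (); virtual void g (); };

   The prevailing virtual table then differs in size from the other and
   -Wodr points out the extra virtual method.  */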
853
854 /* Output an ODR violation warning about T1 and T2 with REASON.
855 Display the locations of ST1 and ST2 if REASON speaks about a field or
856 method of the type.
857 If WARN is false, do nothing. Set WARNED if a warning was indeed
858 output. */
859
860 static void
861 warn_odr (tree t1, tree t2, tree st1, tree st2,
862 bool warn, bool *warned, const char *reason)
863 {
864 tree decl2 = TYPE_NAME (TYPE_MAIN_VARIANT (t2));
865 if (warned)
866 *warned = false;
867
868 if (!warn || !TYPE_NAME(TYPE_MAIN_VARIANT (t1)))
869 return;
870
871 /* ODR warnings are output during LTO streaming; we must apply the location
872 cache for potential warnings to be output correctly. */
873 if (lto_location_cache::current_cache)
874 lto_location_cache::current_cache->apply_location_cache ();
875
876 auto_diagnostic_group d;
877 if (t1 != TYPE_MAIN_VARIANT (t1)
878 && TYPE_NAME (t1) != TYPE_NAME (TYPE_MAIN_VARIANT (t1)))
879 {
880 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1))),
881 OPT_Wodr, "type %qT (typedef of %qT) violates the "
882 "C++ One Definition Rule",
883 t1, TYPE_MAIN_VARIANT (t1)))
884 return;
885 }
886 else
887 {
888 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (TYPE_MAIN_VARIANT (t1))),
889 OPT_Wodr, "type %qT violates the C++ One Definition Rule",
890 t1))
891 return;
892 }
893 if (!st1 && !st2)
894 ;
895 /* For FIELD_DECL also support the case where one of the fields is
896 NULL; this is used when the structures have a mismatching number of
897 elements. */
898 else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
899 {
900 inform (DECL_SOURCE_LOCATION (decl2),
901 "a different type is defined in another translation unit");
902 if (!st1)
903 {
904 st1 = st2;
905 st2 = NULL;
906 }
907 inform (DECL_SOURCE_LOCATION (st1),
908 "the first difference of corresponding definitions is field %qD",
909 st1);
910 if (st2)
911 decl2 = st2;
912 }
913 else if (TREE_CODE (st1) == FUNCTION_DECL)
914 {
915 inform (DECL_SOURCE_LOCATION (decl2),
916 "a different type is defined in another translation unit");
917 inform (DECL_SOURCE_LOCATION (st1),
918 "the first difference of corresponding definitions is method %qD",
919 st1);
920 decl2 = st2;
921 }
922 else
923 return;
924 inform (DECL_SOURCE_LOCATION (decl2), reason);
925
926 if (warned)
927 *warned = true;
928 }
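
/* With the function above, a typical -Wodr report looks roughly like:

     warning: type 'struct S' violates the C++ One Definition Rule [-Wodr]
     note: a different type is defined in another translation unit
     note: the first difference of corresponding definitions is field 'i'

   followed by a note carrying REASON at the other definition.  */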
929
930 /* Return true if T1 and T2 are incompatible and we want to recursively
931 dive into them from warn_type_mismatch to give a sensible answer. */
932
933 static bool
934 type_mismatch_p (tree t1, tree t2)
935 {
936 if (odr_or_derived_type_p (t1) && odr_or_derived_type_p (t2)
937 && !odr_types_equivalent_p (t1, t2))
938 return true;
939 return !types_compatible_p (t1, t2);
940 }
941
942
943 /* Types T1 and T2 were found to be incompatible in a context where they
944 cannot be (either used to declare symbols of the same assembler name or
945 unified by the ODR rule). We already output a warning about this, but if
946 possible, output extra information on how the types mismatch.
947
948 This is hard to do in general. We basically handle the common cases.
949
950 If LOC1 and LOC2 are meaningful locations, use them in case the types
951 themselves do not have one. */
952
953 void
954 warn_types_mismatch (tree t1, tree t2, location_t loc1, location_t loc2)
955 {
956 /* Location of type is known only if it has TYPE_NAME and the name is
957 TYPE_DECL. */
958 location_t loc_t1 = TYPE_NAME (t1) && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
959 ? DECL_SOURCE_LOCATION (TYPE_NAME (t1))
960 : UNKNOWN_LOCATION;
961 location_t loc_t2 = TYPE_NAME (t2) && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
962 ? DECL_SOURCE_LOCATION (TYPE_NAME (t2))
963 : UNKNOWN_LOCATION;
964 bool loc_t2_useful = false;
965
966 /* With LTO it is a common case that the locations of both types match.
967 See if T2 has a location that is different from T1's. If so, we will
968 inform the user about the location.
969 Do not consider the locations passed to us in LOC1/LOC2 as those are
970 already output. */
971 if (loc_t2 > BUILTINS_LOCATION && loc_t2 != loc_t1)
972 {
973 if (loc_t1 <= BUILTINS_LOCATION)
974 loc_t2_useful = true;
975 else
976 {
977 expanded_location xloc1 = expand_location (loc_t1);
978 expanded_location xloc2 = expand_location (loc_t2);
979
980 if (strcmp (xloc1.file, xloc2.file)
981 || xloc1.line != xloc2.line
982 || xloc1.column != xloc2.column)
983 loc_t2_useful = true;
984 }
985 }
986
987 if (loc_t1 <= BUILTINS_LOCATION)
988 loc_t1 = loc1;
989 if (loc_t2 <= BUILTINS_LOCATION)
990 loc_t2 = loc2;
991
992 location_t loc = loc_t1 <= BUILTINS_LOCATION ? loc_t2 : loc_t1;
993
994 /* It is a quite common bug to reference an anonymous namespace type in
995 a non-anonymous namespace class. */
996 tree mt1 = TYPE_MAIN_VARIANT (t1);
997 tree mt2 = TYPE_MAIN_VARIANT (t2);
998 if ((type_with_linkage_p (mt1)
999 && type_in_anonymous_namespace_p (mt1))
1000 || (type_with_linkage_p (mt2)
1001 && type_in_anonymous_namespace_p (mt2)))
1002 {
1003 if (!type_with_linkage_p (mt1)
1004 || !type_in_anonymous_namespace_p (mt1))
1005 {
1006 std::swap (t1, t2);
1007 std::swap (mt1, mt2);
1008 std::swap (loc_t1, loc_t2);
1009 }
1010 gcc_assert (TYPE_NAME (mt1)
1011 && TREE_CODE (TYPE_NAME (mt1)) == TYPE_DECL);
1012 tree n1 = TYPE_NAME (mt1);
1013 tree n2 = TYPE_NAME (mt2) ? TYPE_NAME (mt2) : NULL;
1014
1015 if (TREE_CODE (n1) == TYPE_DECL)
1016 n1 = DECL_NAME (n1);
1017 if (n2 && TREE_CODE (n2) == TYPE_DECL)
1018 n2 = DECL_NAME (n2);
1019 /* Most of the time the type names will match; do not be unnecessarily
1020 verbose. */
1021 if (n1 != n2)
1022 inform (loc_t1,
1023 "type %qT defined in anonymous namespace cannot match "
1024 "type %qT across the translation unit boundary",
1025 t1, t2);
1026 else
1027 inform (loc_t1,
1028 "type %qT defined in anonymous namespace cannot match "
1029 "across the translation unit boundary",
1030 t1);
1031 if (loc_t2_useful)
1032 inform (loc_t2,
1033 "the incompatible type defined in another translation unit");
1034 return;
1035 }
1036 /* If types have mangled ODR names and they are different, it is most
1037 informative to output those.
1038 This also covers types defined in different namespaces. */
1039 const char *odr1 = get_odr_name_for_type (mt1);
1040 const char *odr2 = get_odr_name_for_type (mt2);
1041 if (odr1 != NULL && odr2 != NULL && odr1 != odr2)
1042 {
1043 const int opts = DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES;
1044 char *name1 = xstrdup (cplus_demangle (odr1, opts));
1045 char *name2 = cplus_demangle (odr2, opts);
1046 if (name1 && name2 && strcmp (name1, name2))
1047 {
1048 inform (loc_t1,
1049 "type name %qs should match type name %qs",
1050 name1, name2);
1051 if (loc_t2_useful)
1052 inform (loc_t2,
1053 "the incompatible type is defined here");
1054 free (name1);
1055 return;
1056 }
1057 free (name1);
1058 }
1059 /* A tricky case is compound types. Often they appear the same in source
1060 code and the mismatch is dragged in by the type they are built from.
1061 Look for those differences in subtypes and try to be informative. In other
1062 cases just output nothing because the source code is probably different
1063 and in that case we have already output all the necessary info. */
1064 if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
1065 {
1066 if (TREE_CODE (t1) == TREE_CODE (t2))
1067 {
1068 if (TREE_CODE (t1) == ARRAY_TYPE
1069 && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1070 {
1071 tree i1 = TYPE_DOMAIN (t1);
1072 tree i2 = TYPE_DOMAIN (t2);
1073
1074 if (i1 && i2
1075 && TYPE_MAX_VALUE (i1)
1076 && TYPE_MAX_VALUE (i2)
1077 && !operand_equal_p (TYPE_MAX_VALUE (i1),
1078 TYPE_MAX_VALUE (i2), 0))
1079 {
1080 inform (loc,
1081 "array types have different bounds");
1082 return;
1083 }
1084 }
1085 if ((POINTER_TYPE_P (t1) || TREE_CODE (t1) == ARRAY_TYPE)
1086 && type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1087 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1, loc_t2);
1088 else if (TREE_CODE (t1) == METHOD_TYPE
1089 || TREE_CODE (t1) == FUNCTION_TYPE)
1090 {
1091 tree parms1 = NULL, parms2 = NULL;
1092 int count = 1;
1093
1094 if (type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1095 {
1096 inform (loc, "return value type mismatch");
1097 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1,
1098 loc_t2);
1099 return;
1100 }
1101 if (prototype_p (t1) && prototype_p (t2))
1102 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1103 parms1 && parms2;
1104 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2),
1105 count++)
1106 {
1107 if (type_mismatch_p (TREE_VALUE (parms1), TREE_VALUE (parms2)))
1108 {
1109 if (count == 1 && TREE_CODE (t1) == METHOD_TYPE)
1110 inform (loc,
1111 "implicit this pointer type mismatch");
1112 else
1113 inform (loc,
1114 "type mismatch in parameter %i",
1115 count - (TREE_CODE (t1) == METHOD_TYPE));
1116 warn_types_mismatch (TREE_VALUE (parms1),
1117 TREE_VALUE (parms2),
1118 loc_t1, loc_t2);
1119 return;
1120 }
1121 }
1122 if (parms1 || parms2)
1123 {
1124 inform (loc,
1125 "types have different parameter counts");
1126 return;
1127 }
1128 }
1129 }
1130 return;
1131 }
1132
1133 if (types_odr_comparable (t1, t2)
1134 /* We assign integers mangled names to be able to handle
1135 signed/unsigned chars. Accepting them here would however lead to a
1136 confusing message like
1137 "type ‘const int’ itself violates the C++ One Definition Rule" */
1138 && TREE_CODE (t1) != INTEGER_TYPE
1139 && types_same_for_odr (t1, t2))
1140 inform (loc_t1,
1141 "type %qT itself violates the C++ One Definition Rule", t1);
1142 /* Prevent pointless warnings like "struct aa" should match "struct aa". */
1143 else if (TYPE_NAME (t1) == TYPE_NAME (t2)
1144 && TREE_CODE (t1) == TREE_CODE (t2) && !loc_t2_useful)
1145 return;
1146 else
1147 inform (loc_t1, "type %qT should match type %qT",
1148 t1, t2);
1149 if (loc_t2_useful)
1150 inform (loc_t2, "the incompatible type is defined here");
1151 }
1152
1153 /* Return true if T should be ignored in TYPE_FIELDS for ODR comparison. */
1154
1155 static bool
1156 skip_in_fields_list_p (tree t)
1157 {
1158 if (TREE_CODE (t) != FIELD_DECL)
1159 return true;
1160 /* The C++ FE introduces zero-sized fields depending on the -std setting;
1161 see PR89358. */
1162 if (DECL_SIZE (t)
1163 && integer_zerop (DECL_SIZE (t))
1164 && DECL_ARTIFICIAL (t)
1165 && DECL_IGNORED_P (t)
1166 && !DECL_NAME (t))
1167 return true;
1168 return false;
1169 }
1170
1171 /* Compare T1 and T2, report ODR violations if WARN is true and set
1172 WARNED to true if anything is reported. Return true if types match.
1173 If true is returned, the types are also compatible in the sense of
1174 gimple_canonical_types_compatible_p.
1175 If LOC1 and LOC2 are not UNKNOWN_LOCATION they may be used to output a warning
1176 about the type if the type itself does not have a location. */
1177
1178 static bool
1179 odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
1180 hash_set<type_pair> *visited,
1181 location_t loc1, location_t loc2)
1182 {
1183 /* Check first for the obvious case of pointer identity. */
1184 if (t1 == t2)
1185 return true;
1186
1187 /* Can't be the same type if the types don't have the same code. */
1188 if (TREE_CODE (t1) != TREE_CODE (t2))
1189 {
1190 warn_odr (t1, t2, NULL, NULL, warn, warned,
1191 G_("a different type is defined in another translation unit"));
1192 return false;
1193 }
1194
1195 if ((type_with_linkage_p (TYPE_MAIN_VARIANT (t1))
1196 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t1)))
1197 || (type_with_linkage_p (TYPE_MAIN_VARIANT (t2))
1198 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t2))))
1199 {
1200 /* We cannot trip this when comparing ODR types, only when trying to
1201 match different ODR derivations from different declarations.
1202 So WARN should always be false. */
1203 gcc_assert (!warn);
1204 return false;
1205 }
1206
1207 if (TREE_CODE (t1) == ENUMERAL_TYPE
1208 && TYPE_VALUES (t1) && TYPE_VALUES (t2))
1209 {
1210 tree v1, v2;
1211 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
1212 v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
1213 {
1214 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
1215 {
1216 warn_odr (t1, t2, NULL, NULL, warn, warned,
1217 G_("an enum with different value name"
1218 " is defined in another translation unit"));
1219 return false;
1220 }
1221 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2), 0))
1222 {
1223 warn_odr (t1, t2, NULL, NULL, warn, warned,
1224 G_("an enum with different values is defined"
1225 " in another translation unit"));
1226 return false;
1227 }
1228 }
1229 if (v1 || v2)
1230 {
1231 warn_odr (t1, t2, NULL, NULL, warn, warned,
1232 G_("an enum with mismatching number of values "
1233 "is defined in another translation unit"));
1234 return false;
1235 }
1236 }
1237
1238 /* Non-aggregate types can be handled cheaply. */
1239 if (INTEGRAL_TYPE_P (t1)
1240 || SCALAR_FLOAT_TYPE_P (t1)
1241 || FIXED_POINT_TYPE_P (t1)
1242 || TREE_CODE (t1) == VECTOR_TYPE
1243 || TREE_CODE (t1) == COMPLEX_TYPE
1244 || TREE_CODE (t1) == OFFSET_TYPE
1245 || POINTER_TYPE_P (t1))
1246 {
1247 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
1248 {
1249 warn_odr (t1, t2, NULL, NULL, warn, warned,
1250 G_("a type with different precision is defined "
1251 "in another translation unit"));
1252 return false;
1253 }
1254 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
1255 {
1256 warn_odr (t1, t2, NULL, NULL, warn, warned,
1257 G_("a type with different signedness is defined "
1258 "in another translation unit"));
1259 return false;
1260 }
1261
1262 if (TREE_CODE (t1) == INTEGER_TYPE
1263 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
1264 {
1265 /* char WRT uint_8? */
1266 warn_odr (t1, t2, NULL, NULL, warn, warned,
1267 G_("a different type is defined in another "
1268 "translation unit"));
1269 return false;
1270 }
1271
1272 /* For canonical type comparisons we do not want to build SCCs
1273 so we cannot compare pointed-to types. But we can, for now,
1274 require the same pointed-to type kind and match what
1275 useless_type_conversion_p would do. */
1276 if (POINTER_TYPE_P (t1))
1277 {
1278 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
1279 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
1280 {
1281 warn_odr (t1, t2, NULL, NULL, warn, warned,
1282 G_("it is defined as a pointer in different address "
1283 "space in another translation unit"));
1284 return false;
1285 }
1286
1287 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1288 visited, loc1, loc2))
1289 {
1290 warn_odr (t1, t2, NULL, NULL, warn, warned,
1291 G_("it is defined as a pointer to different type "
1292 "in another translation unit"));
1293 if (warn && warned)
1294 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2),
1295 loc1, loc2);
1296 return false;
1297 }
1298 }
1299
1300 if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
1301 && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1302 visited, loc1, loc2))
1303 {
1304 /* Probably specific enough. */
1305 warn_odr (t1, t2, NULL, NULL, warn, warned,
1306 G_("a different type is defined "
1307 "in another translation unit"));
1308 if (warn && warned)
1309 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1310 return false;
1311 }
1312 }
1313 /* Do type-specific comparisons. */
1314 else switch (TREE_CODE (t1))
1315 {
1316 case ARRAY_TYPE:
1317 {
1318 /* Array types are the same if the element types are the same and
1319 the number of elements is the same. */
1320 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1321 visited, loc1, loc2))
1322 {
1323 warn_odr (t1, t2, NULL, NULL, warn, warned,
1324 G_("a different type is defined in another "
1325 "translation unit"));
1326 if (warn && warned)
1327 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1328 }
1329 gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
1330 gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
1331 == TYPE_NONALIASED_COMPONENT (t2));
1332
1333 tree i1 = TYPE_DOMAIN (t1);
1334 tree i2 = TYPE_DOMAIN (t2);
1335
1336 /* For an incomplete external array, the type domain can be
1337 NULL_TREE. Check this condition also. */
1338 if (i1 == NULL_TREE || i2 == NULL_TREE)
1339 return type_variants_equivalent_p (t1, t2);
1340
1341 tree min1 = TYPE_MIN_VALUE (i1);
1342 tree min2 = TYPE_MIN_VALUE (i2);
1343 tree max1 = TYPE_MAX_VALUE (i1);
1344 tree max2 = TYPE_MAX_VALUE (i2);
1345
1346 /* In C++, minimums should always be 0. */
1347 gcc_assert (min1 == min2);
1348 if (!operand_equal_p (max1, max2, 0))
1349 {
1350 warn_odr (t1, t2, NULL, NULL, warn, warned,
1351 G_("an array of different size is defined "
1352 "in another translation unit"));
1353 return false;
1354 }
1355 }
1356 break;
1357
1358 case METHOD_TYPE:
1359 case FUNCTION_TYPE:
1360 /* Function types are the same if the return type and argument types
1361 are the same. */
1362 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1363 visited, loc1, loc2))
1364 {
1365 warn_odr (t1, t2, NULL, NULL, warn, warned,
1366 G_("has different return value "
1367 "in another translation unit"));
1368 if (warn && warned)
1369 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1370 return false;
1371 }
1372
1373 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
1374 || !prototype_p (t1) || !prototype_p (t2))
1375 return type_variants_equivalent_p (t1, t2);
1376 else
1377 {
1378 tree parms1, parms2;
1379
1380 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1381 parms1 && parms2;
1382 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
1383 {
1384 if (!odr_subtypes_equivalent_p
1385 (TREE_VALUE (parms1), TREE_VALUE (parms2),
1386 visited, loc1, loc2))
1387 {
1388 warn_odr (t1, t2, NULL, NULL, warn, warned,
1389 G_("has different parameters in another "
1390 "translation unit"));
1391 if (warn && warned)
1392 warn_types_mismatch (TREE_VALUE (parms1),
1393 TREE_VALUE (parms2), loc1, loc2);
1394 return false;
1395 }
1396 }
1397
1398 if (parms1 || parms2)
1399 {
1400 warn_odr (t1, t2, NULL, NULL, warn, warned,
1401 G_("has different parameters "
1402 "in another translation unit"));
1403 return false;
1404 }
1405
1406 return type_variants_equivalent_p (t1, t2);
1407 }
1408
1409 case RECORD_TYPE:
1410 case UNION_TYPE:
1411 case QUAL_UNION_TYPE:
1412 {
1413 tree f1, f2;
1414
1415 /* For aggregate types, all the fields must be the same. */
1416 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1417 {
1418 if (TYPE_BINFO (t1) && TYPE_BINFO (t2)
1419 && polymorphic_type_binfo_p (TYPE_BINFO (t1))
1420 != polymorphic_type_binfo_p (TYPE_BINFO (t2)))
1421 {
1422 if (polymorphic_type_binfo_p (TYPE_BINFO (t1)))
1423 warn_odr (t1, t2, NULL, NULL, warn, warned,
1424 G_("a type defined in another translation unit "
1425 "is not polymorphic"));
1426 else
1427 warn_odr (t1, t2, NULL, NULL, warn, warned,
1428 G_("a type defined in another translation unit "
1429 "is polymorphic"));
1430 return false;
1431 }
1432 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1433 f1 || f2;
1434 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1435 {
1436 /* Skip non-fields. */
1437 while (f1 && skip_in_fields_list_p (f1))
1438 f1 = TREE_CHAIN (f1);
1439 while (f2 && skip_in_fields_list_p (f2))
1440 f2 = TREE_CHAIN (f2);
1441 if (!f1 || !f2)
1442 break;
1443 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1444 {
1445 warn_odr (t1, t2, NULL, NULL, warn, warned,
1446 G_("a type with different virtual table pointers"
1447 " is defined in another translation unit"));
1448 return false;
1449 }
1450 if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
1451 {
1452 warn_odr (t1, t2, NULL, NULL, warn, warned,
1453 G_("a type with different bases is defined "
1454 "in another translation unit"));
1455 return false;
1456 }
1457 if (DECL_NAME (f1) != DECL_NAME (f2)
1458 && !DECL_ARTIFICIAL (f1))
1459 {
1460 warn_odr (t1, t2, f1, f2, warn, warned,
1461 G_("a field with different name is defined "
1462 "in another translation unit"));
1463 return false;
1464 }
1465 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
1466 TREE_TYPE (f2),
1467 visited, loc1, loc2))
1468 {
1469 /* Do not warn about artificial fields and just go into
1470 generic field mismatch warning. */
1471 if (DECL_ARTIFICIAL (f1))
1472 break;
1473
1474 warn_odr (t1, t2, f1, f2, warn, warned,
1475 G_("a field of same name but different type "
1476 "is defined in another translation unit"));
1477 if (warn && warned)
1478 warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2), loc1, loc2);
1479 return false;
1480 }
1481 if (!gimple_compare_field_offset (f1, f2))
1482 {
1483 /* Do not warn about artificial fields and just go into
1484 generic field mismatch warning. */
1485 if (DECL_ARTIFICIAL (f1))
1486 break;
1487 warn_odr (t1, t2, f1, f2, warn, warned,
1488 G_("fields have different layout "
1489 "in another translation unit"));
1490 return false;
1491 }
1492 if (DECL_BIT_FIELD (f1) != DECL_BIT_FIELD (f2))
1493 {
1494 warn_odr (t1, t2, f1, f2, warn, warned,
1495 G_("one field is a bitfield while the other "
1496 "is not"));
1497 return false;
1498 }
1499 else
1500 gcc_assert (DECL_NONADDRESSABLE_P (f1)
1501 == DECL_NONADDRESSABLE_P (f2));
1502 }
1503
1504 /* If one aggregate has more fields than the other, they
1505 are not the same. */
1506 if (f1 || f2)
1507 {
1508 if ((f1 && DECL_VIRTUAL_P (f1)) || (f2 && DECL_VIRTUAL_P (f2)))
1509 warn_odr (t1, t2, NULL, NULL, warn, warned,
1510 G_("a type with different virtual table pointers"
1511 " is defined in another translation unit"));
1512 else if ((f1 && DECL_ARTIFICIAL (f1))
1513 || (f2 && DECL_ARTIFICIAL (f2)))
1514 warn_odr (t1, t2, NULL, NULL, warn, warned,
1515 G_("a type with different bases is defined "
1516 "in another translation unit"));
1517 else
1518 warn_odr (t1, t2, f1, f2, warn, warned,
1519 G_("a type with different number of fields "
1520 "is defined in another translation unit"));
1521
1522 return false;
1523 }
1524 }
1525 break;
1526 }
1527 case VOID_TYPE:
1528 case NULLPTR_TYPE:
1529 break;
1530
1531 default:
1532 debug_tree (t1);
1533 gcc_unreachable ();
1534 }
1535
1536 /* Those are better to come last as they are utterly uninformative. */
1537 if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
1538 && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
1539 {
1540 warn_odr (t1, t2, NULL, NULL, warn, warned,
1541 G_("a type with different size "
1542 "is defined in another translation unit"));
1543 return false;
1544 }
1545
1546 gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
1547 || operand_equal_p (TYPE_SIZE_UNIT (t1),
1548 TYPE_SIZE_UNIT (t2), 0));
1549 return type_variants_equivalent_p (t1, t2);
1550 }
1551
1552 /* Return true if TYPE1 and TYPE2 are equivalent for the One Definition Rule. */
1553
1554 bool
1555 odr_types_equivalent_p (tree type1, tree type2)
1556 {
1557 gcc_checking_assert (odr_or_derived_type_p (type1)
1558 && odr_or_derived_type_p (type2));
1559
1560 hash_set<type_pair> visited;
1561 return odr_types_equivalent_p (type1, type2, false, NULL,
1562 &visited, UNKNOWN_LOCATION, UNKNOWN_LOCATION);
1563 }
1564
1565 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1566 from VAL->type. This may happen in LTO where tree merging did not merge
1567 all variants of the same type or due to an ODR violation.
1568
1569 Analyze and report ODR violations and add the type to the duplicate list.
1570 If TYPE is more fully specified than VAL->type, make it prevail over
1571 VAL->type. Also if this is the first time we see the definition of a
1572 class, return true so the base types are analyzed. */
1573
1574 static bool
1575 add_type_duplicate (odr_type val, tree type)
1576 {
1577 bool build_bases = false;
1578 bool prevail = false;
1579 bool odr_must_violate = false;
1580
1581 if (!val->types_set)
1582 val->types_set = new hash_set<tree>;
1583
1584 /* Choose the polymorphic type as the leader (this happens only in case of
1585 ODR violations). */
1586 if ((TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1587 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1588 && (TREE_CODE (val->type) != RECORD_TYPE || !TYPE_BINFO (val->type)
1589 || !polymorphic_type_binfo_p (TYPE_BINFO (val->type))))
1590 {
1591 prevail = true;
1592 build_bases = true;
1593 }
1594 /* Always prefer complete type to be the leader. */
1595 else if (!COMPLETE_TYPE_P (val->type) && COMPLETE_TYPE_P (type))
1596 {
1597 prevail = true;
1598 if (TREE_CODE (type) == RECORD_TYPE)
1599 build_bases = TYPE_BINFO (type);
1600 }
1601 else if (COMPLETE_TYPE_P (val->type) && !COMPLETE_TYPE_P (type))
1602 ;
1603 else if (TREE_CODE (val->type) == ENUMERAL_TYPE
1604 && TREE_CODE (type) == ENUMERAL_TYPE
1605 && !TYPE_VALUES (val->type) && TYPE_VALUES (type))
1606 prevail = true;
1607 else if (TREE_CODE (val->type) == RECORD_TYPE
1608 && TREE_CODE (type) == RECORD_TYPE
1609 && TYPE_BINFO (type) && !TYPE_BINFO (val->type))
1610 {
1611 gcc_assert (!val->bases.length ());
1612 build_bases = true;
1613 prevail = true;
1614 }
1615
1616 if (prevail)
1617 std::swap (val->type, type);
1618
1619 val->types_set->add (type);
1620
1621 if (!odr_hash)
1622 return false;
1623
1624 gcc_checking_assert (can_be_name_hashed_p (type)
1625 && can_be_name_hashed_p (val->type));
1626
1627 bool merge = true;
1628 bool base_mismatch = false;
1629 unsigned int i;
1630 bool warned = false;
1631 hash_set<type_pair> visited;
1632
1633 gcc_assert (in_lto_p);
1634 vec_safe_push (val->types, type);
1635
1636 /* If both are class types, compare the bases. */
1637 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1638 && TREE_CODE (val->type) == RECORD_TYPE
1639 && TREE_CODE (type) == RECORD_TYPE
1640 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
1641 {
1642 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1643 != BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1644 {
1645 if (!flag_ltrans && !warned && !val->odr_violated)
1646 {
1647 tree extra_base;
1648 warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1649 "a type with the same name but different "
1650 "number of polymorphic bases is "
1651 "defined in another translation unit");
1652 if (warned)
1653 {
1654 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1655 > BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1656 extra_base = BINFO_BASE_BINFO
1657 (TYPE_BINFO (type),
1658 BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)));
1659 else
1660 extra_base = BINFO_BASE_BINFO
1661 (TYPE_BINFO (val->type),
1662 BINFO_N_BASE_BINFOS (TYPE_BINFO (type)));
1663 tree extra_base_type = BINFO_TYPE (extra_base);
1664 inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type)),
1665 "the extra base is defined here");
1666 }
1667 }
1668 base_mismatch = true;
1669 }
1670 else
1671 for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1672 {
1673 tree base1 = BINFO_BASE_BINFO (TYPE_BINFO (type), i);
1674 tree base2 = BINFO_BASE_BINFO (TYPE_BINFO (val->type), i);
1675 tree type1 = BINFO_TYPE (base1);
1676 tree type2 = BINFO_TYPE (base2);
1677
1678 if (types_odr_comparable (type1, type2))
1679 {
1680 if (!types_same_for_odr (type1, type2))
1681 base_mismatch = true;
1682 }
1683 else
1684 if (!odr_types_equivalent_p (type1, type2))
1685 base_mismatch = true;
1686 if (base_mismatch)
1687 {
1688 if (!warned && !val->odr_violated)
1689 {
1690 warn_odr (type, val->type, NULL, NULL,
1691 !warned, &warned,
1692 "a type with the same name but different base "
1693 "type is defined in another translation unit");
1694 if (warned)
1695 warn_types_mismatch (type1, type2,
1696 UNKNOWN_LOCATION, UNKNOWN_LOCATION);
1697 }
1698 break;
1699 }
1700 if (BINFO_OFFSET (base1) != BINFO_OFFSET (base2))
1701 {
1702 base_mismatch = true;
1703 if (!warned && !val->odr_violated)
1704 warn_odr (type, val->type, NULL, NULL,
1705 !warned, &warned,
1706 "a type with the same name but different base "
1707 "layout is defined in another translation unit");
1708 break;
1709 }
1710 /* One of the bases is not of complete type. */
1711 if (!TYPE_BINFO (type1) != !TYPE_BINFO (type2))
1712 {
1713 /* If we have a polymorphic type info specified for TYPE1
1714 but not for TYPE2 we possibly missed a base when recording
1715 VAL->type earlier.
1716 Be sure this does not happen. */
1717 if (TYPE_BINFO (type1)
1718 && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1719 && !build_bases)
1720 odr_must_violate = true;
1721 break;
1722 }
1723 /* One base is polymorphic and the other not.
1724 This ought to be diagnosed earlier, but do not ICE in the
1725 checking below. */
1726 else if (TYPE_BINFO (type1)
1727 && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1728 != polymorphic_type_binfo_p (TYPE_BINFO (type2)))
1729 {
1730 if (!warned && !val->odr_violated)
1731 warn_odr (type, val->type, NULL, NULL,
1732 !warned, &warned,
1733 "a base of the type is polymorphic only in one "
1734 "translation unit");
1735 base_mismatch = true;
1736 break;
1737 }
1738 }
1739 if (base_mismatch)
1740 {
1741 merge = false;
1742 odr_violation_reported = true;
1743 val->odr_violated = true;
1744
1745 if (symtab->dump_file)
1746 {
1747 fprintf (symtab->dump_file, "ODR base violation\n");
1748
1749 print_node (symtab->dump_file, "", val->type, 0);
1750 putc ('\n', symtab->dump_file);
1751 print_node (symtab->dump_file, "", type, 0);
1752 putc ('\n', symtab->dump_file);
1753 }
1754 }
1755 }
1756
1757 /* Next compare memory layout.
1758 The DECL_SOURCE_LOCATIONs in this invocation came from LTO streaming.
1759 We must apply the location cache to ensure that they are valid
1760 before we can pass them to odr_types_equivalent_p (PR lto/83121). */
1761 if (lto_location_cache::current_cache)
1762 lto_location_cache::current_cache->apply_location_cache ();
1763 /* As a special case we stream mangled names of integer types so we can see
1764 if they are believed to be the same even though they have different
1765 representations. Avoid bogus warnings on mismatches in these. */
1766 if (TREE_CODE (type) != INTEGER_TYPE
1767 && TREE_CODE (val->type) != INTEGER_TYPE
1768 && !odr_types_equivalent_p (val->type, type,
1769 !flag_ltrans && !val->odr_violated && !warned,
1770 &warned, &visited,
1771 DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
1772 DECL_SOURCE_LOCATION (TYPE_NAME (type))))
1773 {
1774 merge = false;
1775 odr_violation_reported = true;
1776 val->odr_violated = true;
1777 }
1778 gcc_assert (val->odr_violated || !odr_must_violate);
1779 /* Sanity check that all bases will be built the same way again. */
1780 if (flag_checking
1781 && COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1782 && TREE_CODE (val->type) == RECORD_TYPE
1783 && TREE_CODE (type) == RECORD_TYPE
1784 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1785 && !val->odr_violated
1786 && !base_mismatch && val->bases.length ())
1787 {
1788 unsigned int num_poly_bases = 0;
1789 unsigned int j;
1790
1791 for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1792 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1793 (TYPE_BINFO (type), i)))
1794 num_poly_bases++;
1795 gcc_assert (num_poly_bases == val->bases.length ());
1796 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type));
1797 i++)
1798 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1799 (TYPE_BINFO (type), i)))
1800 {
1801 odr_type base = get_odr_type
1802 (BINFO_TYPE
1803 (BINFO_BASE_BINFO (TYPE_BINFO (type),
1804 i)),
1805 true);
1806 gcc_assert (val->bases[j] == base);
1807 j++;
1808 }
1809 }
1810
1811
1812 /* Regularize things a little. During LTO the same type may come with
1813 different BINFOs: either because its virtual table was
1814 not merged by tree merging and only later at decl merging, or
1815 because one type comes with an external vtable while the other
1816 comes with an internal one. We want to merge equivalent binfos to
1817 conserve memory and streaming overhead.
1818
1819 The external vtables are more harmful: they contain references
1820 to external declarations of methods that may be defined in the
1821 merged LTO unit. For this reason we absolutely need to remove
1822 them and replace them by internal variants. Not doing so will lead
1823 to incomplete answers from possible_polymorphic_call_targets.
1824
1825 FIXME: disabled for now; because ODR types are now built during
1826 stream-in, the variants do not need to be linked to the type
1827 yet. We need to do the merging in a cleanup pass to be implemented
1828 soon. */
1829 if (!flag_ltrans && merge
1830 && 0
1831 && TREE_CODE (val->type) == RECORD_TYPE
1832 && TREE_CODE (type) == RECORD_TYPE
1833 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1834 && TYPE_MAIN_VARIANT (type) == type
1835 && TYPE_MAIN_VARIANT (val->type) == val->type
1836 && BINFO_VTABLE (TYPE_BINFO (val->type))
1837 && BINFO_VTABLE (TYPE_BINFO (type)))
1838 {
1839 tree master_binfo = TYPE_BINFO (val->type);
1840 tree v1 = BINFO_VTABLE (master_binfo);
1841 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
1842
1843 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
1844 {
1845 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
1846 && operand_equal_p (TREE_OPERAND (v1, 1),
1847 TREE_OPERAND (v2, 1), 0));
1848 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
1849 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
1850 }
1851 gcc_assert (DECL_ASSEMBLER_NAME (v1)
1852 == DECL_ASSEMBLER_NAME (v2));
1853
1854 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
1855 {
1856 unsigned int i;
1857
1858 set_type_binfo (val->type, TYPE_BINFO (type));
1859 for (i = 0; i < val->types->length (); i++)
1860 {
1861 if (TYPE_BINFO ((*val->types)[i])
1862 == master_binfo)
1863 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
1864 }
1865 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
1866 }
1867 else
1868 set_type_binfo (type, master_binfo);
1869 }
1870 return build_bases;
1871 }
1872
1873 /* REF is OBJ_TYPE_REF; return the class the ref corresponds to. */
1874
1875 tree
1876 obj_type_ref_class (const_tree ref)
1877 {
1878 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
1879 ref = TREE_TYPE (ref);
1880 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
1881 ref = TREE_TYPE (ref);
1882 /* We look for the type THIS points to. ObjC also builds
1883 OBJ_TYPE_REF for non-method calls; their first parameter
1884 ID, however, also corresponds to the class type. */
1885 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
1886 || TREE_CODE (ref) == FUNCTION_TYPE);
1887 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
1888 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
1889 tree ret = TREE_TYPE (ref);
1890 if (!in_lto_p && !TYPE_STRUCTURAL_EQUALITY_P (ret))
1891 ret = TYPE_CANONICAL (ret);
1892 else
1893 ret = get_odr_type (ret)->type;
1894 return ret;
1895 }
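
/* For illustration, a hypothetical source fragment (not part of this
   unit) and the class obj_type_ref_class recovers from it:

     struct A { virtual int f (); };
     int call (A *a) { return a->f (); }

   The call above is represented in GIMPLE roughly as
   OBJ_TYPE_REF(_2;(struct A)a_1(D)->0); obj_type_ref_class walks from
   the reference to the METHOD_TYPE and returns the RECORD_TYPE of A,
   i.e. the pointed-to type of the THIS argument.  */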
1896
1897 /* Get the ODR type hash entry for TYPE. If INSERT is true, create
1898 a new entry if none exists yet. */
1899
1900 odr_type
1901 get_odr_type (tree type, bool insert)
1902 {
1903 odr_type_d **slot = NULL;
1904 odr_type val = NULL;
1905 hashval_t hash;
1906 bool build_bases = false;
1907 bool insert_to_odr_array = false;
1908 int base_id = -1;
1909
1910 type = TYPE_MAIN_VARIANT (type);
1911 if (!in_lto_p && !TYPE_STRUCTURAL_EQUALITY_P (type))
1912 type = TYPE_CANONICAL (type);
1913
1914 gcc_checking_assert (can_be_name_hashed_p (type));
1915
1916 hash = hash_odr_name (type);
1917 slot = odr_hash->find_slot_with_hash (type, hash,
1918 insert ? INSERT : NO_INSERT);
1919
1920 if (!slot)
1921 return NULL;
1922
1923 /* See if we already have entry for type. */
1924 if (*slot)
1925 {
1926 val = *slot;
1927
1928 if (val->type != type && insert
1929 && (!val->types_set || !val->types_set->add (type)))
1930 build_bases = add_type_duplicate (val, type);
1931 }
1932 else
1933 {
1934 val = ggc_cleared_alloc<odr_type_d> ();
1935 val->type = type;
1936 val->bases = vNULL;
1937 val->derived_types = vNULL;
1938 if (type_with_linkage_p (type))
1939 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
1940 else
1941 val->anonymous_namespace = 0;
1942 build_bases = COMPLETE_TYPE_P (val->type);
1943 insert_to_odr_array = true;
1944 *slot = val;
1945 }
1946
1947 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1948 && type_with_linkage_p (type)
1949 && type == TYPE_MAIN_VARIANT (type))
1950 {
1951 tree binfo = TYPE_BINFO (type);
1952 unsigned int i;
1953
1954 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);
1955
1956 val->all_derivations_known = type_all_derivations_known_p (type);
1957 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
1958 /* For now record only polymorphic types; others are
1959 pointless for devirtualization and we cannot precisely
1960 determine their ODR equivalency during LTO. */
1961 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
1962 {
1963 tree base_type = BINFO_TYPE (BINFO_BASE_BINFO (binfo, i));
1964 odr_type base = get_odr_type (base_type, true);
1965 gcc_assert (TYPE_MAIN_VARIANT (base_type) == base_type);
1966 base->derived_types.safe_push (val);
1967 val->bases.safe_push (base);
1968 if (base->id > base_id)
1969 base_id = base->id;
1970 }
1971 }
1972 /* Ensure that type always appears after bases. */
1973 if (insert_to_odr_array)
1974 {
1975 if (odr_types_ptr)
1976 val->id = odr_types.length ();
1977 vec_safe_push (odr_types_ptr, val);
1978 }
1979 else if (base_id > val->id)
1980 {
1981 odr_types[val->id] = 0;
1982 /* Be sure we did not record any derived types; these would need
1983 renumbering too. */
1984 gcc_assert (val->derived_types.length() == 0);
1985 val->id = odr_types.length ();
1986 vec_safe_push (odr_types_ptr, val);
1987 }
1988 return val;
1989 }
1990
1991 /* Return the type that prevailed over TYPE in the ODR type hash.
1992 Be careful and punt on ODR violations. */
1993
1994 tree
1995 prevailing_odr_type (tree type)
1996 {
1997 odr_type t = get_odr_type (type, false);
1998 if (!t || t->odr_violated)
1999 return type;
2000 return t->type;
2001 }
2002
2003 /* Set tbaa_enabled flag for TYPE. */
2004
2005 void
2006 enable_odr_based_tbaa (tree type)
2007 {
2008 odr_type t = get_odr_type (type, true);
2009 t->tbaa_enabled = true;
2010 }
2011
2012 /* True if canonical type of TYPE is determined using ODR name. */
2013
2014 bool
2015 odr_based_tbaa_p (const_tree type)
2016 {
2017 if (!RECORD_OR_UNION_TYPE_P (type))
2018 return false;
2019 odr_type t = get_odr_type (const_cast <tree> (type), false);
2020 if (!t || !t->tbaa_enabled)
2021 return false;
2022 return true;
2023 }
2024
2025 /* Set TYPE_CANONICAL of TYPE and all its variants and duplicates
2026 to CANONICAL. */
2027
2028 void
2029 set_type_canonical_for_odr_type (tree type, tree canonical)
2030 {
2031 odr_type t = get_odr_type (type, false);
2032 unsigned int i;
2033 tree tt;
2034
2035 for (tree t2 = t->type; t2; t2 = TYPE_NEXT_VARIANT (t2))
2036 TYPE_CANONICAL (t2) = canonical;
2037 if (t->types)
2038 FOR_EACH_VEC_ELT (*t->types, i, tt)
2039 for (tree t2 = tt; t2; t2 = TYPE_NEXT_VARIANT (t2))
2040 TYPE_CANONICAL (t2) = canonical;
2041 }
2042
2043 /* Return true if we reported some ODR violation on TYPE. */
2044
2045 bool
2046 odr_type_violation_reported_p (tree type)
2047 {
2048 return get_odr_type (type, false)->odr_violated;
2049 }
2050
2051 /* Add TYPE to the ODR type hash. */
2052
2053 void
2054 register_odr_type (tree type)
2055 {
2056 if (!odr_hash)
2057 odr_hash = new odr_hash_type (23);
2058 if (type == TYPE_MAIN_VARIANT (type))
2059 {
2060 /* To get ODR warnings right, first register all sub-types. */
2061 if (RECORD_OR_UNION_TYPE_P (type)
2062 && COMPLETE_TYPE_P (type))
2063 {
2064 /* Limit recursion on types which are already registered. */
2065 odr_type ot = get_odr_type (type, false);
2066 if (ot
2067 && (ot->type == type
2068 || (ot->types_set
2069 && ot->types_set->contains (type))))
2070 return;
2071 for (tree f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
2072 if (TREE_CODE (f) == FIELD_DECL)
2073 {
2074 tree subtype = TREE_TYPE (f);
2075
2076 while (TREE_CODE (subtype) == ARRAY_TYPE)
2077 subtype = TREE_TYPE (subtype);
2078 if (type_with_linkage_p (TYPE_MAIN_VARIANT (subtype)))
2079 register_odr_type (TYPE_MAIN_VARIANT (subtype));
2080 }
2081 if (TYPE_BINFO (type))
2082 for (unsigned int i = 0;
2083 i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
2084 register_odr_type (BINFO_TYPE (BINFO_BASE_BINFO
2085 (TYPE_BINFO (type), i)));
2086 }
2087 get_odr_type (type, true);
2088 }
2089 }
2090
2091 /* Return true if type is known to have no derivations. */
2092
2093 bool
2094 type_known_to_have_no_derivations_p (tree t)
2095 {
2096 return (type_all_derivations_known_p (t)
2097 && (TYPE_FINAL_P (t)
2098 || (odr_hash
2099 && !get_odr_type (t, true)->derived_types.length())));
2100 }
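
/* For example (an illustrative sketch), a type declared as

     struct S final { virtual void f (); };

   is TYPE_FINAL_P and thus trivially has no derivations; for other
   types we additionally require that all derivations are known and
   that the inheritance graph records no derived types.  */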
2101
2102 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
2103 recursive printing. */
2104
2105 static void
2106 dump_odr_type (FILE *f, odr_type t, int indent=0)
2107 {
2108 unsigned int i;
2109 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
2110 print_generic_expr (f, t->type, TDF_SLIM);
2111 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
2112 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
2113 if (TYPE_NAME (t->type))
2114 {
2115 if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t->type)))
2116 fprintf (f, "%*s mangled name: %s\n", indent * 2, "",
2117 IDENTIFIER_POINTER
2118 (DECL_ASSEMBLER_NAME (TYPE_NAME (t->type))));
2119 }
2120 if (t->bases.length ())
2121 {
2122 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
2123 for (i = 0; i < t->bases.length (); i++)
2124 fprintf (f, " %i", t->bases[i]->id);
2125 fprintf (f, "\n");
2126 }
2127 if (t->derived_types.length ())
2128 {
2129 fprintf (f, "%*s derived types:\n", indent * 2, "");
2130 for (i = 0; i < t->derived_types.length (); i++)
2131 dump_odr_type (f, t->derived_types[i], indent + 1);
2132 }
2133 fprintf (f, "\n");
2134 }
2135
2136 /* Dump the type inheritance graph. */
2137
2138 static void
2139 dump_type_inheritance_graph (FILE *f)
2140 {
2141 unsigned int i;
2142 unsigned int num_all_types = 0, num_types = 0, num_duplicates = 0;
2143 if (!odr_types_ptr)
2144 return;
2145 fprintf (f, "\n\nType inheritance graph:\n");
2146 for (i = 0; i < odr_types.length (); i++)
2147 {
2148 if (odr_types[i] && odr_types[i]->bases.length () == 0)
2149 dump_odr_type (f, odr_types[i]);
2150 }
2151 for (i = 0; i < odr_types.length (); i++)
2152 {
2153 if (!odr_types[i])
2154 continue;
2155
2156 num_all_types++;
2157 if (!odr_types[i]->types || !odr_types[i]->types->length ())
2158 continue;
2159
2160 /* To aid ODR warnings we also mangle integer types but do
2161 not consider duplicates there. */
2162 if (TREE_CODE (odr_types[i]->type) == INTEGER_TYPE)
2163 continue;
2164
2165 /* It is normal to have one duplicate and one normal variant. */
2166 if (odr_types[i]->types->length () == 1
2167 && COMPLETE_TYPE_P (odr_types[i]->type)
2168 && !COMPLETE_TYPE_P ((*odr_types[i]->types)[0]))
2169 continue;
2170
2171 num_types++;
2172
2173 unsigned int j;
2174 fprintf (f, "Duplicate tree types for odr type %i\n", i);
2175 print_node (f, "", odr_types[i]->type, 0);
2176 print_node (f, "", TYPE_NAME (odr_types[i]->type), 0);
2177 putc ('\n', f);
2178 for (j = 0; j < odr_types[i]->types->length (); j++)
2179 {
2180 tree t;
2181 num_duplicates++;
2182 fprintf (f, "duplicate #%i\n", j);
2183 print_node (f, "", (*odr_types[i]->types)[j], 0);
2184 t = (*odr_types[i]->types)[j];
2185 while (TYPE_P (t) && TYPE_CONTEXT (t))
2186 {
2187 t = TYPE_CONTEXT (t);
2188 print_node (f, "", t, 0);
2189 }
2190 print_node (f, "", TYPE_NAME ((*odr_types[i]->types)[j]), 0);
2191 putc ('\n', f);
2192 }
2193 }
2194 fprintf (f, "Out of %i types there are %i types with duplicates; "
2195 "%i duplicates overall\n", num_all_types, num_types, num_duplicates);
2196 }
2197
2198 /* Save some WPA->ltrans streaming by freeing stuff needed only for good
2199 ODR warnings.
2200 We free TYPE_VALUES of enums and also make TYPE_DECLs not point back
2201 to the type (the back pointer is needed only to keep them in the same SCC
2202 and preserve location information for output of warnings); subsequently
2203 we make all TYPE_DECLs of the same assembler name equivalent. */
2204
2205 static void
2206 free_odr_warning_data ()
2207 {
2208 static bool odr_data_freed = false;
2209
2210 if (odr_data_freed || !flag_wpa || !odr_types_ptr)
2211 return;
2212
2213 odr_data_freed = true;
2214
2215 for (unsigned int i = 0; i < odr_types.length (); i++)
2216 if (odr_types[i])
2217 {
2218 tree t = odr_types[i]->type;
2219
2220 if (TREE_CODE (t) == ENUMERAL_TYPE)
2221 TYPE_VALUES (t) = NULL;
2222 TREE_TYPE (TYPE_NAME (t)) = void_type_node;
2223
2224 if (odr_types[i]->types)
2225 for (unsigned int j = 0; j < odr_types[i]->types->length (); j++)
2226 {
2227 tree td = (*odr_types[i]->types)[j];
2228
2229 if (TREE_CODE (td) == ENUMERAL_TYPE)
2230 TYPE_VALUES (td) = NULL;
2231 TYPE_NAME (td) = TYPE_NAME (t);
2232 }
2233 }
2234 odr_data_freed = true;
2235 }
2236
2237 /* Initialize IPA devirt and build inheritance tree graph. */
2238
2239 void
2240 build_type_inheritance_graph (void)
2241 {
2242 struct symtab_node *n;
2243 FILE *inheritance_dump_file;
2244 dump_flags_t flags;
2245
2246 if (odr_hash)
2247 {
2248 free_odr_warning_data ();
2249 return;
2250 }
2251 timevar_push (TV_IPA_INHERITANCE);
2252 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
2253 odr_hash = new odr_hash_type (23);
2254
2255 /* We reconstruct the graph starting from types of all methods seen in the
2256 unit. */
2257 FOR_EACH_SYMBOL (n)
2258 if (is_a <cgraph_node *> (n)
2259 && DECL_VIRTUAL_P (n->decl)
2260 && n->real_symbol_p ())
2261 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
2262
2263 /* Look also for virtual tables of types that do not define any methods.
2264
2265 We need this for the case where class B has a virtual base of class A
2266 re-defining its virtual method, and there is a class C with no virtual
2267 methods that has B as a virtual base.
2268
2269 Here we output B's virtual method in two variants - for non-virtual
2270 and for virtual inheritance. B's virtual table has the non-virtual
2271 version, while C's has the virtual one.
2272
2273 For this reason we need to know about C in order to include both
2274 variants of B. More correctly, record_target_from_binfo should
2275 add both variants of the method when walking B, but we have no
2276 link between them.
2277
2278 We rely on the fact that either the method is exported and thus we
2279 assume it is called externally, or C is in an anonymous namespace and
2280 thus we will see the vtable. */
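
/* An illustrative sketch of that situation (hypothetical types):

     struct A { virtual void f (); };
     struct B : virtual A { void f () {} };
     struct C : virtual B { };

   C defines no methods of its own, so no METHOD_BASETYPE points to it,
   yet C's virtual table refers to the virtual-inheritance variant
   of B::f.  */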
2281
2282 else if (is_a <varpool_node *> (n)
2283 && DECL_VIRTUAL_P (n->decl)
2284 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
2285 && TYPE_BINFO (DECL_CONTEXT (n->decl))
2286 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
2287 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
2288 if (inheritance_dump_file)
2289 {
2290 dump_type_inheritance_graph (inheritance_dump_file);
2291 dump_end (TDI_inheritance, inheritance_dump_file);
2292 }
2293 free_odr_warning_data ();
2294 timevar_pop (TV_IPA_INHERITANCE);
2295 }
2296
2297 /* Return true if N has a reference from a live virtual table
2298 (and thus can be a destination of a polymorphic call).
2299 Be conservatively correct when the callgraph is not built or
2300 if the method may be referred to externally. */
2301
2302 static bool
2303 referenced_from_vtable_p (struct cgraph_node *node)
2304 {
2305 int i;
2306 struct ipa_ref *ref;
2307 bool found = false;
2308
2309 if (node->externally_visible
2310 || DECL_EXTERNAL (node->decl)
2311 || node->used_from_other_partition)
2312 return true;
2313
2314 /* Keep this test constant time.
2315 It is unlikely this can happen except for the case where speculative
2316 devirtualization introduced many speculative edges to this node.
2317 In this case the target is very likely alive anyway. */
2318 if (node->ref_list.referring.length () > 100)
2319 return true;
2320
2321 /* We need references built. */
2322 if (symtab->state <= CONSTRUCTION)
2323 return true;
2324
2325 for (i = 0; node->iterate_referring (i, ref); i++)
2326 if ((ref->use == IPA_REF_ALIAS
2327 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
2328 || (ref->use == IPA_REF_ADDR
2329 && VAR_P (ref->referring->decl)
2330 && DECL_VIRTUAL_P (ref->referring->decl)))
2331 {
2332 found = true;
2333 break;
2334 }
2335 return found;
2336 }
2337
2338 /* Return true if TARGET is cxa_pure_virtual. */
2339
2340 static bool
2341 is_cxa_pure_virtual_p (tree target)
2342 {
2343 return target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE
2344 && DECL_NAME (target)
2345 && id_equal (DECL_NAME (target),
2346 "__cxa_pure_virtual");
2347 }
2348
2349 /* If TARGET has an associated node, record it in the NODES array.
2350 CAN_REFER specifies whether the program can refer to the target directly.
2351 If TARGET is unknown (NULL) or it cannot be inserted (for example because
2352 its body was already removed and there is no way to refer to it), clear
2353 COMPLETEP. */
2354
2355 static void
2356 maybe_record_node (vec <cgraph_node *> &nodes,
2357 tree target, hash_set<tree> *inserted,
2358 bool can_refer,
2359 bool *completep)
2360 {
2361 struct cgraph_node *target_node, *alias_target;
2362 enum availability avail;
2363 bool pure_virtual = is_cxa_pure_virtual_p (target);
2364
2365 /* __builtin_unreachable does not need to be added into the
2366 list of targets; the runtime effect of calling it is undefined.
2367 Only "real" virtual methods should be accounted for. */
2368 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE && !pure_virtual)
2369 return;
2370
2371 if (!can_refer)
2372 {
2373 /* The only case when a method of an anonymous namespace becomes
2374 unreferable is when we completely optimized it out. */
2375 if (flag_ltrans
2376 || !target
2377 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
2378 *completep = false;
2379 return;
2380 }
2381
2382 if (!target)
2383 return;
2384
2385 target_node = cgraph_node::get (target);
2386
2387 /* Prefer alias target over aliases, so we do not get confused by
2388 fake duplicates. */
2389 if (target_node)
2390 {
2391 alias_target = target_node->ultimate_alias_target (&avail);
2392 if (target_node != alias_target
2393 && avail >= AVAIL_AVAILABLE
2394 && target_node->get_availability ())
2395 target_node = alias_target;
2396 }
2397
2398 /* A method can be reached by a polymorphic call only if some
2399 of the vtables referring to it are alive.
2400
2401 While this holds for non-anonymous functions, too, there are
2402 cases where we want to keep them in the list; for example,
2403 inline functions with -fno-weak are static, but we may still
2404 devirtualize them when the instance comes from another unit.
2405 The same holds for LTO.
2406
2407 Currently we ignore these functions in speculative devirtualization.
2408 ??? Maybe it would make sense to be more aggressive for LTO even
2409 elsewhere. */
2410 if (!flag_ltrans
2411 && !pure_virtual
2412 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
2413 && (!target_node
2414 || !referenced_from_vtable_p (target_node)))
2415 ;
2416 /* See if TARGET is a useful function we can deal with. */
2417 else if (target_node != NULL
2418 && (TREE_PUBLIC (target)
2419 || DECL_EXTERNAL (target)
2420 || target_node->definition)
2421 && target_node->real_symbol_p ())
2422 {
2423 gcc_assert (!target_node->inlined_to);
2424 gcc_assert (target_node->real_symbol_p ());
2425 /* When sanitizing, do not assume that __cxa_pure_virtual is not called
2426 by a valid program. */
2427 if (flag_sanitize & SANITIZE_UNREACHABLE)
2428 ;
2429 /* Only add the pure virtual if it is the only possible target. This way
2430 we will preserve the diagnostics about pure virtuals called in many
2431 cases without disabling optimization in others. */
2432 else if (pure_virtual)
2433 {
2434 if (nodes.length ())
2435 return;
2436 }
2437 /* If we found a real target, take away cxa_pure_virtual. */
2438 else if (!pure_virtual && nodes.length () == 1
2439 && is_cxa_pure_virtual_p (nodes[0]->decl))
2440 nodes.pop ();
2441 if (pure_virtual && nodes.length ())
2442 return;
2443 if (!inserted->add (target))
2444 {
2445 cached_polymorphic_call_targets->add (target_node);
2446 nodes.safe_push (target_node);
2447 }
2448 }
2449 else if (!completep)
2450 ;
2451 /* We have a definition of __cxa_pure_virtual that is not accessible (it is
2452 optimized out or partitioned to another unit), so we cannot add it. When
2453 not sanitizing, there is nothing to do.
2454 Otherwise declare the list incomplete. */
2455 else if (pure_virtual)
2456 {
2457 if (flag_sanitize & SANITIZE_UNREACHABLE)
2458 *completep = false;
2459 }
2460 else if (flag_ltrans
2461 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
2462 *completep = false;
2463 }
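
/* For illustration (hypothetical type): given

     struct A { virtual void f () = 0; };

   the slot for A::f in A's vtable is __cxa_pure_virtual. The logic
   above records it as a target only when no real override is known,
   which preserves the "pure virtual method called" diagnostics
   without pessimizing calls that do have real targets.  */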
2464
2465 /* See if BINFO's type matches OUTER_TYPE. If so, look up the
2466 BINFO of the subtype of OTR_TYPE at OFFSET, find the method in
2467 that BINFO's vtable, and insert the method into the NODES array
2468 or into BASES_TO_CONSIDER if that array is non-NULL.
2469 Otherwise recurse into base BINFOs.
2470 This matches what get_binfo_at_offset does, but with the offset
2471 being unknown.
2472
2473 TYPE_BINFOS is a stack of BINFOs of types with a defined
2474 virtual table seen on the way from the class type to BINFO.
2475
2476 MATCHED_VTABLES tracks virtual tables in which we already looked
2477 up the virtual function. INSERTED tracks nodes we already
2478 inserted.
2479
2480 ANONYMOUS is true if BINFO is part of an anonymous namespace.
2481
2482 Clear COMPLETEP when we hit an unreferable target.
2483 */
2484
2485 static void
2486 record_target_from_binfo (vec <cgraph_node *> &nodes,
2487 vec <tree> *bases_to_consider,
2488 tree binfo,
2489 tree otr_type,
2490 vec <tree> &type_binfos,
2491 HOST_WIDE_INT otr_token,
2492 tree outer_type,
2493 HOST_WIDE_INT offset,
2494 hash_set<tree> *inserted,
2495 hash_set<tree> *matched_vtables,
2496 bool anonymous,
2497 bool *completep)
2498 {
2499 tree type = BINFO_TYPE (binfo);
2500 int i;
2501 tree base_binfo;
2502
2503
2504 if (BINFO_VTABLE (binfo))
2505 type_binfos.safe_push (binfo);
2506 if (types_same_for_odr (type, outer_type))
2507 {
2508 int i;
2509 tree type_binfo = NULL;
2510
2511 /* Look up the BINFO with the virtual table. For normal types it is
2512 always the last binfo on the stack. */
2513 for (i = type_binfos.length () - 1; i >= 0; i--)
2514 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
2515 {
2516 type_binfo = type_binfos[i];
2517 break;
2518 }
2519 if (BINFO_VTABLE (binfo))
2520 type_binfos.pop ();
2521 /* If this is duplicated BINFO for base shared by virtual inheritance,
2522 we may not have its associated vtable. This is not a problem, since
2523 we will walk it on the other path. */
2524 if (!type_binfo)
2525 return;
2526 tree inner_binfo = get_binfo_at_offset (type_binfo,
2527 offset, otr_type);
2528 if (!inner_binfo)
2529 {
2530 gcc_assert (odr_violation_reported);
2531 return;
2532 }
2533 /* For types in anonymous namespace first check if the respective vtable
2534 is alive. If not, we know the type can't be called. */
2535 if (!flag_ltrans && anonymous)
2536 {
2537 tree vtable = BINFO_VTABLE (inner_binfo);
2538 varpool_node *vnode;
2539
2540 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
2541 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
2542 vnode = varpool_node::get (vtable);
2543 if (!vnode || !vnode->definition)
2544 return;
2545 }
2546 gcc_assert (inner_binfo);
2547 if (bases_to_consider
2548 ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
2549 : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
2550 {
2551 bool can_refer;
2552 tree target = gimple_get_virt_method_for_binfo (otr_token,
2553 inner_binfo,
2554 &can_refer);
2555 if (!bases_to_consider)
2556 maybe_record_node (nodes, target, inserted, can_refer, completep);
2557 /* Destructors are never called via construction vtables. */
2558 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
2559 bases_to_consider->safe_push (target);
2560 }
2561 return;
2562 }
2563
2564 /* Walk bases. */
2565 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2566 /* Walking bases that have no virtual method is a pointless exercise. */
2567 if (polymorphic_type_binfo_p (base_binfo))
2568 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
2569 type_binfos,
2570 otr_token, outer_type, offset, inserted,
2571 matched_vtables, anonymous, completep);
2572 if (BINFO_VTABLE (binfo))
2573 type_binfos.pop ();
2574 }
2575
2576 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
2577 of TYPE, insert them into NODES, and recurse into derived types.
2578 INSERTED is used to avoid duplicate insertions of methods into NODES.
2579 MATCHED_VTABLES is used to avoid walking the same vtable twice.
2580 Clear COMPLETEP if an unreferable target is found.
2581
2582 If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
2583 all cases where BASE_SKIPPED is true (because the base is an abstract
2584 class). */
2585
2586 static void
2587 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
2588 hash_set<tree> *inserted,
2589 hash_set<tree> *matched_vtables,
2590 tree otr_type,
2591 odr_type type,
2592 HOST_WIDE_INT otr_token,
2593 tree outer_type,
2594 HOST_WIDE_INT offset,
2595 bool *completep,
2596 vec <tree> &bases_to_consider,
2597 bool consider_construction)
2598 {
2599 tree binfo = TYPE_BINFO (type->type);
2600 unsigned int i;
2601 auto_vec <tree, 8> type_binfos;
2602 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
2603
2604 /* We may need to consider types w/o instances because of possible derived
2605 types using their methods either directly or via construction vtables.
2606 We are safe to skip them when all derivations are known, since we will
2607 handle them later.
2608 This is done by recording them to BASES_TO_CONSIDER array. */
2609 if (possibly_instantiated || consider_construction)
2610 {
2611 record_target_from_binfo (nodes,
2612 (!possibly_instantiated
2613 && type_all_derivations_known_p (type->type))
2614 ? &bases_to_consider : NULL,
2615 binfo, otr_type, type_binfos, otr_token,
2616 outer_type, offset,
2617 inserted, matched_vtables,
2618 type->anonymous_namespace, completep);
2619 }
2620 for (i = 0; i < type->derived_types.length (); i++)
2621 possible_polymorphic_call_targets_1 (nodes, inserted,
2622 matched_vtables,
2623 otr_type,
2624 type->derived_types[i],
2625 otr_token, outer_type, offset, completep,
2626 bases_to_consider, consider_construction);
2627 }
2628
2629 /* Cache of queries for polymorphic call targets.
2630
2631 Enumerating all call targets may get expensive when there are many
2632 polymorphic calls in the program, so we memoize all the previous
2633 queries and avoid duplicated work. */
2634
2635 class polymorphic_call_target_d
2636 {
2637 public:
2638 HOST_WIDE_INT otr_token;
2639 ipa_polymorphic_call_context context;
2640 odr_type type;
2641 vec <cgraph_node *> targets;
2642 tree decl_warning;
2643 int type_warning;
2644 unsigned int n_odr_types;
2645 bool complete;
2646 bool speculative;
2647 };
2648
2649 /* Polymorphic call target cache helpers. */
2650
2651 struct polymorphic_call_target_hasher
2652 : pointer_hash <polymorphic_call_target_d>
2653 {
2654 static inline hashval_t hash (const polymorphic_call_target_d *);
2655 static inline bool equal (const polymorphic_call_target_d *,
2656 const polymorphic_call_target_d *);
2657 static inline void remove (polymorphic_call_target_d *);
2658 };
2659
2660 /* Return the computed hashcode for ODR_QUERY. */
2661
2662 inline hashval_t
2663 polymorphic_call_target_hasher::hash (const polymorphic_call_target_d *odr_query)
2664 {
2665 inchash::hash hstate (odr_query->otr_token);
2666
2667 hstate.add_hwi (odr_query->type->id);
2668 hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
2669 hstate.add_hwi (odr_query->context.offset);
2670 hstate.add_hwi (odr_query->n_odr_types);
2671
2672 if (odr_query->context.speculative_outer_type)
2673 {
2674 hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
2675 hstate.add_hwi (odr_query->context.speculative_offset);
2676 }
2677 hstate.add_flag (odr_query->speculative);
2678 hstate.add_flag (odr_query->context.maybe_in_construction);
2679 hstate.add_flag (odr_query->context.maybe_derived_type);
2680 hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
2681 hstate.commit_flag ();
2682 return hstate.end ();
2683 }
2684
2685 /* Compare cache entries T1 and T2. */
2686
2687 inline bool
2688 polymorphic_call_target_hasher::equal (const polymorphic_call_target_d *t1,
2689 const polymorphic_call_target_d *t2)
2690 {
2691 return (t1->type == t2->type && t1->otr_token == t2->otr_token
2692 && t1->speculative == t2->speculative
2693 && t1->context.offset == t2->context.offset
2694 && t1->context.speculative_offset == t2->context.speculative_offset
2695 && t1->context.outer_type == t2->context.outer_type
2696 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
2697 && t1->context.maybe_in_construction
2698 == t2->context.maybe_in_construction
2699 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
2700 && (t1->context.speculative_maybe_derived_type
2701 == t2->context.speculative_maybe_derived_type)
2702 /* Adding new type may affect outcome of target search. */
2703 && t1->n_odr_types == t2->n_odr_types);
2704 }
2705
2706 /* Remove entry in polymorphic call target cache hash. */
2707
2708 inline void
2709 polymorphic_call_target_hasher::remove (polymorphic_call_target_d *v)
2710 {
2711 v->targets.release ();
2712 free (v);
2713 }
2714
2715 /* Polymorphic call target query cache. */
2716
2717 typedef hash_table<polymorphic_call_target_hasher>
2718 polymorphic_call_target_hash_type;
2719 static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
2720
2721 /* Destroy polymorphic call target query cache. */
2722
2723 static void
2724 free_polymorphic_call_targets_hash ()
2725 {
2726 if (cached_polymorphic_call_targets)
2727 {
2728 delete polymorphic_call_target_hash;
2729 polymorphic_call_target_hash = NULL;
2730 delete cached_polymorphic_call_targets;
2731 cached_polymorphic_call_targets = NULL;
2732 }
2733 }
2734
2735 /* Force rebuilding the type inheritance graph from scratch.
2736 This is used to make sure that we do not keep references to types
2737 which were not visible to free_lang_data. */
2738
2739 void
2740 rebuild_type_inheritance_graph ()
2741 {
2742 if (!odr_hash)
2743 return;
2744 delete odr_hash;
2745 odr_hash = NULL;
2746 odr_types_ptr = NULL;
2747 free_polymorphic_call_targets_hash ();
2748 }
2749
2750 /* When virtual function is removed, we may need to flush the cache. */
2751
2752 static void
2753 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
2754 {
2755 if (cached_polymorphic_call_targets
2756 && !thunk_expansion
2757 && cached_polymorphic_call_targets->contains (n))
2758 free_polymorphic_call_targets_hash ();
2759 }
2760
2761 /* Look up the base of BINFO that has virtual table VTABLE at OFFSET. */
2762
2763 tree
2764 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2765 tree vtable)
2766 {
2767 tree v = BINFO_VTABLE (binfo);
2768 int i;
2769 tree base_binfo;
2770 unsigned HOST_WIDE_INT this_offset;
2771
2772 if (v)
2773 {
2774 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2775 gcc_unreachable ();
2776
2777 if (offset == this_offset
2778 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2779 return binfo;
2780 }
2781
2782 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2783 if (polymorphic_type_binfo_p (base_binfo))
2784 {
2785 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2786 if (base_binfo)
2787 return base_binfo;
2788 }
2789 return NULL;
2790 }
2791
2792 /* T is known constant value of virtual table pointer.
2793 Store virtual table to V and its offset to OFFSET.
2794 Return false if T does not look like virtual table reference. */
2795
2796 bool
2797 vtable_pointer_value_to_vtable (const_tree t, tree *v,
2798 unsigned HOST_WIDE_INT *offset)
2799 {
2800 /* We expect &MEM[(void *)&virtual_table + 16B].
2801 We obtain object's BINFO from the context of the virtual table.
2802 This one contains pointer to virtual table represented via
2803 POINTER_PLUS_EXPR. Verify that this pointer matches what
2804 we propagated through.
2805
2806 In the case of virtual inheritance, the virtual tables may
2807 be nested, i.e. the offset may be different from 16 and we may
2808 need to dive into the type representation. */
2809 if (TREE_CODE (t) == ADDR_EXPR
2810 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2811 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2812 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2813 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2814 == VAR_DECL)
2815 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2816 (TREE_OPERAND (t, 0), 0), 0)))
2817 {
2818 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2819 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2820 return true;
2821 }
2822
2823 /* An alternative representation, used by the C++ frontend, is
2824 POINTER_PLUS_EXPR. We need to handle it when T comes from a static
2825 variable initializer or a BINFO. */
2826 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2827 {
2828 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2829 t = TREE_OPERAND (t, 0);
2830 }
2831 else
2832 *offset = 0;
2833
2834 if (TREE_CODE (t) != ADDR_EXPR)
2835 return false;
2836 *v = TREE_OPERAND (t, 0);
2837 return true;
2838 }
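
/* A small sketch of the two accepted shapes (the offset 16 is only
   illustrative; it depends on the target pointer size and vtable
   header layout):

     &MEM[(void *)&_ZTV1A + 16B]          ... ADDR_EXPR of a MEM_REF
     POINTER_PLUS_EXPR <&_ZTV1A, 16>      ... initializer/BINFO form

   Both store the VAR_DECL of the vtable _ZTV1A to *V and 16 to
   *OFFSET.  */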
2839
2840 /* T is known constant value of virtual table pointer. Return BINFO of the
2841 instance type. */
2842
2843 tree
2844 vtable_pointer_value_to_binfo (const_tree t)
2845 {
2846 tree vtable;
2847 unsigned HOST_WIDE_INT offset;
2848
2849 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2850 return NULL_TREE;
2851
2852 /* FIXME: for stores of construction vtables we return NULL,
2853 because we do not have a BINFO for those. Eventually we should fix
2854 our representation to allow this case to be handled, too.
2855 In the case we see a store of a BINFO, however, we may assume
2856 that standard folding will be able to cope with it. */
2857 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2858 offset, vtable);
2859 }
2860
2861 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2862 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2863 and insert them in NODES.
2864
2865 MATCHED_VTABLES and INSERTED are used to avoid duplicated work. */
2866
2867 static void
2868 record_targets_from_bases (tree otr_type,
2869 HOST_WIDE_INT otr_token,
2870 tree outer_type,
2871 HOST_WIDE_INT offset,
2872 vec <cgraph_node *> &nodes,
2873 hash_set<tree> *inserted,
2874 hash_set<tree> *matched_vtables,
2875 bool *completep)
2876 {
2877 while (true)
2878 {
2879 HOST_WIDE_INT pos, size;
2880 tree base_binfo;
2881 tree fld;
2882
2883 if (types_same_for_odr (outer_type, otr_type))
2884 return;
2885
2886 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2887 {
2888 if (TREE_CODE (fld) != FIELD_DECL)
2889 continue;
2890
2891 pos = int_bit_position (fld);
2892 size = tree_to_shwi (DECL_SIZE (fld));
2893 if (pos <= offset && (pos + size) > offset
2894 /* Do not get confused by zero sized bases. */
2895 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2896 break;
2897 }
2898 /* Within a class type we should always find corresponding fields. */
2899 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2900
2901 /* Nonbase types should have been stripped by outer_class_type. */
2902 gcc_assert (DECL_ARTIFICIAL (fld));
2903
2904 outer_type = TREE_TYPE (fld);
2905 offset -= pos;
2906
2907 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2908 offset, otr_type);
2909 if (!base_binfo)
2910 {
2911 gcc_assert (odr_violation_reported);
2912 return;
2913 }
2914 gcc_assert (base_binfo);
2915 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2916 {
2917 bool can_refer;
2918 tree target = gimple_get_virt_method_for_binfo (otr_token,
2919 base_binfo,
2920 &can_refer);
2921 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2922 maybe_record_node (nodes, target, inserted, can_refer, completep);
2923 matched_vtables->add (BINFO_VTABLE (base_binfo));
2924 }
2925 }
2926 }
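
/* Why bases must be walked, on a hypothetical example:

     struct A { A (); virtual void f (); };
     struct B : A { void f () {} };

   While the A sub-object of a B is being constructed, the vptr points
   to A's vtable, so a virtual call made from A::A () dispatches to
   A::f rather than B::f. Contexts with maybe_in_construction thus
   need the methods found in bases containing OTR_TYPE as well.  */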
2927
2928 /* When virtual table is removed, we may need to flush the cache. */
2929
2930 static void
2931 devirt_variable_node_removal_hook (varpool_node *n,
2932 void *d ATTRIBUTE_UNUSED)
2933 {
2934 if (cached_polymorphic_call_targets
2935 && DECL_VIRTUAL_P (n->decl)
2936 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2937 free_polymorphic_call_targets_hash ();
2938 }
2939
2940 /* Record of how many calls would benefit from a given type being final. */
2941
2942 struct odr_type_warn_count
2943 {
2944 tree type;
2945 int count;
2946 profile_count dyn_count;
2947 };
2948
2949 /* Record of how many calls would benefit from a given method being final. */
2950
2951 struct decl_warn_count
2952 {
2953 tree decl;
2954 int count;
2955 profile_count dyn_count;
2956 };
2957
2958 /* Information about type and decl warnings. */
2959
2960 class final_warning_record
2961 {
2962 public:
2963 /* If needed, grow the type_warnings vector and initialize new entries
2964 to have dyn_count set to profile_count::zero (). */
2965 void grow_type_warnings (unsigned newlen);
2966
2967 profile_count dyn_count;
2968 auto_vec<odr_type_warn_count> type_warnings;
2969 hash_map<tree, decl_warn_count> decl_warnings;
2970 };
2971
2972 void
2973 final_warning_record::grow_type_warnings (unsigned newlen)
2974 {
2975 unsigned len = type_warnings.length ();
2976 if (newlen > len)
2977 {
2978 type_warnings.safe_grow_cleared (newlen);
2979 for (unsigned i = len; i < newlen; i++)
2980 type_warnings[i].dyn_count = profile_count::zero ();
2981 }
2982 }
2983
2984 class final_warning_record *final_warning_records;
2985
2986 /* Return a vector containing possible targets of a polymorphic call of type
2987 OTR_TYPE calling method OTR_TOKEN within a type of OTR_OUTER_TYPE at OFFSET.
2988 If INCLUDE_BASES is true, walk also base types of OUTER_TYPE containing
2989 OTR_TYPE and include their virtual methods. This is useful for types
2990 possibly in construction or destruction where the virtual table may
2991 temporarily change to one of the base types. INCLUDE_DERIVED_TYPES makes
2992 us walk the inheritance graph for all derivations.
2993
2994 If COMPLETEP is non-NULL, store true if the list is complete.
2995 CACHE_TOKEN (if non-NULL) will get stored to a unique ID of the entry
2996 in the target cache. If the user needs to visit every target list
2997 just once, it can memoize them.
2998
2999 If SPECULATIVE is set, the list will not contain targets that
3000 are not speculatively taken.
3001
3002 The returned vector is placed into the cache. It is NOT the caller's
3003 responsibility to free it. The vector can be freed on a cgraph_remove_node
3004 call if the particular node is a virtual function present in the cache. */
3005
3006 vec <cgraph_node *>
3007 possible_polymorphic_call_targets (tree otr_type,
3008 HOST_WIDE_INT otr_token,
3009 ipa_polymorphic_call_context context,
3010 bool *completep,
3011 void **cache_token,
3012 bool speculative)
3013 {
3014 static struct cgraph_node_hook_list *node_removal_hook_holder;
3015 vec <cgraph_node *> nodes = vNULL;
3016 auto_vec <tree, 8> bases_to_consider;
3017 odr_type type, outer_type;
3018 polymorphic_call_target_d key;
3019 polymorphic_call_target_d **slot;
3020 unsigned int i;
3021 tree binfo, target;
3022 bool complete;
3023 bool can_refer = false;
3024 bool skipped = false;
3025
3026 otr_type = TYPE_MAIN_VARIANT (otr_type);
3027
3028 /* If ODR is not initialized or the context is invalid, return empty
3029 incomplete list. */
3030 if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
3031 {
3032 if (completep)
3033 *completep = context.invalid;
3034 if (cache_token)
3035 *cache_token = NULL;
3036 return nodes;
3037 }
3038
3039 /* Do not bother to compute speculative info when the user does not ask for it. */
3040 if (!speculative || !context.speculative_outer_type)
3041 context.clear_speculation ();
3042
3043 type = get_odr_type (otr_type, true);
3044
3045 /* Recording type variants would waste results cache. */
3046 gcc_assert (!context.outer_type
3047 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3048
3049 /* Look up the outer class type we want to walk.
3050 If we fail to do so, the context is invalid. */
3051 if ((context.outer_type || context.speculative_outer_type)
3052 && !context.restrict_to_inner_class (otr_type))
3053 {
3054 if (completep)
3055 *completep = true;
3056 if (cache_token)
3057 *cache_token = NULL;
3058 return nodes;
3059 }
3060 gcc_assert (!context.invalid);
3061
3062 /* Check that restrict_to_inner_class kept the main variant. */
3063 gcc_assert (!context.outer_type
3064 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3065
3066 /* We canonicalize our query, so we do not need extra hashtable entries. */
3067
3068 /* Without outer type, we have no use for offset. Just do the
3069 basic search from inner type. */
3070 if (!context.outer_type)
3071 context.clear_outer_type (otr_type);
3072 /* We need to update our hierarchy if the type does not exist. */
3073 outer_type = get_odr_type (context.outer_type, true);
3074 /* If the type is final, there are no derivations. */
3075 if (TYPE_FINAL_P (outer_type->type))
3076 context.maybe_derived_type = false;
3077
3078 /* Initialize query cache. */
3079 if (!cached_polymorphic_call_targets)
3080 {
3081 cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
3082 polymorphic_call_target_hash
3083 = new polymorphic_call_target_hash_type (23);
3084 if (!node_removal_hook_holder)
3085 {
3086 node_removal_hook_holder =
3087 symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
3088 symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
3089 NULL);
3090 }
3091 }
3092
3093 if (in_lto_p)
3094 {
3095 if (context.outer_type != otr_type)
3096 context.outer_type
3097 = get_odr_type (context.outer_type, true)->type;
3098 if (context.speculative_outer_type)
3099 context.speculative_outer_type
3100 = get_odr_type (context.speculative_outer_type, true)->type;
3101 }
3102
3103 /* Look up cached answer. */
3104 key.type = type;
3105 key.otr_token = otr_token;
3106 key.speculative = speculative;
3107 key.context = context;
3108 key.n_odr_types = odr_types.length ();
3109 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
3110 if (cache_token)
3111 *cache_token = (void *)*slot;
3112 if (*slot)
3113 {
3114 if (completep)
3115 *completep = (*slot)->complete;
3116 if ((*slot)->type_warning && final_warning_records)
3117 {
3118 final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
3119 if (!final_warning_records->type_warnings
3120 [(*slot)->type_warning - 1].dyn_count.initialized_p ())
3121 final_warning_records->type_warnings
3122 [(*slot)->type_warning - 1].dyn_count = profile_count::zero ();
3123 if (final_warning_records->dyn_count > 0)
3124 final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
3125 = final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
3126 + final_warning_records->dyn_count;
3127 }
3128 if (!speculative && (*slot)->decl_warning && final_warning_records)
3129 {
3130 struct decl_warn_count *c =
3131 final_warning_records->decl_warnings.get ((*slot)->decl_warning);
3132 c->count++;
3133 if (final_warning_records->dyn_count > 0)
3134 c->dyn_count += final_warning_records->dyn_count;
3135 }
3136 return (*slot)->targets;
3137 }
3138
3139 complete = true;
3140
3141 /* Do actual search. */
3142 timevar_push (TV_IPA_VIRTUAL_CALL);
3143 *slot = XCNEW (polymorphic_call_target_d);
3144 if (cache_token)
3145 *cache_token = (void *)*slot;
3146 (*slot)->type = type;
3147 (*slot)->otr_token = otr_token;
3148 (*slot)->context = context;
3149 (*slot)->speculative = speculative;
3150
3151 hash_set<tree> inserted;
3152 hash_set<tree> matched_vtables;
3153
3154 /* First insert targets we speculatively identified as likely. */
3155 if (context.speculative_outer_type)
3156 {
3157 odr_type speculative_outer_type;
3158 bool speculation_complete = true;
3159
3160 /* First insert target from type itself and check if it may have
3161 derived types. */
3162 speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
3163 if (TYPE_FINAL_P (speculative_outer_type->type))
3164 context.speculative_maybe_derived_type = false;
3165 binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
3166 context.speculative_offset, otr_type);
3167 if (binfo)
3168 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3169 &can_refer);
3170 else
3171 target = NULL;
3172
3173 /* In the case we get a final method, we don't need
3174 to walk derivations. */
3175 if (target && DECL_FINAL_P (target))
3176 context.speculative_maybe_derived_type = false;
3177 if (type_possibly_instantiated_p (speculative_outer_type->type))
3178 maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
3179 if (binfo)
3180 matched_vtables.add (BINFO_VTABLE (binfo));
3181
3182
3183 /* Next walk recursively all derived types. */
3184 if (context.speculative_maybe_derived_type)
3185 for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
3186 possible_polymorphic_call_targets_1 (nodes, &inserted,
3187 &matched_vtables,
3188 otr_type,
3189 speculative_outer_type->derived_types[i],
3190 otr_token, speculative_outer_type->type,
3191 context.speculative_offset,
3192 &speculation_complete,
3193 bases_to_consider,
3194 false);
3195 }
3196
3197 if (!speculative || !nodes.length ())
3198 {
3199 /* First see virtual method of type itself. */
3200 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
3201 context.offset, otr_type);
3202 if (binfo)
3203 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3204 &can_refer);
3205 else
3206 {
3207 gcc_assert (odr_violation_reported);
3208 target = NULL;
3209 }
3210
3211 /* Destructors are never called through construction virtual tables,
3212 because the type is always known. */
3213 if (target && DECL_CXX_DESTRUCTOR_P (target))
3214 context.maybe_in_construction = false;
3215
3216 if (target)
3217 {
3218 /* In the case we get a final method, we don't need
3219 to walk derivations. */
3220 if (DECL_FINAL_P (target))
3221 context.maybe_derived_type = false;
3222 }
3223
3224 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
3225 if (type_possibly_instantiated_p (outer_type->type))
3226 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3227 else
3228 skipped = true;
3229
3230 if (binfo)
3231 matched_vtables.add (BINFO_VTABLE (binfo));
3232
3233 /* Next walk recursively all derived types. */
3234 if (context.maybe_derived_type)
3235 {
3236 for (i = 0; i < outer_type->derived_types.length(); i++)
3237 possible_polymorphic_call_targets_1 (nodes, &inserted,
3238 &matched_vtables,
3239 otr_type,
3240 outer_type->derived_types[i],
3241 otr_token, outer_type->type,
3242 context.offset, &complete,
3243 bases_to_consider,
3244 context.maybe_in_construction);
3245
3246 if (!outer_type->all_derivations_known)
3247 {
3248 if (!speculative && final_warning_records
3249 && nodes.length () == 1
3250 && TREE_CODE (TREE_TYPE (nodes[0]->decl)) == METHOD_TYPE)
3251 {
3252 if (complete
3253 && warn_suggest_final_types
3254 && !outer_type->derived_types.length ())
3255 {
3256 final_warning_records->grow_type_warnings
3257 (outer_type->id);
3258 final_warning_records->type_warnings[outer_type->id].count++;
3259 if (!final_warning_records->type_warnings
3260 [outer_type->id].dyn_count.initialized_p ())
3261 final_warning_records->type_warnings
3262 [outer_type->id].dyn_count = profile_count::zero ();
3263 final_warning_records->type_warnings[outer_type->id].dyn_count
3264 += final_warning_records->dyn_count;
3265 final_warning_records->type_warnings[outer_type->id].type
3266 = outer_type->type;
3267 (*slot)->type_warning = outer_type->id + 1;
3268 }
3269 if (complete
3270 && warn_suggest_final_methods
3271 && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
3272 outer_type->type))
3273 {
3274 bool existed;
3275 struct decl_warn_count &c =
3276 final_warning_records->decl_warnings.get_or_insert
3277 (nodes[0]->decl, &existed);
3278
3279 if (existed)
3280 {
3281 c.count++;
3282 c.dyn_count += final_warning_records->dyn_count;
3283 }
3284 else
3285 {
3286 c.count = 1;
3287 c.dyn_count = final_warning_records->dyn_count;
3288 c.decl = nodes[0]->decl;
3289 }
3290 (*slot)->decl_warning = nodes[0]->decl;
3291 }
3292 }
3293 complete = false;
3294 }
3295 }
3296
3297 if (!speculative)
3298 {
3299 /* Destructors are never called through construction virtual tables,
3300 because the type is always known. One of the entries may be
3301 cxa_pure_virtual, so look at the first two of them. */
3302 if (context.maybe_in_construction)
3303 for (i = 0; i < MIN (nodes.length (), 2); i++)
3304 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
3305 context.maybe_in_construction = false;
3306 if (context.maybe_in_construction)
3307 {
3308 if (type != outer_type
3309 && (!skipped
3310 || (context.maybe_derived_type
3311 && !type_all_derivations_known_p (outer_type->type))))
3312 record_targets_from_bases (otr_type, otr_token, outer_type->type,
3313 context.offset, nodes, &inserted,
3314 &matched_vtables, &complete);
3315 if (skipped)
3316 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3317 for (i = 0; i < bases_to_consider.length(); i++)
3318 maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
3319 }
3320 }
3321 }
3322
3323 (*slot)->targets = nodes;
3324 (*slot)->complete = complete;
3325 (*slot)->n_odr_types = odr_types.length ();
3326 if (completep)
3327 *completep = complete;
3328
3329 timevar_pop (TV_IPA_VIRTUAL_CALL);
3330 return nodes;
3331 }
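
/* A minimal usage sketch (hypothetical caller; mirrors how the IPA
   passes consume the list):

     bool final;
     vec <cgraph_node *> targets
       = possible_polymorphic_call_targets (otr_type, otr_token,
					    context, &final);
     if (final && targets.length () == 1)
       ... the call has a unique known target, targets[0] ...  */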
3332
3333 bool
3334 add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
3335 vec<const decl_warn_count*> *vec)
3336 {
3337 vec->safe_push (&value);
3338 return true;
3339 }
3340
3341 /* Dump target list TARGETS into FILE. */
3342
3343 static void
3344 dump_targets (FILE *f, vec <cgraph_node *> targets, bool verbose)
3345 {
3346 unsigned int i;
3347
3348 for (i = 0; i < targets.length (); i++)
3349 {
3350 char *name = NULL;
3351 if (in_lto_p)
3352 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
3353 fprintf (f, " %s/%i", name ? name : targets[i]->name (),
3354 targets[i]->order);
3355 if (in_lto_p)
3356 free (name);
3357 if (!targets[i]->definition)
3358 fprintf (f, " (no definition%s)",
3359 DECL_DECLARED_INLINE_P (targets[i]->decl)
3360 ? " inline" : "");
3361 /* With many targets for every polymorphic call, dumps are going to
3362 be quadratic in size. */
3363 if (i > 10 && !verbose)
3364 {
3365 fprintf (f, " ... and %i more targets\n", targets.length () - i);
3366 return;
3367 }
3368 }
3369 fprintf (f, "\n");
3370 }
3371
3372 /* Dump all possible targets of a polymorphic call. */
3373
3374 void
3375 dump_possible_polymorphic_call_targets (FILE *f,
3376 tree otr_type,
3377 HOST_WIDE_INT otr_token,
3378 const ipa_polymorphic_call_context &ctx,
3379 bool verbose)
3380 {
3381 vec <cgraph_node *> targets;
3382 bool final;
3383 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
3384 unsigned int len;
3385
3386 if (!type)
3387 return;
3388 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3389 ctx,
3390 &final, NULL, false);
3391 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
3392 print_generic_expr (f, type->type, TDF_SLIM);
3393 fprintf (f, " token %i\n", (int)otr_token);
3394
3395 ctx.dump (f);
3396
3397 fprintf (f, " %s%s%s%s\n ",
3398 final ? "This is a complete list." :
3399 "This is partial list; extra targets may be defined in other units.",
3400 ctx.maybe_in_construction ? " (base types included)" : "",
3401 ctx.maybe_derived_type ? " (derived types included)" : "",
3402 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
3403 len = targets.length ();
3404 dump_targets (f, targets, verbose);
3405
3406 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3407 ctx,
3408 &final, NULL, true);
3409 if (targets.length () != len)
3410 {
3411 fprintf (f, " Speculative targets:");
3412 dump_targets (f, targets, verbose);
3413 }
3414 /* Ugly: during callgraph construction the target cache may get populated
3415 before all targets are found. While this is harmless (because all local
3416 types are discovered and only in those cases do we devirtualize fully, and
3417 we don't do speculative devirtualization before the IPA stage) it triggers
3418 the assert here when dumping at that stage also populates the cache with
3419 speculative targets. Quietly ignore this. */
3420 gcc_assert (symtab->state < IPA_SSA || targets.length () <= len);
3421 fprintf (f, "\n");
3422 }
3423
3424
3425 /* Return true if N can possibly be a target of a polymorphic call of
3426 OTR_TYPE/OTR_TOKEN. */
3427
3428 bool
3429 possible_polymorphic_call_target_p (tree otr_type,
3430 HOST_WIDE_INT otr_token,
3431 const ipa_polymorphic_call_context &ctx,
3432 struct cgraph_node *n)
3433 {
3434 vec <cgraph_node *> targets;
3435 unsigned int i;
3436 bool final;
3437
3438 if (fndecl_built_in_p (n->decl, BUILT_IN_UNREACHABLE)
3439 || fndecl_built_in_p (n->decl, BUILT_IN_TRAP))
3440 return true;
3441
3442 if (is_cxa_pure_virtual_p (n->decl))
3443 return true;
3444
3445 if (!odr_hash)
3446 return true;
3447 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
3448 for (i = 0; i < targets.length (); i++)
3449 if (n->semantically_equivalent_p (targets[i]))
3450 return true;
3451
3452 /* At the moment we allow the middle end to dig out new external declarations
3453 as targets of polymorphic calls. */
3454 if (!final && !n->definition)
3455 return true;
3456 return false;
3457 }
3458
3459
3460
3461 /* Return true if N can possibly be a target of a polymorphic call of
3462 OBJ_TYPE_REF expression REF in STMT. */
3463
3464 bool
3465 possible_polymorphic_call_target_p (tree ref,
3466 gimple *stmt,
3467 struct cgraph_node *n)
3468 {
3469 ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
3470 tree call_fn = gimple_call_fn (stmt);
3471
3472 return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
3473 tree_to_uhwi
3474 (OBJ_TYPE_REF_TOKEN (call_fn)),
3475 context,
3476 n);
3477 }
3478
3479
3480 /* After callgraph construction new external nodes may appear.
3481 Add them into the graph. */
3482
3483 void
3484 update_type_inheritance_graph (void)
3485 {
3486 struct cgraph_node *n;
3487
3488 if (!odr_hash)
3489 return;
3490 free_polymorphic_call_targets_hash ();
3491 timevar_push (TV_IPA_INHERITANCE);
3492 /* We reconstruct the graph starting from the types of all methods seen
3493 in the unit. */
3494 FOR_EACH_FUNCTION (n)
3495 if (DECL_VIRTUAL_P (n->decl)
3496 && !n->definition
3497 && n->real_symbol_p ())
3498 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
3499 timevar_pop (TV_IPA_INHERITANCE);
3500 }
3501
3502
3503 /* Return true if N looks like a likely target of a polymorphic call.
3504 Rule out cxa_pure_virtual, noreturns, functions declared cold and
3505 other obvious cases. */
3506
3507 bool
3508 likely_target_p (struct cgraph_node *n)
3509 {
3510 int flags;
3511 /* cxa_pure_virtual and similar things are not likely. */
3512 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
3513 return false;
3514 flags = flags_from_decl_or_type (n->decl);
3515 if (flags & ECF_NORETURN)
3516 return false;
3517 if (lookup_attribute ("cold",
3518 DECL_ATTRIBUTES (n->decl)))
3519 return false;
3520 if (n->frequency < NODE_FREQUENCY_NORMAL)
3521 return false;
3522 /* If there are no live virtual tables referring to the target,
3523 the only way the target can be called is via an instance coming from
3524 another compilation unit; speculative devirtualization is built around
3525 the assumption that this won't happen. */
3526 if (!referenced_from_vtable_p (n))
3527 return false;
3528 return true;
3529 }
3530
3531 /* Compare type warning records P1 and P2; order by descending dynamic
3532 count, then by descending static count. Helper for qsort. */
3533
3534 static int
3535 type_warning_cmp (const void *p1, const void *p2)
3536 {
3537 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3538 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3539
3540 if (t1->dyn_count < t2->dyn_count)
3541 return 1;
3542 if (t1->dyn_count > t2->dyn_count)
3543 return -1;
3544 return t2->count - t1->count;
3545 }
3546
3547 /* Compare decl warning records P1 and P2; order by descending dynamic
3548 count, then by descending static count. Helper for qsort. */
3549
3550 static int
3551 decl_warning_cmp (const void *p1, const void *p2)
3552 {
3553 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3554 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3555
3556 if (t1->dyn_count < t2->dyn_count)
3557 return 1;
3558 if (t1->dyn_count > t2->dyn_count)
3559 return -1;
3560 return t2->count - t1->count;
3561 }
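
/* Editorial example of the order the comparators above produce (made-up
   numbers): records with (dyn_count, count) of (500, 2), (500, 7) and
   (100, 9) sort as (500, 7), (500, 2), (100, 9): dynamic profile counts
   dominate and static counts break ties, both descending. */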
3562
3563
3564 /* Try to speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
3565 context CTX. */
3566
3567 struct cgraph_node *
3568 try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
3569 ipa_polymorphic_call_context ctx)
3570 {
3571 vec <cgraph_node *>targets
3572 = possible_polymorphic_call_targets
3573 (otr_type, otr_token, ctx, NULL, NULL, true);
3574 unsigned int i;
3575 struct cgraph_node *likely_target = NULL;
3576
3577 for (i = 0; i < targets.length (); i++)
3578 if (likely_target_p (targets[i]))
3579 {
3580 if (likely_target)
3581 return NULL;
3582 likely_target = targets[i];
3583 }
3584 if (!likely_target
3585 || !likely_target->definition
3586 || DECL_EXTERNAL (likely_target->decl))
3587 return NULL;
3588
3589 /* Don't use an implicitly-declared destructor (c++/58678). */
3590 struct cgraph_node *non_thunk_target
3591 = likely_target->function_symbol ();
3592 if (DECL_ARTIFICIAL (non_thunk_target->decl))
3593 return NULL;
3594 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3595 && likely_target->can_be_discarded_p ())
3596 return NULL;
3597 return likely_target;
3598 }
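
/* Editorial sketch (hypothetical caller, not part of GCC): code holding a
   polymorphic call context CTX for a call of type OTR_TYPE/OTR_TOKEN could
   use the helper above as:

     struct cgraph_node *target
       = try_speculative_devirtualization (otr_type, otr_token, ctx);
     if (target)
       edge->make_speculative (target, edge->count.apply_scale (8, 10));

   i.e. a non-NULL result is the unique likely target that is safe to
   speculate on; NULL means no speculation should be attempted. */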
3599
3600 /* The ipa-devirt pass.
3601 When a polymorphic call has only one likely target in the unit,
3602 turn it into a speculative call. */
3603
3604 static unsigned int
3605 ipa_devirt (void)
3606 {
3607 struct cgraph_node *n;
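/* Tokens of target lists already proven useless; lets us skip repeated
   scans of identical lists (the token identifies the cached list, see
   possible_polymorphic_call_targets). */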
3608 hash_set<void *> bad_call_targets;
3609 struct cgraph_edge *e;
3610
3611 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
3612 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
3613 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
3614 int ndropped = 0;
3615
3616 if (!odr_types_ptr)
3617 return 0;
3618
3619 if (dump_file)
3620 dump_type_inheritance_graph (dump_file);
3621
3622 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
3623 This is implemented by setting up final_warning_records that are updated
3624 by get_polymorphic_call_targets.
3625 We need to clear the cache in this case to trigger recomputation of all
3626 entries. */
3627 if (warn_suggest_final_methods || warn_suggest_final_types)
3628 {
3629 final_warning_records = new (final_warning_record);
3630 final_warning_records->dyn_count = profile_count::zero ();
3631 final_warning_records->grow_type_warnings (odr_types.length ());
3632 free_polymorphic_call_targets_hash ();
3633 }
3634
3635 FOR_EACH_DEFINED_FUNCTION (n)
3636 {
3637 bool update = false;
3638 if (!opt_for_fn (n->decl, flag_devirtualize))
3639 continue;
3640 if (dump_file && n->indirect_calls)
3641 fprintf (dump_file, "\n\nProcessing function %s\n",
3642 n->dump_name ());
3643 for (e = n->indirect_calls; e; e = e->next_callee)
3644 if (e->indirect_info->polymorphic)
3645 {
3646 struct cgraph_node *likely_target = NULL;
3647 void *cache_token;
3648 bool final;
3649
3650 if (final_warning_records)
3651 final_warning_records->dyn_count = e->count.ipa ();
3652
3653 vec <cgraph_node *>targets
3654 = possible_polymorphic_call_targets
3655 (e, &final, &cache_token, true);
3656 unsigned int i;
3657
3658 /* Trigger warnings by calculating non-speculative targets. */
3659 if (warn_suggest_final_methods || warn_suggest_final_types)
3660 possible_polymorphic_call_targets (e);
3661
3662 if (dump_file)
3663 dump_possible_polymorphic_call_targets
3664 (dump_file, e, (dump_flags & TDF_DETAILS));
3665
3666 npolymorphic++;
3667
3668 /* See if the call can be devirtualized by means of ipa-prop's
3669 polymorphic call context propagation. If not, we can just
3670 forget about this call being polymorphic and avoid some heavy
3671 lifting in remove_unreachable_nodes that will otherwise try to
3672 keep all possible targets alive until inlining and in the inliner
3673 itself.
3674
3675 This may need to be revisited once we add further ways to use
3676 the may edges, but it is a reasonable thing to do right now. */
3677
3678 if ((e->indirect_info->param_index == -1
3679 || (!opt_for_fn (n->decl, flag_devirtualize_speculatively)
3680 && e->indirect_info->vptr_changed))
3681 && !flag_ltrans_devirtualize)
3682 {
3683 e->indirect_info->polymorphic = false;
3684 ndropped++;
3685 if (dump_file)
3686 fprintf (dump_file, "Dropping polymorphic call info;"
3687 " it cannot be used by ipa-prop\n");
3688 }
3689
3690 if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
3691 continue;
3692
3693 if (!e->maybe_hot_p ())
3694 {
3695 if (dump_file)
3696 fprintf (dump_file, "Call is cold\n\n");
3697 ncold++;
3698 continue;
3699 }
3700 if (e->speculative)
3701 {
3702 if (dump_file)
3703 fprintf (dump_file, "Call is already speculated\n\n");
3704 nspeculated++;
3705
3706 /* When dumping, see if we agree with the speculation. */
3707 if (!dump_file)
3708 continue;
3709 }
3710 if (bad_call_targets.contains (cache_token))
3711 {
3712 if (dump_file)
3713 fprintf (dump_file, "Target list is known to be useless\n\n");
3714 nmultiple++;
3715 continue;
3716 }
3717 for (i = 0; i < targets.length (); i++)
3718 if (likely_target_p (targets[i]))
3719 {
3720 if (likely_target)
3721 {
3722 likely_target = NULL;
3723 if (dump_file)
3724 fprintf (dump_file, "More than one likely target\n\n");
3725 nmultiple++;
3726 break;
3727 }
3728 likely_target = targets[i];
3729 }
3730 if (!likely_target)
3731 {
3732 bad_call_targets.add (cache_token);
3733 continue;
3734 }
3735 /* This is reached only when dumping; check if we agree or disagree
3736 with the speculation. */
3737 if (e->speculative)
3738 {
3739 struct cgraph_edge *e2;
3740 struct ipa_ref *ref;
3741 e->speculative_call_info (e2, e, ref);
3742 if (e2->callee->ultimate_alias_target ()
3743 == likely_target->ultimate_alias_target ())
3744 {
3745 fprintf (dump_file, "We agree with speculation\n\n");
3746 nok++;
3747 }
3748 else
3749 {
3750 fprintf (dump_file, "We disagree with speculation\n\n");
3751 nwrong++;
3752 }
3753 continue;
3754 }
3755 if (!likely_target->definition)
3756 {
3757 if (dump_file)
3758 fprintf (dump_file, "Target is not a definition\n\n");
3759 nnotdefined++;
3760 continue;
3761 }
3762 /* Do not introduce new references to external symbols. While we
3763 could handle these just as well, it is common for programs to be
3764 built against headers defining methods they are never actually
3765 linked with. */
3766 if (DECL_EXTERNAL (likely_target->decl))
3767 {
3768 if (dump_file)
3769 fprintf (dump_file, "Target is external\n\n");
3770 nexternal++;
3771 continue;
3772 }
3773 /* Don't use an implicitly-declared destructor (c++/58678). */
3774 struct cgraph_node *non_thunk_target
3775 = likely_target->function_symbol ();
3776 if (DECL_ARTIFICIAL (non_thunk_target->decl))
3777 {
3778 if (dump_file)
3779 fprintf (dump_file, "Target is artificial\n\n");
3780 nartificial++;
3781 continue;
3782 }
3783 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3784 && likely_target->can_be_discarded_p ())
3785 {
3786 if (dump_file)
3787 fprintf (dump_file, "Target is overwritable\n\n");
3788 noverwritable++;
3789 continue;
3790 }
3791 else if (dbg_cnt (devirt))
3792 {
3793 if (dump_enabled_p ())
3794 {
3795 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, e->call_stmt,
3796 "speculatively devirtualizing call "
3797 "in %s to %s\n",
3798 n->dump_name (),
3799 likely_target->dump_name ());
3800 }
3801 if (!likely_target->can_be_discarded_p ())
3802 {
3803 cgraph_node *alias;
3804 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
3805 if (alias)
3806 likely_target = alias;
3807 }
3808 nconverted++;
3809 update = true;
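/* Give the speculative direct edge 8/10 of the original call count;
   the rest stays with the indirect edge that is kept as a fallback. */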
3810 e->make_speculative
3811 (likely_target, e->count.apply_scale (8, 10));
3812 }
3813 }
3814 if (update)
3815 ipa_update_overall_fn_summary (n);
3816 }
3817 if (warn_suggest_final_methods || warn_suggest_final_types)
3818 {
3819 if (warn_suggest_final_types)
3820 {
3821 final_warning_records->type_warnings.qsort (type_warning_cmp);
3822 for (unsigned int i = 0;
3823 i < final_warning_records->type_warnings.length (); i++)
3824 if (final_warning_records->type_warnings[i].count)
3825 {
3826 tree type = final_warning_records->type_warnings[i].type;
3827 int count = final_warning_records->type_warnings[i].count;
3828 profile_count dyn_count
3829 = final_warning_records->type_warnings[i].dyn_count;
3830
3831 if (!(dyn_count > 0))
3832 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3833 OPT_Wsuggest_final_types, count,
3834 "Declaring type %qD final "
3835 "would enable devirtualization of %i call",
3836 "Declaring type %qD final "
3837 "would enable devirtualization of %i calls",
3838 type,
3839 count);
3840 else
3841 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3842 OPT_Wsuggest_final_types, count,
3843 "Declaring type %qD final "
3844 "would enable devirtualization of %i call "
3845 "executed %lli times",
3846 "Declaring type %qD final "
3847 "would enable devirtualization of %i calls "
3848 "executed %lli times",
3849 type,
3850 count,
3851 (long long) dyn_count.to_gcov_type ());
3852 }
3853 }
3854
3855 if (warn_suggest_final_methods)
3856 {
3857 auto_vec<const decl_warn_count*> decl_warnings_vec;
3858
3859 final_warning_records->decl_warnings.traverse
3860 <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
3861 decl_warnings_vec.qsort (decl_warning_cmp);
3862 for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
3863 {
3864 tree decl = decl_warnings_vec[i]->decl;
3865 int count = decl_warnings_vec[i]->count;
3866 profile_count dyn_count
3867 = decl_warnings_vec[i]->dyn_count;
3868
3869 if (!(dyn_count > 0))
3870 if (DECL_CXX_DESTRUCTOR_P (decl))
3871 warning_n (DECL_SOURCE_LOCATION (decl),
3872 OPT_Wsuggest_final_methods, count,
3873 "Declaring virtual destructor of %qD final "
3874 "would enable devirtualization of %i call",
3875 "Declaring virtual destructor of %qD final "
3876 "would enable devirtualization of %i calls",
3877 DECL_CONTEXT (decl), count);
3878 else
3879 warning_n (DECL_SOURCE_LOCATION (decl),
3880 OPT_Wsuggest_final_methods, count,
3881 "Declaring method %qD final "
3882 "would enable devirtualization of %i call",
3883 "Declaring method %qD final "
3884 "would enable devirtualization of %i calls",
3885 decl, count);
3886 else if (DECL_CXX_DESTRUCTOR_P (decl))
3887 warning_n (DECL_SOURCE_LOCATION (decl),
3888 OPT_Wsuggest_final_methods, count,
3889 "Declaring virtual destructor of %qD final "
3890 "would enable devirtualization of %i call "
3891 "executed %lli times",
3892 "Declaring virtual destructor of %qD final "
3893 "would enable devirtualization of %i calls "
3894 "executed %lli times",
3895 DECL_CONTEXT (decl), count,
3896 (long long)dyn_count.to_gcov_type ());
3897 else
3898 warning_n (DECL_SOURCE_LOCATION (decl),
3899 OPT_Wsuggest_final_methods, count,
3900 "Declaring method %qD final "
3901 "would enable devirtualization of %i call "
3902 "executed %lli times",
3903 "Declaring method %qD final "
3904 "would enable devirtualization of %i calls "
3905 "executed %lli times",
3906 decl, count,
3907 (long long)dyn_count.to_gcov_type ());
3908 }
3909 }
3910
3911 delete (final_warning_records);
3912 final_warning_records = 0;
3913 }
3914
3915 if (dump_file)
3916 fprintf (dump_file,
3917 "%i polymorphic calls, %i devirtualized,"
3918 " %i speculatively devirtualized, %i cold\n"
3919 "%i have multiple targets, %i overwritable,"
3920 " %i already speculated (%i agree, %i disagree),"
3921 " %i external, %i not defined, %i artificial, %i infos dropped\n",
3922 npolymorphic, ndevirtualized, nconverted, ncold,
3923 nmultiple, noverwritable, nspeculated, nok, nwrong,
3924 nexternal, nnotdefined, nartificial, ndropped);
3925 return ndevirtualized || ndropped ? TODO_remove_functions : 0;
3926 }
3927
3928 namespace {
3929
3930 const pass_data pass_data_ipa_devirt =
3931 {
3932 IPA_PASS, /* type */
3933 "devirt", /* name */
3934 OPTGROUP_NONE, /* optinfo_flags */
3935 TV_IPA_DEVIRT, /* tv_id */
3936 0, /* properties_required */
3937 0, /* properties_provided */
3938 0, /* properties_destroyed */
3939 0, /* todo_flags_start */
3940 ( TODO_dump_symtab ), /* todo_flags_finish */
3941 };
3942
3943 class pass_ipa_devirt : public ipa_opt_pass_d
3944 {
3945 public:
3946 pass_ipa_devirt (gcc::context *ctxt)
3947 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3948 NULL, /* generate_summary */
3949 NULL, /* write_summary */
3950 NULL, /* read_summary */
3951 NULL, /* write_optimization_summary */
3952 NULL, /* read_optimization_summary */
3953 NULL, /* stmt_fixup */
3954 0, /* function_transform_todo_flags_start */
3955 NULL, /* function_transform */
3956 NULL) /* variable_transform */
3957 {}
3958
3959 /* opt_pass methods: */
3960 virtual bool gate (function *)
3961 {
3962 /* In LTO, always run the IPA passes and decide on a per-function basis
3963 whether the pass is enabled. */
3964 if (in_lto_p)
3965 return true;
3966 return (flag_devirtualize
3967 && (flag_devirtualize_speculatively
3968 || (warn_suggest_final_methods
3969 || warn_suggest_final_types))
3970 && optimize);
3971 }
3972
3973 virtual unsigned int execute (function *) { return ipa_devirt (); }
3974
3975 }; // class pass_ipa_devirt
3976
3977 } // anon namespace
3978
3979 ipa_opt_pass_d *
3980 make_pass_ipa_devirt (gcc::context *ctxt)
3981 {
3982 return new pass_ipa_devirt (ctxt);
3983 }
3984
3985 /* Print the ODR name of TYPE if available.
3986 Demangle it when DEMANGLE is true. */
3987
3988 DEBUG_FUNCTION void
3989 debug_tree_odr_name (tree type, bool demangle)
3990 {
3991 const char *odr = get_odr_name_for_type (type);
3992 if (odr && demangle)
3993 {
3994 const int opts = DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES;
3995 odr = cplus_demangle (odr, opts);
3996 }
3997
3998 fprintf (stderr, "%s\n", odr ? odr : "<unknown ODR name>");
3999 }
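
/* Editorial note: being a DEBUG_FUNCTION, the above is meant for
   interactive use from a debugger, e.g. (assuming a tree TYPE in scope):

     (gdb) call debug_tree_odr_name (type, true)

   which prints the demangled ODR name of TYPE to stderr. */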
4000
4001 #include "gt-ipa-devirt.h"