]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/ipa-devirt.c
ipa-devirt.c (odr_subtypes_equivalent_p): Fix formating.
[thirdparty/gcc.git] / gcc / ipa-devirt.c
1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2015 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
44
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of base binfos (that differs of BINFO_VTABLE of
65 binfo associated to the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
76
77 polymorphic (indirect) call
78 This is callgraph representation of virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
97
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given the parameters found in an
103 indirect polymorphic edge, all possible polymorphic call targets of the call.
104
105 pass_ipa_devirt performs simple speculative devirtualization.
106 */
107
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "hash-set.h"
113 #include "machmode.h"
114 #include "hash-map.h"
115 #include "vec.h"
116 #include "double-int.h"
117 #include "input.h"
118 #include "alias.h"
119 #include "symtab.h"
120 #include "wide-int.h"
121 #include "inchash.h"
122 #include "tree.h"
123 #include "fold-const.h"
124 #include "print-tree.h"
125 #include "calls.h"
126 #include "predict.h"
127 #include "basic-block.h"
128 #include "is-a.h"
129 #include "plugin-api.h"
130 #include "hard-reg-set.h"
131 #include "function.h"
132 #include "ipa-ref.h"
133 #include "cgraph.h"
134 #include "hashtab.h"
135 #include "rtl.h"
136 #include "flags.h"
137 #include "statistics.h"
138 #include "real.h"
139 #include "fixed-value.h"
140 #include "insn-config.h"
141 #include "expmed.h"
142 #include "dojump.h"
143 #include "explow.h"
144 #include "emit-rtl.h"
145 #include "varasm.h"
146 #include "stmt.h"
147 #include "expr.h"
148 #include "tree-pass.h"
149 #include "target.h"
150 #include "hash-table.h"
151 #include "tree-pretty-print.h"
152 #include "ipa-utils.h"
153 #include "tree-ssa-alias.h"
154 #include "internal-fn.h"
155 #include "gimple-fold.h"
156 #include "gimple-expr.h"
157 #include "gimple.h"
158 #include "alloc-pool.h"
159 #include "symbol-summary.h"
160 #include "ipa-prop.h"
161 #include "ipa-inline.h"
162 #include "diagnostic.h"
163 #include "tree-dfa.h"
164 #include "demangle.h"
165 #include "dbgcnt.h"
166 #include "gimple-pretty-print.h"
167 #include "stor-layout.h"
168 #include "intl.h"
169
/* Hash based set of pairs of types.  Used to record type pairs that are
   currently being (or have been) structurally compared, breaking the
   recursion in odr_types_equivalent_p.  */
typedef struct
{
  /* Callers store the pair with FIRST having the smaller TYPE_UID;
     FIRST == NULL marks an empty hash slot (see pair_traits).  */
  tree first;
  tree second;
} type_pair;
176
177 struct pair_traits : default_hashset_traits
178 {
179 static hashval_t
180 hash (type_pair p)
181 {
182 return TYPE_UID (p.first) ^ TYPE_UID (p.second);
183 }
184 static bool
185 is_empty (type_pair p)
186 {
187 return p.first == NULL;
188 }
189 static bool
190 is_deleted (type_pair p ATTRIBUTE_UNUSED)
191 {
192 return false;
193 }
194 static bool
195 equal (const type_pair &a, const type_pair &b)
196 {
197 return a.first==b.first && a.second == b.second;
198 }
199 static void
200 mark_empty (type_pair &e)
201 {
202 e.first = NULL;
203 }
204 };
205
/* Forward declaration; odr_subtypes_equivalent_p and
   odr_types_equivalent_p recurse into each other, using VISITED to
   terminate on cyclic type graphs.  */
static bool odr_types_equivalent_p (tree, tree, bool, bool *,
				    hash_set<type_pair,pair_traits> *);

/* Set once any One Definition Rule violation has been reported.  */
static bool odr_violation_reported = false;


/* Pointer set of all call targets appearing in the cache.  */
static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
214
/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* leader type.  */
  tree type;
  /* All bases; built only for main variants of types.
     GTY((skip)) — not traced by the GC; released explicitly in
     odr_hasher::remove.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type? */
  bool all_derivations_known;
  /* Did we report ODR violation here?  */
  bool odr_violated;
};
243
244 /* Return TRUE if all derived types of T are known and thus
245 we may consider the walk of derived type complete.
246
247 This is typically true only for final anonymous namespace types and types
248 defined within functions (that may be COMDAT and thus shared across units,
249 but with the same set of derived types). */
250
251 bool
252 type_all_derivations_known_p (const_tree t)
253 {
254 if (TYPE_FINAL_P (t))
255 return true;
256 if (flag_ltrans)
257 return false;
258 /* Non-C++ types may have IDENTIFIER_NODE here, do not crash. */
259 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
260 return true;
261 if (type_in_anonymous_namespace_p (t))
262 return true;
263 return (decl_function_context (TYPE_NAME (t)) != NULL);
264 }
265
266 /* Return TRUE if type's constructors are all visible. */
267
268 static bool
269 type_all_ctors_visible_p (tree t)
270 {
271 return !flag_ltrans
272 && symtab->state >= CONSTRUCTION
273 /* We can not always use type_all_derivations_known_p.
274 For function local types we must assume case where
275 the function is COMDAT and shared in between units.
276
277 TODO: These cases are quite easy to get, but we need
278 to keep track of C++ privatizing via -Wno-weak
279 as well as the IPA privatizing. */
280 && type_in_anonymous_namespace_p (t);
281 }
282
283 /* Return TRUE if type may have instance. */
284
285 static bool
286 type_possibly_instantiated_p (tree t)
287 {
288 tree vtable;
289 varpool_node *vnode;
290
291 /* TODO: Add abstract types here. */
292 if (!type_all_ctors_visible_p (t))
293 return true;
294
295 vtable = BINFO_VTABLE (TYPE_BINFO (t));
296 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
297 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
298 vnode = varpool_node::get (vtable);
299 return vnode && vnode->definition;
300 }
301
/* One Definition Rule hashtable helpers.  Entries are odr_type_d nodes;
   lookups are keyed directly by a tree type node.  */

struct odr_hasher
{
  typedef odr_type_d value_type;
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
312
313 /* Return type that was declared with T's name so that T is an
314 qualified variant of it. */
315
316 static inline tree
317 main_odr_variant (const_tree t)
318 {
319 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
320 return TREE_TYPE (TYPE_NAME (t));
321 /* Unnamed types and non-C++ produced types can be compared by variants. */
322 else
323 return TYPE_MAIN_VARIANT (t);
324 }
325
/* Produce hash based on type name.  T must already be the main ODR
   variant.  Several fallbacks are tried so that types equivalent by
   ODR hash identically even when not merged at LTO time.  */

static hashval_t
hash_type_name (tree t)
{
  gcc_checking_assert (main_odr_variant (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* ODR types have name specified.  */
  if (TYPE_NAME (t)
      && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)))
    return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));

  /* For polymorphic types that were compiled with -fno-lto-odr-type-merging
     we can simply hash the virtual table.  */
  if (TREE_CODE (t) == RECORD_TYPE
      && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
    {
      tree v = BINFO_VTABLE (TYPE_BINFO (t));
      hashval_t hash = 0;

      /* BINFO_VTABLE may be the vtable address plus an offset; fold the
	 offset into the hash and strip down to the vtable decl.  */
      if (TREE_CODE (v) == POINTER_PLUS_EXPR)
	{
	  hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
	  v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
	}

      v = DECL_ASSEMBLER_NAME (v);
      hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
      return hash;
    }

  /* Builtin types may appear as main variants of ODR types and are unique.
     Sanity check we do not get anything that looks non-builtin.  */
  gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
		       || TREE_CODE (t) == VOID_TYPE
		       || TREE_CODE (t) == COMPLEX_TYPE
		       || TREE_CODE (t) == REAL_TYPE
		       || TREE_CODE (t) == POINTER_TYPE);
  return htab_hash_pointer (t);
}
375
/* Return the computed hashcode for ODR_TYPE.  Delegates to
   hash_type_name on the node's leader type.  */

inline hashval_t
odr_hasher::hash (const value_type *odr_type)
{
  return hash_type_name (odr_type->type);
}
383
384 /* For languages with One Definition Rule, work out if
385 types are the same based on their name.
386
387 This is non-trivial for LTO where minor differences in
388 the type representation may have prevented type merging
389 to merge two copies of otherwise equivalent type.
390
391 Until we start streaming mangled type names, this function works
392 only for polymorphic types. */
393
394 bool
395 types_same_for_odr (const_tree type1, const_tree type2)
396 {
397 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
398
399 type1 = main_odr_variant (type1);
400 type2 = main_odr_variant (type2);
401
402 if (type1 == type2)
403 return true;
404
405 if (!in_lto_p)
406 return false;
407
408 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
409 on the corresponding TYPE_STUB_DECL. */
410 if (type_in_anonymous_namespace_p (type1)
411 || type_in_anonymous_namespace_p (type2))
412 return false;
413
414
415 /* ODR name of the type is set in DECL_ASSEMBLER_NAME of its TYPE_NAME.
416
417 Ideally we should never need types without ODR names here. It can however
418 happen in two cases:
419
420 1) for builtin types that are not streamed but rebuilt in lto/lto-lang.c
421 Here testing for equivalence is safe, since their MAIN_VARIANTs are
422 unique.
423 2) for units streamed with -fno-lto-odr-type-merging. Here we can't
424 establish precise ODR equivalency, but for correctness we care only
425 about equivalency on complete polymorphic types. For these we can
426 compare assembler names of their virtual tables. */
427 if ((!TYPE_NAME (type1) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type1)))
428 || (!TYPE_NAME (type2) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type2))))
429 {
430 /* See if types are obviously different (i.e. different codes
431 or polymorphic wrt non-polymorphic). This is not strictly correct
432 for ODR violating programs, but we can't do better without streaming
433 ODR names. */
434 if (TREE_CODE (type1) != TREE_CODE (type2))
435 return false;
436 if (TREE_CODE (type1) == RECORD_TYPE
437 && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type1) == NULL_TREE))
438 return false;
439 if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
440 && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
441 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
442 return false;
443
444 /* At the moment we have no way to establish ODR equivalence at LTO
445 other than comparing virtual table pointers of polymorphic types.
446 Eventually we should start saving mangled names in TYPE_NAME.
447 Then this condition will become non-trivial. */
448
449 if (TREE_CODE (type1) == RECORD_TYPE
450 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
451 && BINFO_VTABLE (TYPE_BINFO (type1))
452 && BINFO_VTABLE (TYPE_BINFO (type2)))
453 {
454 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
455 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
456 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
457 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
458 return (operand_equal_p (TREE_OPERAND (v1, 1),
459 TREE_OPERAND (v2, 1), 0)
460 && DECL_ASSEMBLER_NAME
461 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
462 == DECL_ASSEMBLER_NAME
463 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
464 }
465 gcc_unreachable ();
466 }
467 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
468 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
469 }
470
471 /* Return true if we can decide on ODR equivalency.
472
473 In non-LTO it is always decide, in LTO however it depends in the type has
474 ODR info attached. */
475
476 bool
477 types_odr_comparable (tree t1, tree t2)
478 {
479 return (!in_lto_p
480 || main_odr_variant (t1) == main_odr_variant (t2)
481 || (odr_type_p (t1) && odr_type_p (t2))
482 || (TREE_CODE (t1) == RECORD_TYPE && TREE_CODE (t2) == RECORD_TYPE
483 && TYPE_BINFO (t1) && TYPE_BINFO (t2)
484 && polymorphic_type_binfo_p (TYPE_BINFO (t1))
485 && polymorphic_type_binfo_p (TYPE_BINFO (t2))));
486 }
487
488 /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
489 known, be conservative and return false. */
490
491 bool
492 types_must_be_same_for_odr (tree t1, tree t2)
493 {
494 if (types_odr_comparable (t1, t2))
495 return types_same_for_odr (t1, t2);
496 else
497 return main_odr_variant (t1) == main_odr_variant (t2);
498 }
499
500 /* Compare types T1 and T2 and return true if they are
501 equivalent. */
502
503 inline bool
504 odr_hasher::equal (const value_type *t1, const compare_type *ct2)
505 {
506 tree t2 = const_cast <tree> (ct2);
507
508 gcc_checking_assert (main_odr_variant (t2) == t2);
509 if (t1->type == t2)
510 return true;
511 if (!in_lto_p)
512 return false;
513 return types_same_for_odr (t1->type, t2);
514 }
515
/* Free ODR type V.  */

inline void
odr_hasher::remove (value_type *v)
{
  /* bases and derived_types are GTY((skip)) in odr_type_d, so they are
     not collected and must be released by hand.  */
  v->bases.release ();
  v->derived_types.release ();
  if (v->types_set)
    delete v->types_set;
  /* The node itself is GC allocated.  */
  ggc_free (v);
}
527
/* ODR type hash used to look up ODR type based on tree type node.  */

typedef hash_table<odr_hasher> odr_hash_type;
static odr_hash_type *odr_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
/* Convenience accessor; valid only after odr_types_ptr is allocated.  */
#define odr_types (*odr_types_ptr)
539
540 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
541 void
542 set_type_binfo (tree type, tree binfo)
543 {
544 for (; type; type = TYPE_NEXT_VARIANT (type))
545 if (COMPLETE_TYPE_P (type))
546 TYPE_BINFO (type) = binfo;
547 else
548 gcc_assert (!TYPE_BINFO (type));
549 }
550
/* Compare T1 and T2 based on name or structure.  Used for component
   types during structural ODR comparison; VISITED records type pairs
   already being compared so the recursion terminates.  */

static bool
odr_subtypes_equivalent_p (tree t1, tree t2,
			   hash_set<type_pair,pair_traits> *visited)
{
  bool an1, an2;

  /* This can happen in incomplete types that should be handled earlier.  */
  gcc_assert (t1 && t2);

  t1 = main_odr_variant (t1);
  t2 = main_odr_variant (t2);
  if (t1 == t2)
    return true;

  /* Anonymous namespace types must match exactly.  */
  an1 = type_in_anonymous_namespace_p (t1);
  an2 = type_in_anonymous_namespace_p (t2);
  if (an1 != an2 || an1)
    return false;

  /* For ODR types be sure to compare their names.
     To support -wno-odr-type-merging we allow one type to be non-ODR
     and other ODR even though it is a violation.  */
  if (types_odr_comparable (t1, t2))
    {
      if (!types_same_for_odr (t1, t2))
        return false;
      /* Limit recursion: If subtypes are ODR types and we know
         that they are same, be happy.  */
      if (!get_odr_type (t1, true)->odr_violated)
        return true;
    }

  /* Component types, builtins and possibly violating ODR types
     have to be compared structurally.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;
  if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
    return false;
  if (TYPE_NAME (t1) && DECL_NAME (TYPE_NAME (t1)) != DECL_NAME (TYPE_NAME (t2)))
    return false;

  /* Canonicalize pair order by TYPE_UID so (a,b) and (b,a) map to the
     same visited entry.  */
  type_pair pair={t1,t2};
  if (TYPE_UID (t1) > TYPE_UID (t2))
    {
      pair.first = t2;
      pair.second = t1;
    }
  /* Recursion guard: a pair already under comparison is assumed
     equivalent.  */
  if (visited->add (pair))
    return true;
  return odr_types_equivalent_p (t1, t2, false, NULL, visited);
}
605
606 /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
607 violation warnings. */
608
609 void
610 compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
611 {
612 int n1, n2;
613 if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
614 {
615 odr_violation_reported = true;
616 if (DECL_VIRTUAL_P (prevailing->decl))
617 {
618 varpool_node *tmp = prevailing;
619 prevailing = vtable;
620 vtable = tmp;
621 }
622 if (warning_at (DECL_SOURCE_LOCATION
623 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
624 OPT_Wodr,
625 "virtual table of type %qD violates one definition rule",
626 DECL_CONTEXT (vtable->decl)))
627 inform (DECL_SOURCE_LOCATION (prevailing->decl),
628 "variable of same assembler name as the virtual table is "
629 "defined in another translation unit");
630 return;
631 }
632 if (!prevailing->definition || !vtable->definition)
633 return;
634 for (n1 = 0, n2 = 0; true; n1++, n2++)
635 {
636 struct ipa_ref *ref1, *ref2;
637 bool end1, end2;
638
639 end1 = !prevailing->iterate_reference (n1, ref1);
640 end2 = !vtable->iterate_reference (n2, ref2);
641
642 /* !DECL_VIRTUAL_P means RTTI entry;
643 We warn when RTTI is lost because non-RTTI previals; we silently
644 accept the other case. */
645 while (!end2
646 && (end1
647 || (DECL_ASSEMBLER_NAME (ref1->referred->decl)
648 != DECL_ASSEMBLER_NAME (ref2->referred->decl)
649 && DECL_VIRTUAL_P (ref1->referred->decl)))
650 && !DECL_VIRTUAL_P (ref2->referred->decl))
651 {
652 if (warning_at (DECL_SOURCE_LOCATION
653 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
654 "virtual table of type %qD contains RTTI information",
655 DECL_CONTEXT (vtable->decl)))
656 {
657 inform (DECL_SOURCE_LOCATION
658 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
659 "but is prevailed by one without from other translation "
660 "unit");
661 inform (DECL_SOURCE_LOCATION
662 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
663 "RTTI will not work on this type");
664 }
665 n2++;
666 end2 = !vtable->iterate_reference (n2, ref2);
667 }
668 while (!end1
669 && (end2
670 || (DECL_ASSEMBLER_NAME (ref2->referred->decl)
671 != DECL_ASSEMBLER_NAME (ref1->referred->decl)
672 && DECL_VIRTUAL_P (ref2->referred->decl)))
673 && !DECL_VIRTUAL_P (ref1->referred->decl))
674 {
675 n1++;
676 end1 = !vtable->iterate_reference (n1, ref1);
677 }
678
679 /* Finished? */
680 if (end1 && end2)
681 {
682 /* Extra paranoia; compare the sizes. We do not have information
683 about virtual inheritance offsets, so just be sure that these
684 match.
685 Do this as very last check so the not very informative error
686 is not output too often. */
687 if (DECL_SIZE (prevailing->decl) != DECL_SIZE (vtable->decl))
688 {
689 if (warning_at (DECL_SOURCE_LOCATION
690 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
691 "virtual table of type %qD violates "
692 "one definition rule ",
693 DECL_CONTEXT (vtable->decl)))
694 {
695 inform (DECL_SOURCE_LOCATION
696 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
697 "the conflicting type defined in another translation "
698 "unit has virtual table of different size");
699 }
700 }
701 return;
702 }
703
704 if (!end1 && !end2)
705 {
706 if (DECL_ASSEMBLER_NAME (ref1->referred->decl)
707 == DECL_ASSEMBLER_NAME (ref2->referred->decl))
708 continue;
709
710 /* If the loops above stopped on non-virtual pointer, we have
711 mismatch in RTTI information mangling. */
712 if (!DECL_VIRTUAL_P (ref1->referred->decl)
713 && !DECL_VIRTUAL_P (ref2->referred->decl))
714 {
715 if (warning_at (DECL_SOURCE_LOCATION
716 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
717 "virtual table of type %qD violates "
718 "one definition rule ",
719 DECL_CONTEXT (vtable->decl)))
720 {
721 inform (DECL_SOURCE_LOCATION
722 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
723 "the conflicting type defined in another translation "
724 "unit virtual table with different RTTI information");
725 }
726 return;
727 }
728 /* At this point both REF1 and REF2 points either to virtual table
729 or virtual method. If one points to virtual table and other to
730 method we can complain the same way as if one table was shorter
731 than other pointing out the extra method. */
732 gcc_assert (DECL_VIRTUAL_P (ref1->referred->decl)
733 && (TREE_CODE (ref1->referred->decl) == FUNCTION_DECL
734 || TREE_CODE (ref1->referred->decl) == VAR_DECL));
735 gcc_assert (DECL_VIRTUAL_P (ref2->referred->decl)
736 && (TREE_CODE (ref2->referred->decl) == FUNCTION_DECL
737 || TREE_CODE (ref2->referred->decl) == VAR_DECL));
738 if (TREE_CODE (ref1->referred->decl)
739 != TREE_CODE (ref2->referred->decl))
740 {
741 if (TREE_CODE (ref1->referred->decl) == VAR_DECL)
742 end1 = true;
743 else if (TREE_CODE (ref2->referred->decl) == VAR_DECL)
744 end2 = true;
745 }
746 }
747
748 /* Complain about size mismatch. Either we have too many virutal
749 functions or too many virtual table pointers. */
750 if (end1 || end2)
751 {
752 if (end1)
753 {
754 varpool_node *tmp = prevailing;
755 prevailing = vtable;
756 vtable = tmp;
757 ref1 = ref2;
758 }
759 if (warning_at (DECL_SOURCE_LOCATION
760 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
761 "virtual table of type %qD violates "
762 "one definition rule",
763 DECL_CONTEXT (vtable->decl)))
764 {
765 if (TREE_CODE (ref1->referring->decl) == FUNCTION_DECL)
766 {
767 inform (DECL_SOURCE_LOCATION
768 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
769 "the conflicting type defined in another translation "
770 "unit");
771 inform (DECL_SOURCE_LOCATION
772 (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
773 "contains additional virtual method %qD",
774 ref1->referred->decl);
775 }
776 else
777 {
778 inform (DECL_SOURCE_LOCATION
779 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
780 "the conflicting type defined in another translation "
781 "unit has virtual table table with more entries");
782 }
783 }
784 return;
785 }
786
787 /* And in the last case we have either mistmatch in between two virtual
788 methods or two virtual table pointers. */
789 if (warning_at (DECL_SOURCE_LOCATION
790 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
791 "virtual table of type %qD violates "
792 "one definition rule ",
793 DECL_CONTEXT (vtable->decl)))
794 {
795 if (TREE_CODE (ref1->referred->decl) == FUNCTION_DECL)
796 {
797 inform (DECL_SOURCE_LOCATION
798 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
799 "the conflicting type defined in another translation "
800 "unit");
801 gcc_assert (TREE_CODE (ref2->referred->decl)
802 == FUNCTION_DECL);
803 inform (DECL_SOURCE_LOCATION (ref1->referred->decl),
804 "virtual method %qD", ref1->referred->decl);
805 inform (DECL_SOURCE_LOCATION (ref2->referred->decl),
806 "ought to match virtual method %qD but does not",
807 ref2->referred->decl);
808 }
809 else
810 inform (DECL_SOURCE_LOCATION
811 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
812 "the conflicting type defined in another translation "
813 "unit has virtual table table with different contents");
814 return;
815 }
816 }
817 }
818
/* Output ODR violation warning about T1 and T2 with REASON.
   Display location of ST1 and ST2 if REASON speaks about field or
   method of the type.
   If WARN is false, do nothing.  Set WARNED if warning was indeed
   output.  */

void
warn_odr (tree t1, tree t2, tree st1, tree st2,
	  bool warn, bool *warned, const char *reason)
{
  tree decl2 = TYPE_NAME (t2);
  if (warned)
    *warned = false;

  if (!warn)
    return;
  if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
		   "type %qT violates one definition rule",
		   t1))
    return;
  if (!st1 && !st2)
    /* No sub-decl details to report; fall through to REASON.  */
    ;
  /* For FIELD_DECL support also case where one of fields is
     NULL - this is used when the structures have mismatching number of
     elements.  */
  else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      /* Make ST1 the non-NULL field so its location can be shown.  */
      if (!st1)
	{
	  st1 = st2;
	  st2 = NULL;
	}
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is field %qD",
	      st1);
      /* Point the REASON note at the other field when available.  */
      if (st2)
        decl2 = st2;
    }
  else if (TREE_CODE (st1) == FUNCTION_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is method %qD",
	      st1);
      decl2 = st2;
    }
  else
    return;
  inform (DECL_SOURCE_LOCATION (decl2), reason);

  if (warned)
    *warned = true;
}
875
/* We already warned about ODR mismatch.  T1 and T2 ought to be equivalent
   because they are used on same place in ODR matching types.
   They are not; inform the user.  */

void
warn_types_mismatch (tree t1, tree t2)
{
  /* Without named types there is no location to point at.  */
  if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
    return;
  /* In Firefox it is a common bug to have same types but in
     different namespaces.  Be a bit more informative on
     this.  */
  if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
      && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
	    != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
	  || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
	      && (DECL_NAME (TYPE_CONTEXT (t1)) !=
		  DECL_NAME (TYPE_CONTEXT (t2))))))
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
	    "type %qT should match type %qT but is defined "
	    "in different namespace ",
	    t1, t2);
  else
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
	    "type %qT should match type %qT",
	    t1, t2);
  inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
	  "the incompatible type is defined here");
}
905
906 /* Compare T1 and T2, report ODR violations if WARN is true and set
907 WARNED to true if anything is reported. Return true if types match.
908 If true is returned, the types are also compatible in the sense of
909 gimple_canonical_types_compatible_p. */
910
911 static bool
912 odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
913 hash_set<type_pair,pair_traits> *visited)
914 {
915 /* Check first for the obvious case of pointer identity. */
916 if (t1 == t2)
917 return true;
918 gcc_assert (!type_in_anonymous_namespace_p (t1));
919 gcc_assert (!type_in_anonymous_namespace_p (t2));
920
921 /* Can't be the same type if the types don't have the same code. */
922 if (TREE_CODE (t1) != TREE_CODE (t2))
923 {
924 warn_odr (t1, t2, NULL, NULL, warn, warned,
925 G_("a different type is defined in another translation unit"));
926 return false;
927 }
928
929 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
930 {
931 warn_odr (t1, t2, NULL, NULL, warn, warned,
932 G_("a type with different qualifiers is defined in another "
933 "translation unit"));
934 return false;
935 }
936
937 if (comp_type_attributes (t1, t2) != 1)
938 {
939 warn_odr (t1, t2, NULL, NULL, warn, warned,
940 G_("a type with attributes "
941 "is defined in another translation unit"));
942 return false;
943 }
944
945 if (TREE_CODE (t1) == ENUMERAL_TYPE
946 && TYPE_VALUES (t1) && TYPE_VALUES (t2))
947 {
948 tree v1, v2;
949 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
950 v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
951 {
952 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
953 {
954 warn_odr (t1, t2, NULL, NULL, warn, warned,
955 G_("an enum with different value name"
956 " is defined in another translation unit"));
957 return false;
958 }
959 if (TREE_VALUE (v1) != TREE_VALUE (v2)
960 && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
961 DECL_INITIAL (TREE_VALUE (v2)), 0))
962 {
963 warn_odr (t1, t2, NULL, NULL, warn, warned,
964 G_("an enum with different values is defined"
965 " in another translation unit"));
966 return false;
967 }
968 }
969 if (v1 || v2)
970 {
971 warn_odr (t1, t2, NULL, NULL, warn, warned,
972 G_("an enum with mismatching number of values "
973 "is defined in another translation unit"));
974 return false;
975 }
976 }
977
978 /* Non-aggregate types can be handled cheaply. */
979 if (INTEGRAL_TYPE_P (t1)
980 || SCALAR_FLOAT_TYPE_P (t1)
981 || FIXED_POINT_TYPE_P (t1)
982 || TREE_CODE (t1) == VECTOR_TYPE
983 || TREE_CODE (t1) == COMPLEX_TYPE
984 || TREE_CODE (t1) == OFFSET_TYPE
985 || POINTER_TYPE_P (t1))
986 {
987 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
988 {
989 warn_odr (t1, t2, NULL, NULL, warn, warned,
990 G_("a type with different precision is defined "
991 "in another translation unit"));
992 return false;
993 }
994 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
995 {
996 warn_odr (t1, t2, NULL, NULL, warn, warned,
997 G_("a type with different signedness is defined "
998 "in another translation unit"));
999 return false;
1000 }
1001
1002 if (TREE_CODE (t1) == INTEGER_TYPE
1003 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
1004 {
1005 /* char WRT uint_8? */
1006 warn_odr (t1, t2, NULL, NULL, warn, warned,
1007 G_("a different type is defined in another "
1008 "translation unit"));
1009 return false;
1010 }
1011
1012 /* For canonical type comparisons we do not want to build SCCs
1013 so we cannot compare pointed-to types. But we can, for now,
1014 require the same pointed-to type kind and match what
1015 useless_type_conversion_p would do. */
1016 if (POINTER_TYPE_P (t1))
1017 {
1018 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
1019 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
1020 {
1021 warn_odr (t1, t2, NULL, NULL, warn, warned,
1022 G_("it is defined as a pointer in different address "
1023 "space in another translation unit"));
1024 return false;
1025 }
1026
1027 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
1028 {
1029 warn_odr (t1, t2, NULL, NULL, warn, warned,
1030 G_("it is defined as a pointer to different type "
1031 "in another translation unit"));
1032 if (warn && warned)
1033 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1034 return false;
1035 }
1036 }
1037
1038 if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
1039 && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
1040 {
1041 /* Probably specific enough. */
1042 warn_odr (t1, t2, NULL, NULL, warn, warned,
1043 G_("a different type is defined "
1044 "in another translation unit"));
1045 if (warn && warned)
1046 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1047 return false;
1048 }
1049 }
1050 /* Do type-specific comparisons. */
1051 else switch (TREE_CODE (t1))
1052 {
1053 case ARRAY_TYPE:
1054 {
1055 /* Array types are the same if the element types are the same and
1056 the number of elements are the same. */
1057 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
1058 {
1059 warn_odr (t1, t2, NULL, NULL, warn, warned,
1060 G_("a different type is defined in another "
1061 "translation unit"));
1062 if (warn && warned)
1063 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1064 }
1065 gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
1066 gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
1067 == TYPE_NONALIASED_COMPONENT (t2));
1068
1069 tree i1 = TYPE_DOMAIN (t1);
1070 tree i2 = TYPE_DOMAIN (t2);
1071
1072 /* For an incomplete external array, the type domain can be
1073 NULL_TREE. Check this condition also. */
1074 if (i1 == NULL_TREE || i2 == NULL_TREE)
1075 return true;
1076
1077 tree min1 = TYPE_MIN_VALUE (i1);
1078 tree min2 = TYPE_MIN_VALUE (i2);
1079 tree max1 = TYPE_MAX_VALUE (i1);
1080 tree max2 = TYPE_MAX_VALUE (i2);
1081
1082 /* In C++, minimums should be always 0. */
1083 gcc_assert (min1 == min2);
1084 if (!operand_equal_p (max1, max2, 0))
1085 {
1086 warn_odr (t1, t2, NULL, NULL, warn, warned,
1087 G_("an array of different size is defined "
1088 "in another translation unit"));
1089 return false;
1090 }
1091 }
1092 break;
1093
1094 case METHOD_TYPE:
1095 case FUNCTION_TYPE:
1096 /* Function types are the same if the return type and arguments types
1097 are the same. */
1098 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
1099 {
1100 warn_odr (t1, t2, NULL, NULL, warn, warned,
1101 G_("has different return value "
1102 "in another translation unit"));
1103 if (warn && warned)
1104 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1105 return false;
1106 }
1107
1108 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
1109 return true;
1110 else
1111 {
1112 tree parms1, parms2;
1113
1114 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1115 parms1 && parms2;
1116 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
1117 {
1118 if (!odr_subtypes_equivalent_p
1119 (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
1120 {
1121 warn_odr (t1, t2, NULL, NULL, warn, warned,
1122 G_("has different parameters in another "
1123 "translation unit"));
1124 if (warn && warned)
1125 warn_types_mismatch (TREE_VALUE (parms1),
1126 TREE_VALUE (parms2));
1127 return false;
1128 }
1129 }
1130
1131 if (parms1 || parms2)
1132 {
1133 warn_odr (t1, t2, NULL, NULL, warn, warned,
1134 G_("has different parameters "
1135 "in another translation unit"));
1136 return false;
1137 }
1138
1139 return true;
1140 }
1141
1142 case RECORD_TYPE:
1143 case UNION_TYPE:
1144 case QUAL_UNION_TYPE:
1145 {
1146 tree f1, f2;
1147
1148 /* For aggregate types, all the fields must be the same. */
1149 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1150 {
1151 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1152 f1 || f2;
1153 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1154 {
1155 /* Skip non-fields. */
1156 while (f1 && TREE_CODE (f1) != FIELD_DECL)
1157 f1 = TREE_CHAIN (f1);
1158 while (f2 && TREE_CODE (f2) != FIELD_DECL)
1159 f2 = TREE_CHAIN (f2);
1160 if (!f1 || !f2)
1161 break;
1162 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1163 {
1164 warn_odr (t1, t2, NULL, NULL, warn, warned,
1165 G_("a type with different virtual table pointers"
1166 " is defined in another translation unit"));
1167 return false;
1168 }
1169 if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
1170 {
1171 warn_odr (t1, t2, NULL, NULL, warn, warned,
1172 G_("a type with different bases is defined "
1173 "in another translation unit"));
1174 return false;
1175 }
1176 if (DECL_NAME (f1) != DECL_NAME (f2)
1177 && !DECL_ARTIFICIAL (f1))
1178 {
1179 warn_odr (t1, t2, f1, f2, warn, warned,
1180 G_("a field with different name is defined "
1181 "in another translation unit"));
1182 return false;
1183 }
1184 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
1185 TREE_TYPE (f2), visited))
1186 {
1187 /* Do not warn about artificial fields and just go into
1188 generic field mismatch warning. */
1189 if (DECL_ARTIFICIAL (f1))
1190 break;
1191
1192 warn_odr (t1, t2, f1, f2, warn, warned,
1193 G_("a field of same name but different type "
1194 "is defined in another translation unit"));
1195 if (warn && warned)
1196 warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
1197 return false;
1198 }
1199 if (!gimple_compare_field_offset (f1, f2))
1200 {
1201 /* Do not warn about artificial fields and just go into
1202 generic field mismatch warning. */
1203 if (DECL_ARTIFICIAL (f1))
1204 break;
1205 warn_odr (t1, t2, f1, f2, warn, warned,
1206 G_("fields has different layout "
1207 "in another translation unit"));
1208 return false;
1209 }
1210 gcc_assert (DECL_NONADDRESSABLE_P (f1)
1211 == DECL_NONADDRESSABLE_P (f2));
1212 }
1213
1214 /* If one aggregate has more fields than the other, they
1215 are not the same. */
1216 if (f1 || f2)
1217 {
1218 if ((f1 && DECL_VIRTUAL_P (f1)) || (f2 && DECL_VIRTUAL_P (f2)))
1219 warn_odr (t1, t2, NULL, NULL, warn, warned,
1220 G_("a type with different virtual table pointers"
1221 " is defined in another translation unit"));
1222 if ((f1 && DECL_ARTIFICIAL (f1))
1223 || (f2 && DECL_ARTIFICIAL (f2)))
1224 warn_odr (t1, t2, NULL, NULL, warn, warned,
1225 G_("a type with different bases is defined "
1226 "in another translation unit"));
1227 else
1228 warn_odr (t1, t2, f1, f2, warn, warned,
1229 G_("a type with different number of fields "
1230 "is defined in another translation unit"));
1231
1232 return false;
1233 }
1234 if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
1235 && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
1236 != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
1237 {
1238 for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
1239 f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
1240 f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
1241 {
1242 if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
1243 {
1244 warn_odr (t1, t2, f1, f2, warn, warned,
1245 G_("a different method of same type "
1246 "is defined in another translation unit"));
1247 return false;
1248 }
1249 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1250 {
1251 warn_odr (t1, t2, f1, f2, warn, warned,
1252 G_("s definition that differs by virtual "
1253 "keyword in another translation unit"));
1254 return false;
1255 }
1256 if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
1257 {
1258 warn_odr (t1, t2, f1, f2, warn, warned,
1259 G_("virtual table layout differs in another "
1260 "translation unit"));
1261 return false;
1262 }
1263 if (odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
1264 {
1265 warn_odr (t1, t2, f1, f2, warn, warned,
1266 G_("method with incompatible type is defined "
1267 "in another translation unit"));
1268 return false;
1269 }
1270 }
1271 if (f1 || f2)
1272 {
1273 warn_odr (t1, t2, NULL, NULL, warn, warned,
1274 G_("a type with different number of methods "
1275 "is defined in another translation unit"));
1276 return false;
1277 }
1278 }
1279 }
1280 break;
1281 }
1282 case VOID_TYPE:
1283 break;
1284
1285 default:
1286 debug_tree (t1);
1287 gcc_unreachable ();
1288 }
1289
1290 /* Those are better to come last as they are utterly uninformative. */
1291 if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
1292 && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
1293 {
1294 warn_odr (t1, t2, NULL, NULL, warn, warned,
1295 G_("a type with different size "
1296 "is defined in another translation unit"));
1297 return false;
1298 }
1299 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
1300 && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
1301 {
1302 warn_odr (t1, t2, NULL, NULL, warn, warned,
1303 G_("a type with different alignment "
1304 "is defined in another translation unit"));
1305 return false;
1306 }
1307 gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
1308 || operand_equal_p (TYPE_SIZE_UNIT (t1),
1309 TYPE_SIZE_UNIT (t2), 0));
1310 return true;
1311 }
1312
/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type.  It may or may not mean the ODR violation.
   Add it to the list of duplicates and warn on some violations.
   Returns true when VAL->type changed and the caller must (re)build its
   base-class information.  */

static bool
add_type_duplicate (odr_type val, tree type)
{
  bool build_bases = false;
  if (!val->types_set)
    val->types_set = new hash_set<tree>;

  /* Always prefer complete type to be the leader.  */

  if (!COMPLETE_TYPE_P (val->type) && COMPLETE_TYPE_P (type))
    build_bases = true;
  else if (COMPLETE_TYPE_P (val->type) && !COMPLETE_TYPE_P (type))
    ;
  /* Prefer an enum whose value list is present.  */
  else if (TREE_CODE (val->type) == ENUMERAL_TYPE
	   && TREE_CODE (type) == ENUMERAL_TYPE
	   && !TYPE_VALUES (val->type) && TYPE_VALUES (type))
    build_bases = true;
  /* Prefer a record that has its binfo (base info) attached.  */
  else if (TREE_CODE (val->type) == RECORD_TYPE
	   && TREE_CODE (type) == RECORD_TYPE
	   && TYPE_BINFO (type) && !TYPE_BINFO (val->type))
    build_bases = true;

  /* Swap TYPE and VAL->type so the better representative leads; the old
     leader is then processed as the duplicate below.  */
  if (build_bases)
    {
      tree tmp = type;

      type = val->type;
      val->type = tmp;
    }

  /* See if this duplicate is new.  */
  if (!val->types_set->add (type))
    {
      bool merge = true;
      bool base_mismatch = false;
      unsigned int i;
      bool warned = false;
      hash_set<type_pair,pair_traits> visited;

      gcc_assert (in_lto_p);
      vec_safe_push (val->types, type);

      /* First we compare memory layout.  */
      if (!odr_types_equivalent_p (val->type, type,
				   !flag_ltrans && !val->odr_violated,
				   &warned, &visited))
	{
	  merge = false;
	  odr_violation_reported = true;
	  val->odr_violated = true;
	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file, "ODR violation\n");

	      print_node (symtab->dump_file, "", val->type, 0);
	      putc ('\n',symtab->dump_file);
	      print_node (symtab->dump_file, "", type, 0);
	      putc ('\n',symtab->dump_file);
	    }
	}

      /* Next sanity check that bases are the same.  If not, we will end
	 up producing wrong answers.  */
      if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
	  && TREE_CODE (val->type) == RECORD_TYPE
	  && TREE_CODE (type) == RECORD_TYPE
	  && TYPE_BINFO (val->type) && TYPE_BINFO (type))
	{
	  if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
	      != BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
	    {
	      /* Report the base present in one representation but not the
		 other; warn only once per ODR type.  */
	      if (!warned && !val->odr_violated)
		{
		  tree extra_base;
		  warn_odr (type, val->type, NULL, NULL, !warned, &warned,
			    "a type with the same name but different "
			    "number of polymorphic bases is "
			    "defined in another translation unit");
		  if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
		      > BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
		    extra_base = BINFO_BASE_BINFO
				 (TYPE_BINFO (type),
				  BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)));
		  else
		    extra_base = BINFO_BASE_BINFO
				 (TYPE_BINFO (val->type),
				  BINFO_N_BASE_BINFOS (TYPE_BINFO (type)));
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (extra_base))),
			  "the extra base is defined here ");
		}
	      base_mismatch = true;
	    }
	  else
	    /* Same number of bases: compare them pairwise.  */
	    for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	      {
		tree base1 = BINFO_BASE_BINFO (TYPE_BINFO (type), i);
		tree base2 = BINFO_BASE_BINFO (TYPE_BINFO (val->type), i);
		tree type1 = BINFO_TYPE (base1);
		tree type2 = BINFO_TYPE (base2);

		if (types_odr_comparable (type1, type2))
		  {
		    if (!types_same_for_odr (type1, type2))
		      base_mismatch = true;
		  }
		else
		  {
		    /* Fall back to structural equivalence with a fresh
		       visited set (no diagnostics at this point).  */
		    hash_set<type_pair,pair_traits> visited;
		    if (!odr_types_equivalent_p (type1, type2, false, NULL,
						 &visited))
		      base_mismatch = true;
		  }
		if (base_mismatch)
		  {
		    if (!warned && !val->odr_violated)
		      {
			warn_odr (type, val->type, NULL, NULL,
				  !warned, &warned,
				  "a type with the same name but different base "
				  "type is defined in another translation unit");
			if (warned)
			  warn_types_mismatch (type1, type2);
		      }
		    break;
		  }
		if (BINFO_OFFSET (base1) != BINFO_OFFSET (base2))
		  {
		    base_mismatch = true;
		    if (!warned && !val->odr_violated)
		      warn_odr (type, val->type, NULL, NULL,
				!warned, &warned,
				"a type with the same name but different base "
				"layout is defined in another translation unit");
		    break;
		  }
		/* One base is polymorphic and the other not.
		   This ought to be diagnosed earlier, but do not ICE in the
		   checking below.  */
		if (!TYPE_BINFO (type1) != !TYPE_BINFO (type2)
		    || (TYPE_BINFO (type1)
			&& polymorphic_type_binfo_p (TYPE_BINFO (type1))
			   != polymorphic_type_binfo_p (TYPE_BINFO (type2))))
		  {
		    gcc_assert (val->odr_violated);
		    base_mismatch = true;
		    break;
		  }
	      }
#ifdef ENABLE_CHECKING
	  /* Sanity check that all bases will be build same way again.  */
	  if (!base_mismatch && val->bases.length ())
	    {
	      unsigned int num_poly_bases = 0;
	      unsigned int j;

	      /* Count polymorphic bases of TYPE; it must match the number
		 of bases recorded for the ODR type.  */
	      for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
		if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
						 (TYPE_BINFO (type), i)))
		  num_poly_bases++;
	      gcc_assert (num_poly_bases == val->bases.length ());
	      /* And each recorded base must be the ODR type of the
		 corresponding polymorphic binfo.  */
	      for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type));
		   i++)
		if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
					       (TYPE_BINFO (type), i)))
		  {
		    odr_type base = get_odr_type
				       (BINFO_TYPE
					  (BINFO_BASE_BINFO (TYPE_BINFO (type),
							     i)),
					true);
		    gcc_assert (val->bases[j] == base);
		    j++;
		  }
	    }
#endif
	  if (base_mismatch)
	    {
	      merge = false;
	      odr_violation_reported = true;
	      val->odr_violated = true;

	      if (symtab->dump_file)
		{
		  fprintf (symtab->dump_file, "ODR base violation\n");

		  print_node (symtab->dump_file, "", val->type, 0);
		  putc ('\n',symtab->dump_file);
		  print_node (symtab->dump_file, "", type, 0);
		  putc ('\n',symtab->dump_file);
		}
	    }
	}

      /* Regularize things a little.  During LTO same types may come with
	 different BINFOs.  Either because their virtual table was
	 not merged by tree merging and only later at decl merging or
	 because one type comes with external vtable, while other
	 with internal.  We want to merge equivalent binfos to conserve
	 memory and streaming overhead.

	 The external vtables are more harmful: they contain references
	 to external declarations of methods that may be defined in the
	 merged LTO unit.  For this reason we absolutely need to remove
	 them and replace by internal variants. Not doing so will lead
	 to incomplete answers from possible_polymorphic_call_targets.

	 FIXME: disable for now; because ODR types are now build during
	 streaming in, the variants do not need to be linked to the type,
	 yet.  We need to do the merging in cleanup pass to be implemented
	 soon.  */
      /* NOTE: the "&& 0" below intentionally disables this whole branch
	 (see the FIXME above); the code is kept for the planned cleanup
	 pass.  */
      if (!flag_ltrans && merge
	  && 0
	  && TREE_CODE (val->type) == RECORD_TYPE
	  && TREE_CODE (type) == RECORD_TYPE
	  && TYPE_BINFO (val->type) && TYPE_BINFO (type)
	  && TYPE_MAIN_VARIANT (type) == type
	  && TYPE_MAIN_VARIANT (val->type) == val->type
	  && BINFO_VTABLE (TYPE_BINFO (val->type))
	  && BINFO_VTABLE (TYPE_BINFO (type)))
	{
	  tree master_binfo = TYPE_BINFO (val->type);
	  tree v1 = BINFO_VTABLE (master_binfo);
	  tree v2 = BINFO_VTABLE (TYPE_BINFO (type));

	  if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
	    {
	      gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
			  && operand_equal_p (TREE_OPERAND (v1, 1),
					      TREE_OPERAND (v2, 1), 0));
	      v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
	      v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
	    }
	  gcc_assert (DECL_ASSEMBLER_NAME (v1)
		      == DECL_ASSEMBLER_NAME (v2));

	  if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
	    {
	      unsigned int i;

	      set_type_binfo (val->type, TYPE_BINFO (type));
	      for (i = 0; i < val->types->length (); i++)
		{
		  if (TYPE_BINFO ((*val->types)[i])
		      == master_binfo)
		    set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
		}
	      BINFO_TYPE (TYPE_BINFO (type)) = val->type;
	    }
	  else
	    set_type_binfo (type, master_binfo);
	}
    }
  return build_bases;
}
1573
1574 /* Get ODR type hash entry for TYPE. If INSERT is true, create
1575 possibly new entry. */
1576
1577 odr_type
1578 get_odr_type (tree type, bool insert)
1579 {
1580 odr_type_d **slot;
1581 odr_type val;
1582 hashval_t hash;
1583 bool build_bases = false;
1584 bool insert_to_odr_array = false;
1585 int base_id = -1;
1586
1587 type = main_odr_variant (type);
1588
1589 hash = hash_type_name (type);
1590 slot = odr_hash->find_slot_with_hash (type, hash,
1591 insert ? INSERT : NO_INSERT);
1592 if (!slot)
1593 return NULL;
1594
1595 /* See if we already have entry for type. */
1596 if (*slot)
1597 {
1598 val = *slot;
1599
1600 /* With LTO we need to support multiple tree representation of
1601 the same ODR type. */
1602 if (val->type != type)
1603 build_bases = add_type_duplicate (val, type);
1604 }
1605 else
1606 {
1607 val = ggc_cleared_alloc<odr_type_d> ();
1608 val->type = type;
1609 val->bases = vNULL;
1610 val->derived_types = vNULL;
1611 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
1612 build_bases = COMPLETE_TYPE_P (val->type);
1613 insert_to_odr_array = true;
1614 *slot = val;
1615 }
1616
1617 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1618 && type == TYPE_MAIN_VARIANT (type))
1619 {
1620 tree binfo = TYPE_BINFO (type);
1621 unsigned int i;
1622
1623 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) = type);
1624
1625 val->all_derivations_known = type_all_derivations_known_p (type);
1626 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
1627 /* For now record only polymorphic types. other are
1628 pointless for devirtualization and we can not precisely
1629 determine ODR equivalency of these during LTO. */
1630 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
1631 {
1632 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
1633 i)),
1634 true);
1635 gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
1636 base->derived_types.safe_push (val);
1637 val->bases.safe_push (base);
1638 if (base->id > base_id)
1639 base_id = base->id;
1640 }
1641 }
1642 /* Ensure that type always appears after bases. */
1643 if (insert_to_odr_array)
1644 {
1645 if (odr_types_ptr)
1646 val->id = odr_types.length ();
1647 vec_safe_push (odr_types_ptr, val);
1648 }
1649 else if (base_id > val->id)
1650 {
1651 odr_types[val->id] = 0;
1652 /* Be sure we did not recorded any derived types; these may need
1653 renumbering too. */
1654 gcc_assert (val->derived_types.length() == 0);
1655 if (odr_types_ptr)
1656 val->id = odr_types.length ();
1657 vec_safe_push (odr_types_ptr, val);
1658 }
1659 return val;
1660 }
1661
1662 /* Add TYPE od ODR type hash. */
1663
1664 void
1665 register_odr_type (tree type)
1666 {
1667 if (!odr_hash)
1668 odr_hash = new odr_hash_type (23);
1669 /* Arrange things to be nicer and insert main variants first. */
1670 if (odr_type_p (TYPE_MAIN_VARIANT (type)))
1671 get_odr_type (TYPE_MAIN_VARIANT (type), true);
1672 if (TYPE_MAIN_VARIANT (type) != type)
1673 get_odr_type (type, true);
1674 }
1675
1676 /* Return true if type is known to have no derivations. */
1677
1678 bool
1679 type_known_to_have_no_deriavations_p (tree t)
1680 {
1681 return (type_all_derivations_known_p (t)
1682 && (TYPE_FINAL_P (t)
1683 || (odr_hash
1684 && !get_odr_type (t, true)->derived_types.length())));
1685 }
1686
1687 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
1688 recursive printing. */
1689
1690 static void
1691 dump_odr_type (FILE *f, odr_type t, int indent=0)
1692 {
1693 unsigned int i;
1694 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
1695 print_generic_expr (f, t->type, TDF_SLIM);
1696 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
1697 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
1698 if (TYPE_NAME (t->type))
1699 {
1700 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
1701 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
1702 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
1703 if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t->type)))
1704 fprintf (f, "%*s mangled name: %s\n", indent * 2, "",
1705 IDENTIFIER_POINTER
1706 (DECL_ASSEMBLER_NAME (TYPE_NAME (t->type))));
1707 }
1708 if (t->bases.length ())
1709 {
1710 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
1711 for (i = 0; i < t->bases.length (); i++)
1712 fprintf (f, " %i", t->bases[i]->id);
1713 fprintf (f, "\n");
1714 }
1715 if (t->derived_types.length ())
1716 {
1717 fprintf (f, "%*s derived types:\n", indent * 2, "");
1718 for (i = 0; i < t->derived_types.length (); i++)
1719 dump_odr_type (f, t->derived_types[i], indent + 1);
1720 }
1721 fprintf (f, "\n");
1722 }
1723
1724 /* Dump the type inheritance graph. */
1725
1726 static void
1727 dump_type_inheritance_graph (FILE *f)
1728 {
1729 unsigned int i;
1730 if (!odr_types_ptr)
1731 return;
1732 fprintf (f, "\n\nType inheritance graph:\n");
1733 for (i = 0; i < odr_types.length (); i++)
1734 {
1735 if (odr_types[i] && odr_types[i]->bases.length () == 0)
1736 dump_odr_type (f, odr_types[i]);
1737 }
1738 for (i = 0; i < odr_types.length (); i++)
1739 {
1740 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
1741 {
1742 unsigned int j;
1743 fprintf (f, "Duplicate tree types for odr type %i\n", i);
1744 print_node (f, "", odr_types[i]->type, 0);
1745 for (j = 0; j < odr_types[i]->types->length (); j++)
1746 {
1747 tree t;
1748 fprintf (f, "duplicate #%i\n", j);
1749 print_node (f, "", (*odr_types[i]->types)[j], 0);
1750 t = (*odr_types[i]->types)[j];
1751 while (TYPE_P (t) && TYPE_CONTEXT (t))
1752 {
1753 t = TYPE_CONTEXT (t);
1754 print_node (f, "", t, 0);
1755 }
1756 putc ('\n',f);
1757 }
1758 }
1759 }
1760 }
1761
1762 /* Given method type T, return type of class it belongs to.
1763 Look up this pointer and get its type. */
1764
1765 tree
1766 method_class_type (const_tree t)
1767 {
1768 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
1769 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
1770
1771 return TREE_TYPE (first_parm_type);
1772 }
1773
/* Initialize IPA devirt and build inheritance tree graph.
   Idempotent: returns immediately once ODR_HASH exists.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  if (odr_hash)
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);

  /* We reconstruct the graph starting of types of all methods seen in
     the unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& n->real_symbol_p ())
      get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
		    true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variant - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node *> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}
1830
1831 /* Return true if N has reference from live virtual table
1832 (and thus can be a destination of polymorphic call).
1833 Be conservatively correct when callgraph is not built or
1834 if the method may be referred externally. */
1835
1836 static bool
1837 referenced_from_vtable_p (struct cgraph_node *node)
1838 {
1839 int i;
1840 struct ipa_ref *ref;
1841 bool found = false;
1842
1843 if (node->externally_visible
1844 || DECL_EXTERNAL (node->decl)
1845 || node->used_from_other_partition)
1846 return true;
1847
1848 /* Keep this test constant time.
1849 It is unlikely this can happen except for the case where speculative
1850 devirtualization introduced many speculative edges to this node.
1851 In this case the target is very likely alive anyway. */
1852 if (node->ref_list.referring.length () > 100)
1853 return true;
1854
1855 /* We need references built. */
1856 if (symtab->state <= CONSTRUCTION)
1857 return true;
1858
1859 for (i = 0; node->iterate_referring (i, ref); i++)
1860 if ((ref->use == IPA_REF_ALIAS
1861 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
1862 || (ref->use == IPA_REF_ADDR
1863 && TREE_CODE (ref->referring->decl) == VAR_DECL
1864 && DECL_VIRTUAL_P (ref->referring->decl)))
1865 {
1866 found = true;
1867 break;
1868 }
1869 return found;
1870 }
1871
/* If TARGET has associated node, record it in the NODES array.
   CAN_REFER specify if program can refer to the target directly.
   if TARGET is unknown (NULL) or it can not be inserted (for example because
   its body was already removed and there is no way to refer to it), clear
   COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, hash_set<tree> *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;

  /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer alias target over aliases, so we do not get confused by
     fake duplicates.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      /* NOTE(review): the bare get_availability () truthiness test only
	 excludes AVAIL_UNSET while avail is already known to be
	 >= AVAIL_AVAILABLE — confirm this third condition is intended.  */
      if (target_node != alias_target
	  && avail >= AVAIL_AVAILABLE
	  && target_node->get_availability ())
	target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
	  || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      /* Record each node only once; INSERTED deduplicates targets.  */
      if (!inserted->add (target))
	{
	  cached_polymorphic_call_targets->add (target_node);
	  nodes.safe_push (target_node);
	}
    }
  /* Otherwise the target cannot be recorded; unless it is a removed
     anonymous-namespace method in a non-ltrans unit, the target list is
     no longer complete.  */
  else if (completep
	   && (!type_in_anonymous_namespace_p
		 (DECL_CONTEXT (target))
	       || flag_ltrans))
    *completep = false;
}
1958
/* See if BINFO's type matches OUTER_TYPE.  If so, look up
   BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
   method in vtable and insert method to NODES array
   or BASES_TO_CONSIDER if this array is non-NULL.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOS of types with defined
   virtual table seen on way from class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for virtual function in. INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.

   Clear COMPLETEP when we hit unreferable target.
 */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  hash_set<tree> *inserted,
			  hash_set<tree> *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  /* Maintain the stack of BINFOs with virtual tables; popped again
     before every return path below and after the base walk.  */
  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      if (!inner_binfo)
	{
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_node::get (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      /* When BASES_TO_CONSIDER is non-NULL we only peek (contains) so the
	 caller can revisit the vtable later; otherwise we also mark it
	 visited (add).  */
      if (bases_to_consider
	  ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
	  : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction vtables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
2069
/* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them to NODES, recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES are used to avoid duplicate walking vtables.
   Clear COMPLETEP if unreferable target is found.

   If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
   all cases where BASE_SKIPPED is true (because the base is abstract
   class).  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
				     hash_set<tree> *inserted,
				     hash_set<tree> *matched_vtables,
				     tree otr_type,
				     odr_type type,
				     HOST_WIDE_INT otr_token,
				     tree outer_type,
				     HOST_WIDE_INT offset,
				     bool *completep,
				     vec <tree> &bases_to_consider,
				     bool consider_construction)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  /* Fresh BINFO stack for each type we process; record_target_from_binfo
     pushes/pops it while walking.  */
  auto_vec <tree, 8> type_binfos;
  bool possibly_instantiated = type_possibly_instantiated_p (type->type);

  /* We may need to consider types w/o instances because of possible derived
     types using their methods either directly or via construction vtables.
     We are safe to skip them when all derivations are known, since we will
     handle them later.
     This is done by recording them to BASES_TO_CONSIDER array.  */
  if (possibly_instantiated || consider_construction)
    {
      record_target_from_binfo (nodes,
				(!possibly_instantiated
				 && type_all_derivations_known_p (type->type))
				? &bases_to_consider : NULL,
				binfo, otr_type, type_binfos, otr_token,
				outer_type, offset,
				inserted, matched_vtables,
				type->anonymous_namespace, completep);
    }
  /* Recurse into every directly derived type.  */
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
					 matched_vtables,
					 otr_type,
					 type->derived_types[i],
					 otr_token, outer_type, offset, completep,
					 bases_to_consider, consider_construction);
}
2122
/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

struct polymorphic_call_target_d
{
  /* Vtable slot (token) of the call being queried.  */
  HOST_WIDE_INT otr_token;
  /* Canonicalized context (outer type, offset, speculation) of the call.  */
  ipa_polymorphic_call_context context;
  /* ODR type of the call.  */
  odr_type type;
  /* Memoized list of possible targets.  */
  vec <cgraph_node *> targets;
  /* When non-NULL, key into final_warning_records->decl_warnings used to
     re-account a -Wsuggest-final-methods candidate on cache hits.  */
  tree decl_warning;
  /* When nonzero, 1 + index into final_warning_records->type_warnings used
     to re-account a -Wsuggest-final-types candidate on cache hits.  */
  int type_warning;
  /* True when TARGETS is known to be a complete list.  */
  bool complete;
  /* True when the query asked for speculative targets only.  */
  bool speculative;
};
2140
/* Polymorphic call target cache helpers.  */

struct polymorphic_call_target_hasher
{
  /* Hash table traits: entries are heap-allocated
     polymorphic_call_target_d records, compared against keys of the
     same type.  */
  typedef polymorphic_call_target_d value_type;
  typedef polymorphic_call_target_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
2151
/* Return the computed hashcode for ODR_QUERY.  */

inline hashval_t
polymorphic_call_target_hasher::hash (const value_type *odr_query)
{
  /* Seed with the call token; every other component of the canonicalized
     query is mixed in below.  Must stay in sync with ::equal.  */
  inchash::hash hstate (odr_query->otr_token);

  hstate.add_wide_int (odr_query->type->id);
  hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
  hstate.add_wide_int (odr_query->context.offset);

  /* Speculative parts of the context are mixed in only when present.  */
  if (odr_query->context.speculative_outer_type)
    {
      hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
      hstate.add_wide_int (odr_query->context.speculative_offset);
    }
  hstate.add_flag (odr_query->speculative);
  hstate.add_flag (odr_query->context.maybe_in_construction);
  hstate.add_flag (odr_query->context.maybe_derived_type);
  hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
  hstate.commit_flag ();
  return hstate.end ();
}
2175
2176 /* Compare cache entries T1 and T2. */
2177
2178 inline bool
2179 polymorphic_call_target_hasher::equal (const value_type *t1,
2180 const compare_type *t2)
2181 {
2182 return (t1->type == t2->type && t1->otr_token == t2->otr_token
2183 && t1->speculative == t2->speculative
2184 && t1->context.offset == t2->context.offset
2185 && t1->context.speculative_offset == t2->context.speculative_offset
2186 && t1->context.outer_type == t2->context.outer_type
2187 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
2188 && t1->context.maybe_in_construction
2189 == t2->context.maybe_in_construction
2190 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
2191 && (t1->context.speculative_maybe_derived_type
2192 == t2->context.speculative_maybe_derived_type));
2193 }
2194
/* Remove entry in polymorphic call target cache hash.  */

inline void
polymorphic_call_target_hasher::remove (value_type *v)
{
  /* The entry owns its target vector; release it before freeing the
     entry itself.  */
  v->targets.release ();
  free (v);
}
2203
/* Polymorphic call target query cache.  */

typedef hash_table<polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
/* Lazily allocated cache instance; created together with
   cached_polymorphic_call_targets on the first query.  */
static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
2209
2210 /* Destroy polymorphic call target query cache. */
2211
2212 static void
2213 free_polymorphic_call_targets_hash ()
2214 {
2215 if (cached_polymorphic_call_targets)
2216 {
2217 delete polymorphic_call_target_hash;
2218 polymorphic_call_target_hash = NULL;
2219 delete cached_polymorphic_call_targets;
2220 cached_polymorphic_call_targets = NULL;
2221 }
2222 }
2223
2224 /* When virtual function is removed, we may need to flush the cache. */
2225
2226 static void
2227 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
2228 {
2229 if (cached_polymorphic_call_targets
2230 && cached_polymorphic_call_targets->contains (n))
2231 free_polymorphic_call_targets_hash ();
2232 }
2233
/* Look up base of BINFO that has virtual table VTABLE with OFFSET.  */

tree
subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
				tree vtable)
{
  tree v = BINFO_VTABLE (binfo);
  int i;
  tree base_binfo;
  unsigned HOST_WIDE_INT this_offset;

  if (v)
    {
      if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
	gcc_unreachable ();

      /* Vtables are compared by assembler name so that declarations
	 from different units match.  */
      if (offset == this_offset
	  && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
	return binfo;
    }

  /* Recurse, skipping bases that cannot carry a virtual table.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (polymorphic_type_binfo_p (base_binfo))
      {
	base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
	if (base_binfo)
	  return base_binfo;
      }
  return NULL;
}
2264
/* T is known constant value of virtual table pointer.
   Store virtual table to V and its offset to OFFSET.
   Return false if T does not look like virtual table reference.  */

bool
vtable_pointer_value_to_vtable (const_tree t, tree *v,
				unsigned HOST_WIDE_INT *offset)
{
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain object's BINFO from the context of the virtual table.
     This one contains pointer to virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer matches what
     we propagated through.

     In the case of virtual inheritance, the virtual tables may
     be nested, i.e. the offset may be different from 16 and we may
     need to dive into the type representation.  */
  if (TREE_CODE (t) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
	  == VAR_DECL)
      && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
				       (TREE_OPERAND (t, 0), 0), 0)))
    {
      /* The VAR_DECL inside the MEM_REF is the vtable; the INTEGER_CST
	 operand of the MEM_REF is the byte offset into it.  */
      *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
      *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
      return true;
    }

  /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
     We need to handle it when T comes from static variable initializer or
     BINFO.  */
  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
    {
      *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
      t = TREE_OPERAND (t, 0);
    }
  else
    *offset = 0;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;
  *v = TREE_OPERAND (t, 0);
  return true;
}
2312
2313 /* T is known constant value of virtual table pointer. Return BINFO of the
2314 instance type. */
2315
2316 tree
2317 vtable_pointer_value_to_binfo (const_tree t)
2318 {
2319 tree vtable;
2320 unsigned HOST_WIDE_INT offset;
2321
2322 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2323 return NULL_TREE;
2324
2325 /* FIXME: for stores of construction vtables we return NULL,
2326 because we do not have BINFO for those. Eventually we should fix
2327 our representation to allow this case to be handled, too.
2328 In the case we see store of BINFO we however may assume
2329 that standard folding will be able to cope with it. */
2330 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2331 offset, vtable);
2332 }
2333
2334 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2335 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2336 and insert them in NODES.
2337
2338 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
2339
2340 static void
2341 record_targets_from_bases (tree otr_type,
2342 HOST_WIDE_INT otr_token,
2343 tree outer_type,
2344 HOST_WIDE_INT offset,
2345 vec <cgraph_node *> &nodes,
2346 hash_set<tree> *inserted,
2347 hash_set<tree> *matched_vtables,
2348 bool *completep)
2349 {
2350 while (true)
2351 {
2352 HOST_WIDE_INT pos, size;
2353 tree base_binfo;
2354 tree fld;
2355
2356 if (types_same_for_odr (outer_type, otr_type))
2357 return;
2358
2359 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2360 {
2361 if (TREE_CODE (fld) != FIELD_DECL)
2362 continue;
2363
2364 pos = int_bit_position (fld);
2365 size = tree_to_shwi (DECL_SIZE (fld));
2366 if (pos <= offset && (pos + size) > offset
2367 /* Do not get confused by zero sized bases. */
2368 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2369 break;
2370 }
2371 /* Within a class type we should always find corresponding fields. */
2372 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2373
2374 /* Nonbase types should have been stripped by outer_class_type. */
2375 gcc_assert (DECL_ARTIFICIAL (fld));
2376
2377 outer_type = TREE_TYPE (fld);
2378 offset -= pos;
2379
2380 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2381 offset, otr_type);
2382 if (!base_binfo)
2383 {
2384 gcc_assert (odr_violation_reported);
2385 return;
2386 }
2387 gcc_assert (base_binfo);
2388 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2389 {
2390 bool can_refer;
2391 tree target = gimple_get_virt_method_for_binfo (otr_token,
2392 base_binfo,
2393 &can_refer);
2394 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2395 maybe_record_node (nodes, target, inserted, can_refer, completep);
2396 matched_vtables->add (BINFO_VTABLE (base_binfo));
2397 }
2398 }
2399 }
2400
2401 /* When virtual table is removed, we may need to flush the cache. */
2402
2403 static void
2404 devirt_variable_node_removal_hook (varpool_node *n,
2405 void *d ATTRIBUTE_UNUSED)
2406 {
2407 if (cached_polymorphic_call_targets
2408 && DECL_VIRTUAL_P (n->decl)
2409 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2410 free_polymorphic_call_targets_hash ();
2411 }
2412
/* Record about how many calls would benefit from given type to be final.  */

struct odr_type_warn_count
{
  /* The ODR type that could be declared final.  */
  tree type;
  /* Number of queries that would benefit.  */
  int count;
  /* Profile count accumulated over those queries.  */
  gcov_type dyn_count;
};
2421
/* Record about how many calls would benefit from given method to be final.  */

struct decl_warn_count
{
  /* The method declaration that could be declared final.  */
  tree decl;
  /* Number of queries that would benefit.  */
  int count;
  /* Profile count accumulated over those queries.  */
  gcov_type dyn_count;
};
2430
/* Information about type and decl warnings.  */

struct final_warning_record
{
  /* Profile count of the call currently being analyzed; added into the
     per-type/per-decl counters below.  */
  gcov_type dyn_count;
  /* -Wsuggest-final-types candidates, indexed by odr_type id.  */
  vec<odr_type_warn_count> type_warnings;
  /* -Wsuggest-final-methods candidates, keyed by method decl.  */
  hash_map<tree, decl_warn_count> decl_warnings;
};
/* Global record; NULL unless warning collection is active (every use is
   guarded by a NULL check).  */
struct final_warning_record *final_warning_records;
2440
/* Return vector containing possible targets of polymorphic call of type
   OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
   If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
   OTR_TYPE and include their virtual method.  This is useful for types
   possibly in construction or destruction where the virtual table may
   temporarily change to one of base types.  INCLUDE_DERIVER_TYPES make
   us to walk the inheritance graph for all derivations.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
   in the target cache.  If user needs to visit every target list
   just once, it can memoize them.

   If SPECULATIVE is set, the list will not contain targets that
   are not speculatively taken.

   Returned vector is placed into cache.  It is NOT caller's responsibility
   to free it.  The vector can be freed on cgraph_remove_node call if
   the particular node is a virtual function present in the cache.  */

vec <cgraph_node *>
possible_polymorphic_call_targets (tree otr_type,
				   HOST_WIDE_INT otr_token,
				   ipa_polymorphic_call_context context,
				   bool *completep,
				   void **cache_token,
				   bool speculative)
{
  static struct cgraph_node_hook_list *node_removal_hook_holder;
  vec <cgraph_node *> nodes = vNULL;
  auto_vec <tree, 8> bases_to_consider;
  odr_type type, outer_type;
  polymorphic_call_target_d key;
  polymorphic_call_target_d **slot;
  unsigned int i;
  tree binfo, target;
  bool complete;
  bool can_refer = false;
  bool skipped = false;

  otr_type = TYPE_MAIN_VARIANT (otr_type);

  /* If ODR is not initialized or the context is invalid, return empty
     incomplete list.  */
  if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
    {
      if (completep)
	*completep = context.invalid;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }

  /* Do not bother to compute speculative info when the user does not
     ask for it.  */
  if (!speculative || !context.speculative_outer_type)
    context.clear_speculation ();

  type = get_odr_type (otr_type, true);

  /* Recording type variants would waste results cache.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* Look up the outer class type we want to walk.
     If we fail to do so, the context is invalid.  */
  if ((context.outer_type || context.speculative_outer_type)
      && !context.restrict_to_inner_class (otr_type))
    {
      if (completep)
	*completep = true;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }
  gcc_assert (!context.invalid);

  /* Check that restrict_to_inner_class kept the main variant.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* We canonicalize our query, so we do not need extra hashtable entries.  */

  /* Without outer type, we have no use for offset.  Just do the
     basic search from inner type.  */
  if (!context.outer_type)
    context.clear_outer_type (otr_type);
  /* We need to update our hierarchy if the type does not exist.  */
  outer_type = get_odr_type (context.outer_type, true);
  /* If the type is complete, there are no derivations.  */
  if (TYPE_FINAL_P (outer_type->type))
    context.maybe_derived_type = false;

  /* Initialize query cache.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
      polymorphic_call_target_hash
	= new polymorphic_call_target_hash_type (23);
      if (!node_removal_hook_holder)
	{
	  node_removal_hook_holder =
	    symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
	  symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
					    NULL);
	}
    }

  /* With LTO, canonicalize the outer types through the ODR type table so
     equal types from different units share a cache entry.  */
  if (in_lto_p)
    {
      if (context.outer_type != otr_type)
	context.outer_type
	  = get_odr_type (context.outer_type, true)->type;
      if (context.speculative_outer_type)
	context.speculative_outer_type
	  = get_odr_type (context.speculative_outer_type, true)->type;
    }

  /* Look up cached answer.  */
  key.type = type;
  key.otr_token = otr_token;
  key.speculative = speculative;
  key.context = context;
  slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
  if (cache_token)
    *cache_token = (void *)*slot;
  if (*slot)
    {
      if (completep)
	*completep = (*slot)->complete;
      /* On a cache hit still account the -Wsuggest-final-* statistics for
	 this call.  */
      if ((*slot)->type_warning && final_warning_records)
	{
	  final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
	  final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
	    += final_warning_records->dyn_count;
	}
      if (!speculative && (*slot)->decl_warning && final_warning_records)
	{
	  struct decl_warn_count *c =
	     final_warning_records->decl_warnings.get ((*slot)->decl_warning);
	  c->count++;
	  c->dyn_count += final_warning_records->dyn_count;
	}
      return (*slot)->targets;
    }

  complete = true;

  /* Do actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *)*slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;
  (*slot)->speculative = speculative;

  hash_set<tree> inserted;
  hash_set<tree> matched_vtables;

  /* First insert targets we speculatively identified as likely.  */
  if (context.speculative_outer_type)
    {
      odr_type speculative_outer_type;
      bool speculation_complete = true;

      /* First insert target from type itself and check if it may have
	 derived types.  */
      speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
      if (TYPE_FINAL_P (speculative_outer_type->type))
	context.speculative_maybe_derived_type = false;
      binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
				   context.speculative_offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	target = NULL;

      /* In the case we get complete method, we don't need
	 to walk derivations.  */
      if (target && DECL_FINAL_P (target))
	context.speculative_maybe_derived_type = false;
      if (type_possibly_instantiated_p (speculative_outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));


      /* Next walk recursively all derived types.  */
      if (context.speculative_maybe_derived_type)
	for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
	  possible_polymorphic_call_targets_1 (nodes, &inserted,
					       &matched_vtables,
					       otr_type,
					       speculative_outer_type->derived_types[i],
					       otr_token, speculative_outer_type->type,
					       context.speculative_offset,
					       &speculation_complete,
					       bases_to_consider,
					       false);
    }

  if (!speculative || !nodes.length ())
    {
      /* First see virtual method of type itself.  */
      binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
				   context.offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	{
	  gcc_assert (odr_violation_reported);
	  target = NULL;
	}

      /* Destructors are never called through construction virtual tables,
	 because the type is always known.  */
      if (target && DECL_CXX_DESTRUCTOR_P (target))
	context.maybe_in_construction = false;

      if (target)
	{
	  /* In the case we get complete method, we don't need
	     to walk derivations.  */
	  if (DECL_FINAL_P (target))
	    context.maybe_derived_type = false;
	}

      /* If OUTER_TYPE is abstract, we know we are not seeing its instance.  */
      if (type_possibly_instantiated_p (outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &complete);
      else
	skipped = true;

      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));

      /* Next walk recursively all derived types.  */
      if (context.maybe_derived_type)
	{
	  for (i = 0; i < outer_type->derived_types.length(); i++)
	    possible_polymorphic_call_targets_1 (nodes, &inserted,
						 &matched_vtables,
						 otr_type,
						 outer_type->derived_types[i],
						 otr_token, outer_type->type,
						 context.offset, &complete,
						 bases_to_consider,
						 context.maybe_in_construction);

	  /* When not all derivations are known the list cannot be
	     complete; before giving up record -Wsuggest-final-* candidates
	     for lists that would have been complete.  */
	  if (!outer_type->all_derivations_known)
	    {
	      if (!speculative && final_warning_records)
		{
		  if (complete
		      && nodes.length () == 1
		      && warn_suggest_final_types
		      && !outer_type->derived_types.length ())
		    {
		      if (outer_type->id >= (int)final_warning_records->type_warnings.length ())
			final_warning_records->type_warnings.safe_grow_cleared
			  (odr_types.length ());
		      final_warning_records->type_warnings[outer_type->id].count++;
		      final_warning_records->type_warnings[outer_type->id].dyn_count
			+= final_warning_records->dyn_count;
		      final_warning_records->type_warnings[outer_type->id].type
			= outer_type->type;
		      (*slot)->type_warning = outer_type->id + 1;
		    }
		  if (complete
		      && warn_suggest_final_methods
		      && nodes.length () == 1
		      && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
					     outer_type->type))
		    {
		      bool existed;
		      struct decl_warn_count &c =
			 final_warning_records->decl_warnings.get_or_insert
			    (nodes[0]->decl, &existed);

		      if (existed)
			{
			  c.count++;
			  c.dyn_count += final_warning_records->dyn_count;
			}
		      else
			{
			  c.count = 1;
			  c.dyn_count = final_warning_records->dyn_count;
			  c.decl = nodes[0]->decl;
			}
		      (*slot)->decl_warning = nodes[0]->decl;
		    }
		}
	      complete = false;
	    }
	}

      if (!speculative)
	{
	  /* Destructors are never called through construction virtual tables,
	     because the type is always known.  One of entries may be
	     cxa_pure_virtual so look to at least two of them.  */
	  if (context.maybe_in_construction)
	    for (i =0 ; i < MIN (nodes.length (), 2); i++)
	      if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
		context.maybe_in_construction = false;
	  if (context.maybe_in_construction)
	    {
	      if (type != outer_type
		  && (!skipped
		      || (context.maybe_derived_type
			  && !type_all_derivations_known_p (outer_type->type))))
		record_targets_from_bases (otr_type, otr_token, outer_type->type,
					   context.offset, nodes, &inserted,
					   &matched_vtables, &complete);
	      if (skipped)
		maybe_record_node (nodes, target, &inserted, can_refer, &complete);
	      for (i = 0; i < bases_to_consider.length(); i++)
		maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
	    }
	}
    }

  /* Store the result into the cache entry and return.  */
  (*slot)->targets = nodes;
  (*slot)->complete = complete;
  if (completep)
    *completep = complete;

  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}
2775
/* Callback used when walking the decl_warnings hash map: collect a
   pointer to each record into VEC — presumably for later sorting and
   reporting (confirm against callers).  Returns true so the traversal
   continues.  */

bool
add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
		  vec<const decl_warn_count*> *vec)
{
  vec->safe_push (&value);
  return true;
}
2783
2784 /* Dump target list TARGETS into FILE. */
2785
2786 static void
2787 dump_targets (FILE *f, vec <cgraph_node *> targets)
2788 {
2789 unsigned int i;
2790
2791 for (i = 0; i < targets.length (); i++)
2792 {
2793 char *name = NULL;
2794 if (in_lto_p)
2795 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
2796 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
2797 if (in_lto_p)
2798 free (name);
2799 if (!targets[i]->definition)
2800 fprintf (f, " (no definition%s)",
2801 DECL_DECLARED_INLINE_P (targets[i]->decl)
2802 ? " inline" : "");
2803 }
2804 fprintf (f, "\n");
2805 }
2806
/* Dump all possible targets of a polymorphic call.  */

void
dump_possible_polymorphic_call_targets (FILE *f,
					tree otr_type,
					HOST_WIDE_INT otr_token,
					const ipa_polymorphic_call_context &ctx)
{
  vec <cgraph_node *> targets;
  bool final;
  odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
  unsigned int len;

  /* Nothing to dump if the type was never recorded in the ODR table.  */
  if (!type)
    return;
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, false);
  fprintf (f, " Targets of polymorphic call of type %i:", type->id);
  print_generic_expr (f, type->type, TDF_SLIM);
  fprintf (f, " token %i\n", (int)otr_token);

  ctx.dump (f);

  fprintf (f, " %s%s%s%s\n ",
	   final ? "This is a complete list." :
	   "This is partial list; extra targets may be defined in other units.",
	   ctx.maybe_in_construction ? " (base types included)" : "",
	   ctx.maybe_derived_type ? " (derived types included)" : "",
	   ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
  len = targets.length ();
  dump_targets (f, targets);

  /* Query again restricted to speculative targets; dump them only when
     they differ from the full list.  */
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, true);
  gcc_assert (targets.length () <= len);
  if (targets.length () != len)
    {
      fprintf (f, " Speculative targets:");
      dump_targets (f, targets);
    }
  fprintf (f, "\n");
}
2851
2852
2853 /* Return true if N can be possibly target of a polymorphic call of
2854 OTR_TYPE/OTR_TOKEN. */
2855
2856 bool
2857 possible_polymorphic_call_target_p (tree otr_type,
2858 HOST_WIDE_INT otr_token,
2859 const ipa_polymorphic_call_context &ctx,
2860 struct cgraph_node *n)
2861 {
2862 vec <cgraph_node *> targets;
2863 unsigned int i;
2864 enum built_in_function fcode;
2865 bool final;
2866
2867 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
2868 && ((fcode = DECL_FUNCTION_CODE (n->decl))
2869 == BUILT_IN_UNREACHABLE
2870 || fcode == BUILT_IN_TRAP))
2871 return true;
2872
2873 if (!odr_hash)
2874 return true;
2875 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
2876 for (i = 0; i < targets.length (); i++)
2877 if (n->semantically_equivalent_p (targets[i]))
2878 return true;
2879
2880 /* At a moment we allow middle end to dig out new external declarations
2881 as a targets of polymorphic calls. */
2882 if (!final && !n->definition)
2883 return true;
2884 return false;
2885 }
2886
2887
2888
2889 /* Return true if N can be possibly target of a polymorphic call of
2890 OBJ_TYPE_REF expression REF in STMT. */
2891
2892 bool
2893 possible_polymorphic_call_target_p (tree ref,
2894 gimple stmt,
2895 struct cgraph_node *n)
2896 {
2897 ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
2898 tree call_fn = gimple_call_fn (stmt);
2899
2900 return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
2901 tree_to_uhwi
2902 (OBJ_TYPE_REF_TOKEN (call_fn)),
2903 context,
2904 n);
2905 }
2906
2907
/* After callgraph construction new external nodes may appear.
   Add them into the graph.  */

void
update_type_inheritance_graph (void)
{
  struct cgraph_node *n;

  if (!odr_hash)
    return;
  /* Cached target lists may be invalidated by the new nodes; drop them.  */
  free_polymorphic_call_targets_hash ();
  timevar_push (TV_IPA_INHERITANCE);
  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  */
  FOR_EACH_FUNCTION (n)
    if (DECL_VIRTUAL_P (n->decl)
	&& !n->definition
	&& n->real_symbol_p ())
      get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
		    true);
  timevar_pop (TV_IPA_INHERITANCE);
}
2930
2931
2932 /* Return true if N looks like likely target of a polymorphic call.
2933 Rule out cxa_pure_virtual, noreturns, function declared cold and
2934 other obvious cases. */
2935
2936 bool
2937 likely_target_p (struct cgraph_node *n)
2938 {
2939 int flags;
2940 /* cxa_pure_virtual and similar things are not likely. */
2941 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
2942 return false;
2943 flags = flags_from_decl_or_type (n->decl);
2944 if (flags & ECF_NORETURN)
2945 return false;
2946 if (lookup_attribute ("cold",
2947 DECL_ATTRIBUTES (n->decl)))
2948 return false;
2949 if (n->frequency < NODE_FREQUENCY_NORMAL)
2950 return false;
2951 /* If there are no live virtual tables referring the target,
2952 the only way the target can be called is an instance coming from other
2953 compilation unit; speculative devirtualization is built around an
2954 assumption that won't happen. */
2955 if (!referenced_from_vtable_p (n))
2956 return false;
2957 return true;
2958 }
2959
2960 /* Compare type warning records P1 and P2 and choose one with larger count;
2961 helper for qsort. */
2962
2963 int
2964 type_warning_cmp (const void *p1, const void *p2)
2965 {
2966 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
2967 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
2968
2969 if (t1->dyn_count < t2->dyn_count)
2970 return 1;
2971 if (t1->dyn_count > t2->dyn_count)
2972 return -1;
2973 return t2->count - t1->count;
2974 }
2975
2976 /* Compare decl warning records P1 and P2 and choose one with larger count;
2977 helper for qsort. */
2978
2979 int
2980 decl_warning_cmp (const void *p1, const void *p2)
2981 {
2982 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
2983 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
2984
2985 if (t1->dyn_count < t2->dyn_count)
2986 return 1;
2987 if (t1->dyn_count > t2->dyn_count)
2988 return -1;
2989 return t2->count - t1->count;
2990 }
2991
2992
/* Try to speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
   context CTX.

   Returns the single likely target, or NULL when there is no target or
   more than one likely target, or the single candidate is unsuitable
   (undefined, external, artificial, or interposable-and-discardable).  */

struct cgraph_node *
try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
				  ipa_polymorphic_call_context ctx)
{
  vec <cgraph_node *>targets
     = possible_polymorphic_call_targets
	  (otr_type, otr_token, ctx, NULL, NULL, true);
  unsigned int i;
  struct cgraph_node *likely_target = NULL;

  /* Speculation only pays off with exactly one likely target; bail out
     as soon as a second one is found.  */
  for (i = 0; i < targets.length (); i++)
    if (likely_target_p (targets[i]))
      {
	if (likely_target)
	  return NULL;
	likely_target = targets[i];
      }
  if (!likely_target
      ||!likely_target->definition
      || DECL_EXTERNAL (likely_target->decl))
    return NULL;

  /* Don't use an implicitly-declared destructor (c++/58678).  */
  struct cgraph_node *non_thunk_target
    = likely_target->function_symbol ();
  if (DECL_ARTIFICIAL (non_thunk_target->decl))
    return NULL;
  /* An interposable symbol that may also be discarded at link time gives
     no guarantee the speculation stays valid.  */
  if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
      && likely_target->can_be_discarded_p ())
    return NULL;
  return likely_target;
}
3028
/* The ipa-devirt pass.
   When polymorphic call has only one likely target in the unit,
   turn it into a speculative call.

   Additionally emits -Wsuggest-final-types/-Wsuggest-final-methods
   warnings and drops polymorphic call info that ipa-prop cannot use.
   Returns TODO flags for the pass manager.  */

static unsigned int
ipa_devirt (void)
{
  struct cgraph_node *n;
  /* Cache tokens of target lists already known to be useless, so the
     (potentially expensive) scan is done once per distinct list.  */
  hash_set<void *> bad_call_targets;
  struct cgraph_edge *e;

  /* Statistics reported in the dump file at the end of the pass.  */
  int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
  int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
  int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
  int ndropped = 0;

  if (!odr_types_ptr)
    return 0;

  /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
     This is implemented by setting up final_warning_records that are updated
     by get_polymorphic_call_targets.
     We need to clear cache in this case to trigger recomputation of all
     entries.  */
  if (warn_suggest_final_methods || warn_suggest_final_types)
    {
      final_warning_records = new (final_warning_record);
      final_warning_records->type_warnings = vNULL;
      final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
      free_polymorphic_call_targets_hash ();
    }

  FOR_EACH_DEFINED_FUNCTION (n)
    {
      bool update = false;
      if (!opt_for_fn (n->decl, flag_devirtualize))
	continue;
      if (dump_file && n->indirect_calls)
	fprintf (dump_file, "\n\nProcesing function %s/%i\n",
		 n->name (), n->order);
      for (e = n->indirect_calls; e; e = e->next_callee)
	if (e->indirect_info->polymorphic)
	  {
	    struct cgraph_node *likely_target = NULL;
	    void *cache_token;
	    bool final;

	    if (final_warning_records)
	      final_warning_records->dyn_count = e->count;

	    /* Speculative (complete) target list for this call; also
	       yields CACHE_TOKEN identifying the list for caching.  */
	    vec <cgraph_node *>targets
	       = possible_polymorphic_call_targets
		    (e, &final, &cache_token, true);
	    unsigned int i;

	    /* Trigger warnings by calculating non-speculative targets.  */
	    if (warn_suggest_final_methods || warn_suggest_final_types)
	      possible_polymorphic_call_targets (e);

	    if (dump_file)
	      dump_possible_polymorphic_call_targets
		(dump_file, e);

	    npolymorphic++;

	    /* See if the call can be devirtualized by means of ipa-prop's
	       polymorphic call context propagation.  If not, we can just
	       forget about this call being polymorphic and avoid some heavy
	       lifting in remove_unreachable_nodes that will otherwise try to
	       keep all possible targets alive until inlining and in the inliner
	       itself.

	       This may need to be revisited once we add further ways to use
	       the may edges, but it is a resonable thing to do right now.  */

	    if ((e->indirect_info->param_index == -1
		|| (!opt_for_fn (n->decl, flag_devirtualize_speculatively)
		    && e->indirect_info->vptr_changed))
		&& !flag_ltrans_devirtualize)
	      {
		e->indirect_info->polymorphic = false;
		ndropped++;
	        if (dump_file)
		  fprintf (dump_file, "Dropping polymorphic call info;"
			   " it can not be used by ipa-prop\n");
	      }

	    if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
	      continue;

	    if (!e->maybe_hot_p ())
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is cold\n\n");
		ncold++;
		continue;
	      }
	    if (e->speculative)
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is already speculated\n\n");
		nspeculated++;

		/* When dumping see if we agree with speculation.  */
		if (!dump_file)
		  continue;
	      }
	    if (bad_call_targets.contains (cache_token))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target list is known to be useless\n\n");
		nmultiple++;
		continue;
	      }
	    /* Speculation needs exactly one likely target.  */
	    for (i = 0; i < targets.length (); i++)
	      if (likely_target_p (targets[i]))
		{
		  if (likely_target)
		    {
		      likely_target = NULL;
		      if (dump_file)
			fprintf (dump_file, "More than one likely target\n\n");
		      nmultiple++;
		      break;
		    }
		  likely_target = targets[i];
		}
	    if (!likely_target)
	      {
		bad_call_targets.add (cache_token);
	        continue;
	      }
	    /* This is reached only when dumping; check if we agree or disagree
	       with the speculation.  */
	    if (e->speculative)
	      {
		struct cgraph_edge *e2;
		struct ipa_ref *ref;
		e->speculative_call_info (e2, e, ref);
		if (e2->callee->ultimate_alias_target ()
		    == likely_target->ultimate_alias_target ())
		  {
		    fprintf (dump_file, "We agree with speculation\n\n");
		    nok++;
		  }
		else
		  {
		    fprintf (dump_file, "We disagree with speculation\n\n");
		    nwrong++;
		  }
		continue;
	      }
	    if (!likely_target->definition)
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is not a definition\n\n");
		nnotdefined++;
		continue;
	      }
	    /* Do not introduce new references to external symbols.  While we
	       can handle these just well, it is common for programs to
	       incorrectly include headers defining methods they are not
	       actually linked with.  */
	    if (DECL_EXTERNAL (likely_target->decl))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is external\n\n");
		nexternal++;
		continue;
	      }
	    /* Don't use an implicitly-declared destructor (c++/58678).  */
	    struct cgraph_node *non_thunk_target
	      = likely_target->function_symbol ();
	    if (DECL_ARTIFICIAL (non_thunk_target->decl))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is artificial\n\n");
		nartificial++;
		continue;
	      }
	    /* Interposable and discardable symbols give no guarantee the
	       speculation stays valid at link time.  */
	    if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
		&& likely_target->can_be_discarded_p ())
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is overwritable\n\n");
		noverwritable++;
		continue;
	      }
	    else if (dbg_cnt (devirt))
	      {
		if (dump_enabled_p ())
                  {
                    location_t locus = gimple_location_safe (e->call_stmt);
                    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
				     "speculatively devirtualizing call in %s/%i to %s/%i\n",
				     n->name (), n->order,
				     likely_target->name (),
				     likely_target->order);
                  }
		/* Prefer a non-interposable alias so the speculative edge
		   cannot be invalidated by symbol interposition.  */
		if (!likely_target->can_be_discarded_p ())
		  {
		    cgraph_node *alias;
		    alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
		    if (alias)
		      likely_target = alias;
		  }
		nconverted++;
		update = true;
		/* Scale profile by 8/10 to account for the chance the
		   speculation is wrong.  */
		e->make_speculative
		  (likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
	      }
	  }
      if (update)
	inline_update_overall_summary (n);
    }
  if (warn_suggest_final_methods || warn_suggest_final_types)
    {
      if (warn_suggest_final_types)
	{
	  /* Report most frequently executed (then most counted) types
	     first.  */
	  final_warning_records->type_warnings.qsort (type_warning_cmp);
	  for (unsigned int i = 0;
	       i < final_warning_records->type_warnings.length (); i++)
	    if (final_warning_records->type_warnings[i].count)
	      {
	        tree type = final_warning_records->type_warnings[i].type;
	        int count = final_warning_records->type_warnings[i].count;
	        long long dyn_count
		  = final_warning_records->type_warnings[i].dyn_count;

		if (!dyn_count)
		  warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
			     OPT_Wsuggest_final_types, count,
			     "Declaring type %qD final "
			     "would enable devirtualization of %i call",
			     "Declaring type %qD final "
			     "would enable devirtualization of %i calls",
			     type,
			     count);
		else
		  warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
			     OPT_Wsuggest_final_types, count,
			     "Declaring type %qD final "
			     "would enable devirtualization of %i call "
			     "executed %lli times",
			     "Declaring type %qD final "
			     "would enable devirtualization of %i calls "
			     "executed %lli times",
			     type,
			     count,
			     dyn_count);
	      }
	}

      if (warn_suggest_final_methods)
	{
	  vec<const decl_warn_count*> decl_warnings_vec = vNULL;

	  final_warning_records->decl_warnings.traverse
	    <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
	  decl_warnings_vec.qsort (decl_warning_cmp);
	  for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
	    {
	      tree decl = decl_warnings_vec[i]->decl;
	      int count = decl_warnings_vec[i]->count;
	      long long dyn_count = decl_warnings_vec[i]->dyn_count;

	      /* Destructors get special wording since they are implicitly
		 named after their class.  */
	      if (!dyn_count)
		if (DECL_CXX_DESTRUCTOR_P (decl))
		  warning_n (DECL_SOURCE_LOCATION (decl),
			      OPT_Wsuggest_final_methods, count,
			      "Declaring virtual destructor of %qD final "
			      "would enable devirtualization of %i call",
			      "Declaring virtual destructor of %qD final "
			      "would enable devirtualization of %i calls",
			      DECL_CONTEXT (decl), count);
		 else
		  warning_n (DECL_SOURCE_LOCATION (decl),
			      OPT_Wsuggest_final_methods, count,
			      "Declaring method %qD final "
			      "would enable devirtualization of %i call",
			      "Declaring method %qD final "
			      "would enable devirtualization of %i calls",
			      decl, count);
	       else if (DECL_CXX_DESTRUCTOR_P (decl))
		  warning_n (DECL_SOURCE_LOCATION (decl),
			      OPT_Wsuggest_final_methods, count,
			      "Declaring virtual destructor of %qD final "
			      "would enable devirtualization of %i call "
			      "executed %lli times",
			      "Declaring virtual destructor of %qD final "
			      "would enable devirtualization of %i calls "
			      "executed %lli times",
			      DECL_CONTEXT (decl), count, dyn_count);
	      else
		  warning_n (DECL_SOURCE_LOCATION (decl),
			      OPT_Wsuggest_final_methods, count,
			      "Declaring method %qD final "
			      "would enable devirtualization of %i call "
			      "executed %lli times",
			      "Declaring method %qD final "
			      "would enable devirtualization of %i calls "
			      "executed %lli times",
			      decl, count, dyn_count);
	    }
	}

      delete (final_warning_records);
      final_warning_records = 0;
    }

  if (dump_file)
    fprintf (dump_file,
	     "%i polymorphic calls, %i devirtualized,"
	     " %i speculatively devirtualized, %i cold\n"
	     "%i have multiple targets, %i overwritable,"
	     " %i already speculated (%i agree, %i disagree),"
	     " %i external, %i not defined, %i artificial, %i infos dropped\n",
	     npolymorphic, ndevirtualized, nconverted, ncold,
	     nmultiple, noverwritable, nspeculated, nok, nwrong,
	     nexternal, nnotdefined, nartificial, ndropped);
  return ndevirtualized || ndropped ? TODO_remove_functions : 0;
}
3351
namespace {

/* Pass descriptor for the ipa-devirt pass; registered under the name
   "devirt" and timed under TV_IPA_DEVIRT.  */
const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};

/* IPA pass wrapper around ipa_devirt; has no summary streaming or
   per-function transform hooks.  */
class pass_ipa_devirt : public ipa_opt_pass_d
{
public:
  pass_ipa_devirt (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* In LTO, always run the IPA passes and decide on function basis if the
	 pass is enabled.  */
      if (in_lto_p)
	return true;
      /* Otherwise run only when devirtualization is requested together with
	 either speculative devirtualization or one of the suggest-final
	 warnings that this pass implements.  */
      return (flag_devirtualize
	      && (flag_devirtualize_speculatively
		  || (warn_suggest_final_methods
		      || warn_suggest_final_types))
	      && optimize);
    }

  virtual unsigned int execute (function *) { return ipa_devirt (); }

}; // class pass_ipa_devirt

} // anon namespace
3402
/* Factory used by the pass manager to instantiate the ipa-devirt pass in
   context CTXT.  */

ipa_opt_pass_d *
make_pass_ipa_devirt (gcc::context *ctxt)
{
  return new pass_ipa_devirt (ctxt);
}
3408
3409 #include "gt-ipa-devirt.h"