]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/ipa-devirt.c
re PR ipa/65557 (ICE: SIGSEGV in hash_table<>::find_slot_with_hash() with -fdevirtual...
[thirdparty/gcc.git] / gcc / ipa-devirt.c
1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2015 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
44
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
   vector.  Members of this vector are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
   inheritance the individual virtual tables are pointed to by
   BINFO_VTABLE of base binfos (which differs from BINFO_VTABLE of
   the binfo associated with the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
76
77 polymorphic (indirect) call
78 This is callgraph representation of virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
97
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
   possible_polymorphic_call_targets returns, given the parameters found in
   an indirect polymorphic edge, all possible polymorphic call targets of the call.
104
105 pass_ipa_devirt performs simple speculative devirtualization.
106 */
107
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "hash-set.h"
113 #include "machmode.h"
114 #include "hash-map.h"
115 #include "vec.h"
116 #include "double-int.h"
117 #include "input.h"
118 #include "alias.h"
119 #include "symtab.h"
120 #include "wide-int.h"
121 #include "inchash.h"
122 #include "tree.h"
123 #include "fold-const.h"
124 #include "print-tree.h"
125 #include "calls.h"
126 #include "predict.h"
127 #include "basic-block.h"
128 #include "is-a.h"
129 #include "plugin-api.h"
130 #include "hard-reg-set.h"
131 #include "function.h"
132 #include "ipa-ref.h"
133 #include "cgraph.h"
134 #include "hashtab.h"
135 #include "rtl.h"
136 #include "flags.h"
137 #include "statistics.h"
138 #include "real.h"
139 #include "fixed-value.h"
140 #include "insn-config.h"
141 #include "expmed.h"
142 #include "dojump.h"
143 #include "explow.h"
144 #include "emit-rtl.h"
145 #include "varasm.h"
146 #include "stmt.h"
147 #include "expr.h"
148 #include "tree-pass.h"
149 #include "target.h"
150 #include "hash-table.h"
151 #include "tree-pretty-print.h"
152 #include "ipa-utils.h"
153 #include "tree-ssa-alias.h"
154 #include "internal-fn.h"
155 #include "gimple-fold.h"
156 #include "gimple-expr.h"
157 #include "gimple.h"
158 #include "alloc-pool.h"
159 #include "symbol-summary.h"
160 #include "ipa-prop.h"
161 #include "ipa-inline.h"
162 #include "diagnostic.h"
163 #include "tree-dfa.h"
164 #include "demangle.h"
165 #include "dbgcnt.h"
166 #include "gimple-pretty-print.h"
167 #include "stor-layout.h"
168 #include "intl.h"
169 #include "streamer-hooks.h"
170 #include "lto-streamer.h"
171
/* Hash based set of pairs of types.  Used by the structural ODR
   equivalence walk (odr_subtypes_equivalent_p) to break recursion on
   mutually referencing types.  */
typedef struct
{
  /* The two types being compared.  The code that builds pairs orders
     them canonically by TYPE_UID (smaller UID in FIRST).  */
  tree first;
  tree second;
} type_pair;
178
179 struct pair_traits : default_hashset_traits
180 {
181 static hashval_t
182 hash (type_pair p)
183 {
184 return TYPE_UID (p.first) ^ TYPE_UID (p.second);
185 }
186 static bool
187 is_empty (type_pair p)
188 {
189 return p.first == NULL;
190 }
191 static bool
192 is_deleted (type_pair p ATTRIBUTE_UNUSED)
193 {
194 return false;
195 }
196 static bool
197 equal (const type_pair &a, const type_pair &b)
198 {
199 return a.first==b.first && a.second == b.second;
200 }
201 static void
202 mark_empty (type_pair &e)
203 {
204 e.first = NULL;
205 }
206 };
207
/* Forward declaration: structural ODR equivalence check defined later
   in this file.  */
static bool odr_types_equivalent_p (tree, tree, bool, bool *,
				    hash_set<type_pair,pair_traits> *);

/* Set to true once any ODR violation warning has been emitted.  */
static bool odr_violation_reported = false;


/* Pointer set of all call targets appearing in the cache.  */
static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
216
/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type representing this ODR equivalence class.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  Used for fast membership
     tests alongside the TYPES vector.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type? */
  bool all_derivations_known;
  /* Did we report ODR violation here?  Used to avoid duplicated
     diagnostics for the same type.  */
  bool odr_violated;
  /* Set when a virtual table without RTTI prevailed a table with it.  */
  bool rtti_broken;
};
247
248 /* Return TRUE if all derived types of T are known and thus
249 we may consider the walk of derived type complete.
250
251 This is typically true only for final anonymous namespace types and types
252 defined within functions (that may be COMDAT and thus shared across units,
253 but with the same set of derived types). */
254
255 bool
256 type_all_derivations_known_p (const_tree t)
257 {
258 if (TYPE_FINAL_P (t))
259 return true;
260 if (flag_ltrans)
261 return false;
262 /* Non-C++ types may have IDENTIFIER_NODE here, do not crash. */
263 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
264 return true;
265 if (type_in_anonymous_namespace_p (t))
266 return true;
267 return (decl_function_context (TYPE_NAME (t)) != NULL);
268 }
269
270 /* Return TRUE if type's constructors are all visible. */
271
272 static bool
273 type_all_ctors_visible_p (tree t)
274 {
275 return !flag_ltrans
276 && symtab->state >= CONSTRUCTION
277 /* We can not always use type_all_derivations_known_p.
278 For function local types we must assume case where
279 the function is COMDAT and shared in between units.
280
281 TODO: These cases are quite easy to get, but we need
282 to keep track of C++ privatizing via -Wno-weak
283 as well as the IPA privatizing. */
284 && type_in_anonymous_namespace_p (t);
285 }
286
287 /* Return TRUE if type may have instance. */
288
289 static bool
290 type_possibly_instantiated_p (tree t)
291 {
292 tree vtable;
293 varpool_node *vnode;
294
295 /* TODO: Add abstract types here. */
296 if (!type_all_ctors_visible_p (t))
297 return true;
298
299 vtable = BINFO_VTABLE (TYPE_BINFO (t));
300 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
301 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
302 vnode = varpool_node::get (vtable);
303 return vnode && vnode->definition;
304 }
305
/* Hash used to unify ODR types based on their mangled name and for anonymous
   namespace types.  */

struct odr_name_hasher
{
  typedef odr_type_d value_type;
  typedef union tree_node compare_type;
  /* Hash an odr_type_d by the ODR (mangled) name of its leader type.  */
  static inline hashval_t hash (const value_type *);
  /* Compare a stored odr_type_d against a candidate tree type.  */
  static inline bool equal (const value_type *, const compare_type *);
  /* Release the memory owned by an entry being removed.  */
  static inline void remove (value_type *);
};
317
/* Hash used to unify ODR types based on their associated virtual table.
   This hash is needed to keep -fno-lto-odr-type-merging to work and contains
   only polymorphic types.  Types with mangled names are inserted to both.  */

struct odr_vtable_hasher:odr_name_hasher
{
  /* Hash/compare by the assembler name of the virtual table instead of
     the type's own mangled name (see hash_odr_vtable).  */
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
327
328 /* Return type that was declared with T's name so that T is an
329 qualified variant of it. */
330
331 static inline tree
332 main_odr_variant (const_tree t)
333 {
334 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
335 return TREE_TYPE (TYPE_NAME (t));
336 /* Unnamed types and non-C++ produced types can be compared by variants. */
337 else
338 return TYPE_MAIN_VARIANT (t);
339 }
340
341 static bool
342 can_be_name_hashed_p (tree t)
343 {
344 return (!in_lto_p || type_in_anonymous_namespace_p (t)
345 || (TYPE_NAME (t) && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t))));
346 }
347
348 /* Hash type by its ODR name. */
349
350 static hashval_t
351 hash_odr_name (const_tree t)
352 {
353 gcc_checking_assert (main_odr_variant (t) == t);
354
355 /* If not in LTO, all main variants are unique, so we can do
356 pointer hash. */
357 if (!in_lto_p)
358 return htab_hash_pointer (t);
359
360 /* Anonymous types are unique. */
361 if (type_in_anonymous_namespace_p (t))
362 return htab_hash_pointer (t);
363
364 gcc_checking_assert (TYPE_NAME (t)
365 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)));
366 return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
367 }
368
/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_name_hasher::hash (const value_type *odr_type)
{
  /* Delegate to the name-based hash of the leader type.  */
  return hash_odr_name (odr_type->type);
}
376
377 static bool
378 can_be_vtable_hashed_p (tree t)
379 {
380 /* vtable hashing can distinguish only main variants. */
381 if (TYPE_MAIN_VARIANT (t) != t)
382 return false;
383 /* Anonymous namespace types are always handled by name hash. */
384 if (type_in_anonymous_namespace_p (t))
385 return false;
386 return (TREE_CODE (t) == RECORD_TYPE
387 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)));
388 }
389
/* Hash type by assembler name of its vtable.  */

static hashval_t
hash_odr_vtable (const_tree t)
{
  tree v = BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (t)));
  inchash::hash hstate;

  /* This hash is meaningful only during LTO for named polymorphic
     RECORD_TYPEs that are their own main ODR variant.  */
  gcc_checking_assert (in_lto_p);
  gcc_checking_assert (!type_in_anonymous_namespace_p (t));
  gcc_checking_assert (TREE_CODE (t) == RECORD_TYPE
		       && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)));
  gcc_checking_assert (main_odr_variant (t) == t);

  /* BINFO_VTABLE may be a POINTER_PLUS_EXPR (&vtable + offset); mix the
     offset into the hash and peel down to the underlying vtable decl.  */
  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      add_expr (TREE_OPERAND (v, 1), hstate);
      v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
    }

  /* Hash the vtable's assembler name, which is stable across units.  */
  hstate.add_wide_int (IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (v)));
  return hstate.end ();
}
413
/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_vtable_hasher::hash (const value_type *odr_type)
{
  /* Delegate to the vtable-based hash of the leader type.  */
  return hash_odr_vtable (odr_type->type);
}
421
422 /* For languages with One Definition Rule, work out if
423 types are the same based on their name.
424
425 This is non-trivial for LTO where minor differences in
426 the type representation may have prevented type merging
427 to merge two copies of otherwise equivalent type.
428
429 Until we start streaming mangled type names, this function works
430 only for polymorphic types.
431
432 When STRICT is true, we compare types by their names for purposes of
433 ODR violation warnings. When strict is false, we consider variants
434 equivalent, becuase it is all that matters for devirtualization machinery.
435 */
436
437 bool
438 types_same_for_odr (const_tree type1, const_tree type2, bool strict)
439 {
440 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
441
442 type1 = main_odr_variant (type1);
443 type2 = main_odr_variant (type2);
444 if (!strict)
445 {
446 type1 = TYPE_MAIN_VARIANT (type1);
447 type2 = TYPE_MAIN_VARIANT (type2);
448 }
449
450 if (type1 == type2)
451 return true;
452
453 if (!in_lto_p)
454 return false;
455
456 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
457 on the corresponding TYPE_STUB_DECL. */
458 if (type_in_anonymous_namespace_p (type1)
459 || type_in_anonymous_namespace_p (type2))
460 return false;
461
462
463 /* ODR name of the type is set in DECL_ASSEMBLER_NAME of its TYPE_NAME.
464
465 Ideally we should never need types without ODR names here. It can however
466 happen in two cases:
467
468 1) for builtin types that are not streamed but rebuilt in lto/lto-lang.c
469 Here testing for equivalence is safe, since their MAIN_VARIANTs are
470 unique.
471 2) for units streamed with -fno-lto-odr-type-merging. Here we can't
472 establish precise ODR equivalency, but for correctness we care only
473 about equivalency on complete polymorphic types. For these we can
474 compare assembler names of their virtual tables. */
475 if ((!TYPE_NAME (type1) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type1)))
476 || (!TYPE_NAME (type2) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type2))))
477 {
478 /* See if types are obviously different (i.e. different codes
479 or polymorphic wrt non-polymorphic). This is not strictly correct
480 for ODR violating programs, but we can't do better without streaming
481 ODR names. */
482 if (TREE_CODE (type1) != TREE_CODE (type2))
483 return false;
484 if (TREE_CODE (type1) == RECORD_TYPE
485 && (TYPE_BINFO (type1) == NULL_TREE)
486 != (TYPE_BINFO (type1) == NULL_TREE))
487 return false;
488 if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
489 && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
490 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
491 return false;
492
493 /* At the moment we have no way to establish ODR equivalence at LTO
494 other than comparing virtual table pointers of polymorphic types.
495 Eventually we should start saving mangled names in TYPE_NAME.
496 Then this condition will become non-trivial. */
497
498 if (TREE_CODE (type1) == RECORD_TYPE
499 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
500 && BINFO_VTABLE (TYPE_BINFO (type1))
501 && BINFO_VTABLE (TYPE_BINFO (type2)))
502 {
503 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
504 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
505 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
506 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
507 return (operand_equal_p (TREE_OPERAND (v1, 1),
508 TREE_OPERAND (v2, 1), 0)
509 && DECL_ASSEMBLER_NAME
510 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
511 == DECL_ASSEMBLER_NAME
512 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
513 }
514 gcc_unreachable ();
515 }
516 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
517 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
518 }
519
520 /* Return true if we can decide on ODR equivalency.
521
522 In non-LTO it is always decide, in LTO however it depends in the type has
523 ODR info attached.
524
525 When STRICT is false, compare main variants. */
526
527 bool
528 types_odr_comparable (tree t1, tree t2, bool strict)
529 {
530 return (!in_lto_p
531 || (strict ? main_odr_variant (t1) == main_odr_variant (t2)
532 : TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
533 || (odr_type_p (t1) && odr_type_p (t2))
534 || (TREE_CODE (t1) == RECORD_TYPE && TREE_CODE (t2) == RECORD_TYPE
535 && TYPE_BINFO (t1) && TYPE_BINFO (t2)
536 && polymorphic_type_binfo_p (TYPE_BINFO (t1))
537 && polymorphic_type_binfo_p (TYPE_BINFO (t2))));
538 }
539
540 /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
541 known, be conservative and return false. */
542
543 bool
544 types_must_be_same_for_odr (tree t1, tree t2)
545 {
546 if (types_odr_comparable (t1, t2))
547 return types_same_for_odr (t1, t2);
548 else
549 return TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2);
550 }
551
552 /* Compare types T1 and T2 and return true if they are
553 equivalent. */
554
555 inline bool
556 odr_name_hasher::equal (const value_type *o1, const compare_type *t2)
557 {
558 tree t1 = o1->type;
559
560 gcc_checking_assert (main_odr_variant (t2) == t2);
561 gcc_checking_assert (main_odr_variant (t1) == t1);
562 if (t1 == t2)
563 return true;
564 if (!in_lto_p)
565 return false;
566 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
567 on the corresponding TYPE_STUB_DECL. */
568 if (type_in_anonymous_namespace_p (t1)
569 || type_in_anonymous_namespace_p (t2))
570 return false;
571 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1)));
572 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
573 return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
574 == DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
575 }
576
/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_vtable_hasher::equal (const value_type *o1, const compare_type *t2)
{
  tree t1 = o1->type;

  gcc_checking_assert (main_odr_variant (t2) == t2);
  gcc_checking_assert (main_odr_variant (t1) == t1);
  gcc_checking_assert (in_lto_p);
  t1 = TYPE_MAIN_VARIANT (t1);
  t2 = TYPE_MAIN_VARIANT (t2);
  if (t1 == t2)
    return true;
  tree v1 = BINFO_VTABLE (TYPE_BINFO (t1));
  tree v2 = BINFO_VTABLE (TYPE_BINFO (t2));
  /* NOTE(review): this assumes both BINFO_VTABLEs are POINTER_PLUS_EXPRs
     (offset in operand 1, ADDR_EXPR of the vtable decl under operand 0) --
     hash_odr_vtable guards this case conditionally; confirm every entry
     inserted into this table has that form.  Equal iff same vtable symbol
     at same offset.  */
  return (operand_equal_p (TREE_OPERAND (v1, 1),
			   TREE_OPERAND (v2, 1), 0)
          && DECL_ASSEMBLER_NAME
		 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
	     == DECL_ASSEMBLER_NAME
		 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
}
601
602 /* Free ODR type V. */
603
604 inline void
605 odr_name_hasher::remove (value_type *v)
606 {
607 v->bases.release ();
608 v->derived_types.release ();
609 if (v->types_set)
610 delete v->types_set;
611 ggc_free (v);
612 }
613
/* ODR type hash used to look up ODR type based on tree type node.  */

/* Table keyed by the type's mangled name (see odr_name_hasher).  */
typedef hash_table<odr_name_hasher> odr_hash_type;
static odr_hash_type *odr_hash;
/* Table keyed by the assembler name of the type's vtable (see
   odr_vtable_hasher); used for -fno-lto-odr-type-merging.  */
typedef hash_table<odr_vtable_hasher> odr_vtable_hash_type;
static odr_vtable_hash_type *odr_vtable_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)
627
628 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
629 void
630 set_type_binfo (tree type, tree binfo)
631 {
632 for (; type; type = TYPE_NEXT_VARIANT (type))
633 if (COMPLETE_TYPE_P (type))
634 TYPE_BINFO (type) = binfo;
635 else
636 gcc_assert (!TYPE_BINFO (type));
637 }
638
639 /* Compare T2 and T2 based on name or structure. */
640
641 static bool
642 odr_subtypes_equivalent_p (tree t1, tree t2,
643 hash_set<type_pair,pair_traits> *visited)
644 {
645 bool an1, an2;
646
647 /* This can happen in incomplete types that should be handled earlier. */
648 gcc_assert (t1 && t2);
649
650 t1 = main_odr_variant (t1);
651 t2 = main_odr_variant (t2);
652 if (t1 == t2)
653 return true;
654
655 /* Anonymous namespace types must match exactly. */
656 an1 = type_in_anonymous_namespace_p (t1);
657 an2 = type_in_anonymous_namespace_p (t2);
658 if (an1 != an2 || an1)
659 return false;
660
661 /* For ODR types be sure to compare their names.
662 To support -wno-odr-type-merging we allow one type to be non-ODR
663 and other ODR even though it is a violation. */
664 if (types_odr_comparable (t1, t2, true))
665 {
666 if (!types_same_for_odr (t1, t2, true))
667 return false;
668 /* Limit recursion: If subtypes are ODR types and we know
669 that they are same, be happy. */
670 if (!get_odr_type (t1, true)->odr_violated)
671 return true;
672 }
673
674 /* Component types, builtins and possibly violating ODR types
675 have to be compared structurally. */
676 if (TREE_CODE (t1) != TREE_CODE (t2))
677 return false;
678 if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
679 return false;
680
681 type_pair pair={t1,t2};
682 if (TYPE_UID (t1) > TYPE_UID (t2))
683 {
684 pair.first = t2;
685 pair.second = t1;
686 }
687 if (visited->add (pair))
688 return true;
689 return odr_types_equivalent_p (t1, t2, false, NULL, visited);
690 }
691
/* Compare two virtual tables, PREVAILING and VTABLE and output ODR
   violation warnings.  PREVAILING is the copy the linker kept; VTABLE
   is the discarded copy from another translation unit.  */

void
compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
{
  int n1, n2;

  /* If only one of the two symbols really is a virtual table, the ODR is
     violated.  Swap so VTABLE names the actual virtual table and warn.  */
  if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
    {
      odr_violation_reported = true;
      if (DECL_VIRTUAL_P (prevailing->decl))
	{
	  varpool_node *tmp = prevailing;
	  prevailing = vtable;
	  vtable = tmp;
	}
      if (warning_at (DECL_SOURCE_LOCATION
			(TYPE_NAME (DECL_CONTEXT (vtable->decl))),
		      OPT_Wodr,
		      "virtual table of type %qD violates one definition rule",
		      DECL_CONTEXT (vtable->decl)))
	inform (DECL_SOURCE_LOCATION (prevailing->decl),
		"variable of same assembler name as the virtual table is "
		"defined in another translation unit");
      return;
    }
  /* Nothing to compare unless both copies are actually defined.  */
  if (!prevailing->definition || !vtable->definition)
    return;

  /* If we do not stream ODR type info, do not bother to do useful compare.  */
  if (!TYPE_BINFO (DECL_CONTEXT (vtable->decl))
      || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable->decl))))
    return;

  odr_type class_type = get_odr_type (DECL_CONTEXT (vtable->decl), true);

  /* Do not repeat diagnostics for a type already flagged.  */
  if (class_type->odr_violated)
    return;

  /* Walk the references (virtual methods and RTTI data) of both vtables
     in parallel; N1/N2 index into each table's reference list.  */
  for (n1 = 0, n2 = 0; true; n1++, n2++)
    {
      struct ipa_ref *ref1, *ref2;
      bool end1, end2;

      end1 = !prevailing->iterate_reference (n1, ref1);
      end2 = !vtable->iterate_reference (n2, ref2);

      /* !DECL_VIRTUAL_P means RTTI entry;
	 We warn when RTTI is lost because non-RTTI prevails; we silently
	 accept the other case.  */
      while (!end2
	     && (end1
		 || (DECL_ASSEMBLER_NAME (ref1->referred->decl)
		     != DECL_ASSEMBLER_NAME (ref2->referred->decl)
		     && TREE_CODE (ref1->referred->decl) == FUNCTION_DECL))
	     && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
	{
	  /* Warn only once per type about broken RTTI.  */
	  if (!class_type->rtti_broken
	      && warning_at (DECL_SOURCE_LOCATION
			      (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
			     OPT_Wodr,
			     "virtual table of type %qD contains RTTI "
			     "information",
			     DECL_CONTEXT (vtable->decl)))
	    {
	      inform (DECL_SOURCE_LOCATION
			(TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
		      "but is prevailed by one without from other translation "
		      "unit");
	      inform (DECL_SOURCE_LOCATION
			(TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
		      "RTTI will not work on this type");
	      class_type->rtti_broken = true;
	    }
	  n2++;
	  end2 = !vtable->iterate_reference (n2, ref2);
	}
      /* Symmetrically skip extra non-method (RTTI) entries in the
	 prevailing table; this direction is accepted silently.  */
      while (!end1
	     && (end2
		 || (DECL_ASSEMBLER_NAME (ref2->referred->decl)
		     != DECL_ASSEMBLER_NAME (ref1->referred->decl)
		     && TREE_CODE (ref2->referred->decl) == FUNCTION_DECL))
	     && TREE_CODE (ref1->referred->decl) != FUNCTION_DECL)
	{
	  n1++;
	  end1 = !prevailing->iterate_reference (n1, ref1);
	}

      /* Finished?  */
      if (end1 && end2)
	{
	  /* Extra paranoia; compare the sizes.  We do not have information
	     about virtual inheritance offsets, so just be sure that these
	     match.
	     Do this as very last check so the not very informative error
	     is not output too often.  */
	  if (DECL_SIZE (prevailing->decl) != DECL_SIZE (vtable->decl))
	    {
	      class_type->odr_violated = true;
	      if (warning_at (DECL_SOURCE_LOCATION
				(TYPE_NAME (DECL_CONTEXT (vtable->decl))),
			      OPT_Wodr,
			      "virtual table of type %qD violates "
			      "one definition rule ",
			      DECL_CONTEXT (vtable->decl)))
		{
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "the conflicting type defined in another translation "
			  "unit has virtual table of different size");
		}
	    }
	  return;
	}

      if (!end1 && !end2)
	{
	  /* Matching entries: keep walking.  */
	  if (DECL_ASSEMBLER_NAME (ref1->referred->decl)
	      == DECL_ASSEMBLER_NAME (ref2->referred->decl))
	    continue;

	  class_type->odr_violated = true;

	  /* If the loops above stopped on non-virtual pointer, we have
	     mismatch in RTTI information mangling.  */
	  if (TREE_CODE (ref1->referred->decl) != FUNCTION_DECL
	      && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
	    {
	      if (warning_at (DECL_SOURCE_LOCATION
				(TYPE_NAME (DECL_CONTEXT (vtable->decl))),
			      OPT_Wodr,
			      "virtual table of type %qD violates "
			      "one definition rule ",
			      DECL_CONTEXT (vtable->decl)))
		{
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "the conflicting type defined in another translation "
			  "unit with different RTTI information");
		}
	      return;
	    }
	  /* At this point both REF1 and REF2 points either to virtual table
	     or virtual method.  If one points to virtual table and other to
	     method we can complain the same way as if one table was shorter
	     than other pointing out the extra method.  */
	  if (TREE_CODE (ref1->referred->decl)
	      != TREE_CODE (ref2->referred->decl))
	    {
	      if (TREE_CODE (ref1->referred->decl) == VAR_DECL)
		end1 = true;
	      else if (TREE_CODE (ref2->referred->decl) == VAR_DECL)
		end2 = true;
	    }
	}

      class_type->odr_violated = true;

      /* Complain about size mismatch.  Either we have too many virtual
	 functions or too many virtual table pointers.  */
      if (end1 || end2)
	{
	  /* Normalize so VTABLE is the longer table and REF1 its extra
	     entry.  */
	  if (end1)
	    {
	      varpool_node *tmp = prevailing;
	      prevailing = vtable;
	      vtable = tmp;
	      ref1 = ref2;
	    }
	  if (warning_at (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
			  OPT_Wodr,
			  "virtual table of type %qD violates "
			  "one definition rule",
			  DECL_CONTEXT (vtable->decl)))
	    {
	      if (TREE_CODE (ref1->referring->decl) == FUNCTION_DECL)
		{
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "the conflicting type defined in another translation "
			  "unit");
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
			  "contains additional virtual method %qD",
			  ref1->referred->decl);
		}
	      else
		{
		  inform (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
			  "the conflicting type defined in another translation "
			  "unit has virtual table table with more entries");
		}
	    }
	  return;
	}

      /* And in the last case we have either mismatch in between two virtual
	 methods or two virtual table pointers.  */
      if (warning_at (DECL_SOURCE_LOCATION
			(TYPE_NAME (DECL_CONTEXT (vtable->decl))), OPT_Wodr,
		      "virtual table of type %qD violates "
		      "one definition rule ",
		      DECL_CONTEXT (vtable->decl)))
	{
	  if (TREE_CODE (ref1->referred->decl) == FUNCTION_DECL)
	    {
	      inform (DECL_SOURCE_LOCATION
			(TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
		      "the conflicting type defined in another translation "
		      "unit");
	      gcc_assert (TREE_CODE (ref2->referred->decl)
			  == FUNCTION_DECL);
	      inform (DECL_SOURCE_LOCATION (ref1->referred->decl),
		      "virtual method %qD", ref1->referred->decl);
	      inform (DECL_SOURCE_LOCATION (ref2->referred->decl),
		      "ought to match virtual method %qD but does not",
		      ref2->referred->decl);
	    }
	  else
	    inform (DECL_SOURCE_LOCATION
		      (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
		    "the conflicting type defined in another translation "
		    "unit has virtual table table with different contents");
	  return;
	}
    }
}
922
/* Output ODR violation warning about T1 and T2 with REASON.
   Display location of ST1 and ST2 if REASON speaks about field or
   method of the type.
   If WARN is false, do nothing.  Set WARNED if warning was indeed
   output.  */

void
warn_odr (tree t1, tree t2, tree st1, tree st2,
	  bool warn, bool *warned, const char *reason)
{
  tree decl2 = TYPE_NAME (t2);
  if (warned)
    *warned = false;

  /* Nothing useful to report for unnamed types.  */
  if (!warn || !TYPE_NAME(t1))
    return;

  /* ODR warnings are output during LTO streaming; we must apply location
     cache for potential warnings to be output correctly.  */
  lto_location_cache::current_cache->apply_location_cache ();

  if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
		   "type %qT violates one definition rule",
		   t1))
    return;
  /* With no sub-decls, only the generic REASON note below is emitted.  */
  if (!st1 && !st2)
    ;
  /* For FIELD_DECL support also case where one of fields is
     NULL - this is used when the structures have mismatching number of
     elements.  */
  else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      /* Normalize so ST1 is the non-NULL field to point at.  */
      if (!st1)
	{
	  st1 = st2;
	  st2 = NULL;
	}
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is field %qD",
	      st1);
      /* Anchor the final REASON note at the other field when known.  */
      if (st2)
        decl2 = st2;
    }
  else if (TREE_CODE (st1) == FUNCTION_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is method %qD",
	      st1);
      decl2 = st2;
    }
  else
    return;
  inform (DECL_SOURCE_LOCATION (decl2), reason);

  if (warned)
    *warned = true;
}
984
985 /* We already warned about ODR mismatch. T1 and T2 ought to be equivalent
986 because they are used on same place in ODR matching types.
987 They are not; inform the user. */
988
989 void
990 warn_types_mismatch (tree t1, tree t2)
991 {
992 /* If types have names and they are different, it is most informative to
993 output those. */
994 if (TYPE_NAME (t1) && TYPE_NAME (t2)
995 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t1))
996 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t2))
997 && DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
998 != DECL_ASSEMBLER_NAME (TYPE_NAME (t2)))
999 {
1000 char *name1 = xstrdup (cplus_demangle
1001 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))),
1002 DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES));
1003 char *name2 = cplus_demangle
1004 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t2))),
1005 DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES);
1006 if (name1 && name2 && strcmp (name1, name2))
1007 {
1008 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
1009 "type name %<%s%> should match type name %<%s%>",
1010 name1, name2);
1011 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
1012 "the incompatible type is defined here");
1013 free (name1);
1014 return;
1015 }
1016 free (name1);
1017 }
1018 /* It is a quite common bug to reference anonymous namespace type in
1019 non-anonymous namespace class. */
1020 if (type_in_anonymous_namespace_p (t1)
1021 || type_in_anonymous_namespace_p (t2))
1022 {
1023 if (!type_in_anonymous_namespace_p (t1))
1024 {
1025 tree tmp = t1;;
1026 t1 = t2;
1027 t2 = tmp;
1028 }
1029 if (TYPE_NAME (t1) && TYPE_NAME (t2))
1030 {
1031 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
1032 "type %qT defined in anonymous namespace can not match "
1033 "type %qT",
1034 t1, t2);
1035 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
1036 "the incompatible type defined in anonymous namespace in "
1037 "another translation unit");
1038 }
1039 else
1040 inform (UNKNOWN_LOCATION,
1041 "types in anonymous namespace does not match across "
1042 "translation unit boundary");
1043 return;
1044 }
1045 /* A tricky case are component types. Often they appear the same in source
1046 code and the mismatch is dragged in by type they are build from.
1047 Look for those differences in subtypes and try to be informative. In other
1048 cases just output nothing because the source code is probably different
1049 and in this case we already output a all necessary info. */
1050 if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
1051 {
1052 if (TREE_CODE (t1) == TREE_CODE (t2))
1053 {
1054 hash_set<type_pair,pair_traits> visited;
1055 if (TREE_CODE (t1) == ARRAY_TYPE
1056 && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1057 {
1058 tree i1 = TYPE_DOMAIN (t1);
1059 tree i2 = TYPE_DOMAIN (t2);
1060
1061 if (i1 && i2
1062 && TYPE_MAX_VALUE (i1)
1063 && TYPE_MAX_VALUE (i2)
1064 && !operand_equal_p (TYPE_MAX_VALUE (i1),
1065 TYPE_MAX_VALUE (i2), 0))
1066 {
1067 inform (UNKNOWN_LOCATION,
1068 "array types have different bounds");
1069 return;
1070 }
1071 }
1072 if ((POINTER_TYPE_P (t1) || TREE_CODE (t1) == ARRAY_TYPE)
1073 && !odr_subtypes_equivalent_p (TREE_TYPE (t1),
1074 TREE_TYPE (t2),
1075 &visited))
1076 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1077 else if (TREE_CODE (t1) == METHOD_TYPE
1078 || TREE_CODE (t1) == FUNCTION_TYPE)
1079 {
1080 tree parms1, parms2;
1081 int count = 1;
1082
1083 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1084 &visited))
1085 {
1086 inform (UNKNOWN_LOCATION, "return value type mismatch");
1087 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1088 return;
1089 }
1090 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1091 parms1 && parms2;
1092 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2),
1093 count++)
1094 {
1095 if (!odr_subtypes_equivalent_p
1096 (TREE_VALUE (parms1), TREE_VALUE (parms2), &visited))
1097 {
1098 inform (UNKNOWN_LOCATION,
1099 "type mismatch in parameter %i", count);
1100 warn_types_mismatch (TREE_VALUE (parms1),
1101 TREE_VALUE (parms2));
1102 return;
1103 }
1104 }
1105 if (parms1 || parms2)
1106 {
1107 inform (UNKNOWN_LOCATION,
1108 "types have different parameter counts");
1109 return;
1110 }
1111 }
1112 }
1113 return;
1114 }
1115 /* This should not happen but if it does, the warning would not be helpful.
1116 TODO: turn it into assert next stage1. */
1117 if (TYPE_NAME (t1) == TYPE_NAME (t2))
1118 return;
1119 /* In Firefox it is a common bug to have same types but in
1120 different namespaces. Be a bit more informative on
1121 this. */
1122 if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
1123 && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
1124 != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
1125 || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
1126 && (DECL_NAME (TYPE_CONTEXT (t1)) !=
1127 DECL_NAME (TYPE_CONTEXT (t2))))))
1128 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
1129 "type %qT should match type %qT but is defined "
1130 "in different namespace ",
1131 t1, t2);
1132 else if (types_odr_comparable (t1, t2, true)
1133 && types_same_for_odr (t1, t2, true))
1134 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
1135 "type %qT should match type %qT that itself violate "
1136 "one definition rule",
1137 t1, t2);
1138 else
1139 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
1140 "type %qT should match type %qT",
1141 t1, t2);
1142 if (DECL_SOURCE_LOCATION (TYPE_NAME (t2)) > BUILTINS_LOCATION)
1143 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
1144 "the incompatible type is defined here");
1145 }
1146
1147 /* Compare T1 and T2, report ODR violations if WARN is true and set
1148 WARNED to true if anything is reported. Return true if types match.
1149 If true is returned, the types are also compatible in the sense of
1150 gimple_canonical_types_compatible_p. */
1151
1152 static bool
1153 odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
1154 hash_set<type_pair,pair_traits> *visited)
1155 {
1156 /* Check first for the obvious case of pointer identity. */
1157 if (t1 == t2)
1158 return true;
1159 gcc_assert (!type_in_anonymous_namespace_p (t1));
1160 gcc_assert (!type_in_anonymous_namespace_p (t2));
1161
1162 /* Can't be the same type if the types don't have the same code. */
1163 if (TREE_CODE (t1) != TREE_CODE (t2))
1164 {
1165 warn_odr (t1, t2, NULL, NULL, warn, warned,
1166 G_("a different type is defined in another translation unit"));
1167 return false;
1168 }
1169
1170 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
1171 {
1172 warn_odr (t1, t2, NULL, NULL, warn, warned,
1173 G_("a type with different qualifiers is defined in another "
1174 "translation unit"));
1175 return false;
1176 }
1177
1178 if (comp_type_attributes (t1, t2) != 1)
1179 {
1180 warn_odr (t1, t2, NULL, NULL, warn, warned,
1181 G_("a type with attributes "
1182 "is defined in another translation unit"));
1183 return false;
1184 }
1185
1186 if (TREE_CODE (t1) == ENUMERAL_TYPE
1187 && TYPE_VALUES (t1) && TYPE_VALUES (t2))
1188 {
1189 tree v1, v2;
1190 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
1191 v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
1192 {
1193 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
1194 {
1195 warn_odr (t1, t2, NULL, NULL, warn, warned,
1196 G_("an enum with different value name"
1197 " is defined in another translation unit"));
1198 return false;
1199 }
1200 if (TREE_VALUE (v1) != TREE_VALUE (v2)
1201 && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
1202 DECL_INITIAL (TREE_VALUE (v2)), 0))
1203 {
1204 warn_odr (t1, t2, NULL, NULL, warn, warned,
1205 G_("an enum with different values is defined"
1206 " in another translation unit"));
1207 return false;
1208 }
1209 }
1210 if (v1 || v2)
1211 {
1212 warn_odr (t1, t2, NULL, NULL, warn, warned,
1213 G_("an enum with mismatching number of values "
1214 "is defined in another translation unit"));
1215 return false;
1216 }
1217 }
1218
1219 /* Non-aggregate types can be handled cheaply. */
1220 if (INTEGRAL_TYPE_P (t1)
1221 || SCALAR_FLOAT_TYPE_P (t1)
1222 || FIXED_POINT_TYPE_P (t1)
1223 || TREE_CODE (t1) == VECTOR_TYPE
1224 || TREE_CODE (t1) == COMPLEX_TYPE
1225 || TREE_CODE (t1) == OFFSET_TYPE
1226 || POINTER_TYPE_P (t1))
1227 {
1228 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
1229 {
1230 warn_odr (t1, t2, NULL, NULL, warn, warned,
1231 G_("a type with different precision is defined "
1232 "in another translation unit"));
1233 return false;
1234 }
1235 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
1236 {
1237 warn_odr (t1, t2, NULL, NULL, warn, warned,
1238 G_("a type with different signedness is defined "
1239 "in another translation unit"));
1240 return false;
1241 }
1242
1243 if (TREE_CODE (t1) == INTEGER_TYPE
1244 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
1245 {
1246 /* char WRT uint_8? */
1247 warn_odr (t1, t2, NULL, NULL, warn, warned,
1248 G_("a different type is defined in another "
1249 "translation unit"));
1250 return false;
1251 }
1252
1253 /* For canonical type comparisons we do not want to build SCCs
1254 so we cannot compare pointed-to types. But we can, for now,
1255 require the same pointed-to type kind and match what
1256 useless_type_conversion_p would do. */
1257 if (POINTER_TYPE_P (t1))
1258 {
1259 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
1260 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
1261 {
1262 warn_odr (t1, t2, NULL, NULL, warn, warned,
1263 G_("it is defined as a pointer in different address "
1264 "space in another translation unit"));
1265 return false;
1266 }
1267
1268 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
1269 {
1270 warn_odr (t1, t2, NULL, NULL, warn, warned,
1271 G_("it is defined as a pointer to different type "
1272 "in another translation unit"));
1273 if (warn && warned)
1274 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1275 return false;
1276 }
1277 }
1278
1279 if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
1280 && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
1281 {
1282 /* Probably specific enough. */
1283 warn_odr (t1, t2, NULL, NULL, warn, warned,
1284 G_("a different type is defined "
1285 "in another translation unit"));
1286 if (warn && warned)
1287 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1288 return false;
1289 }
1290 }
1291 /* Do type-specific comparisons. */
1292 else switch (TREE_CODE (t1))
1293 {
1294 case ARRAY_TYPE:
1295 {
1296 /* Array types are the same if the element types are the same and
1297 the number of elements are the same. */
1298 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
1299 {
1300 warn_odr (t1, t2, NULL, NULL, warn, warned,
1301 G_("a different type is defined in another "
1302 "translation unit"));
1303 if (warn && warned)
1304 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1305 }
1306 gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
1307 gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
1308 == TYPE_NONALIASED_COMPONENT (t2));
1309
1310 tree i1 = TYPE_DOMAIN (t1);
1311 tree i2 = TYPE_DOMAIN (t2);
1312
1313 /* For an incomplete external array, the type domain can be
1314 NULL_TREE. Check this condition also. */
1315 if (i1 == NULL_TREE || i2 == NULL_TREE)
1316 return true;
1317
1318 tree min1 = TYPE_MIN_VALUE (i1);
1319 tree min2 = TYPE_MIN_VALUE (i2);
1320 tree max1 = TYPE_MAX_VALUE (i1);
1321 tree max2 = TYPE_MAX_VALUE (i2);
1322
1323 /* In C++, minimums should be always 0. */
1324 gcc_assert (min1 == min2);
1325 if (!operand_equal_p (max1, max2, 0))
1326 {
1327 warn_odr (t1, t2, NULL, NULL, warn, warned,
1328 G_("an array of different size is defined "
1329 "in another translation unit"));
1330 return false;
1331 }
1332 }
1333 break;
1334
1335 case METHOD_TYPE:
1336 case FUNCTION_TYPE:
1337 /* Function types are the same if the return type and arguments types
1338 are the same. */
1339 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
1340 {
1341 warn_odr (t1, t2, NULL, NULL, warn, warned,
1342 G_("has different return value "
1343 "in another translation unit"));
1344 if (warn && warned)
1345 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1346 return false;
1347 }
1348
1349 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
1350 return true;
1351 else
1352 {
1353 tree parms1, parms2;
1354
1355 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1356 parms1 && parms2;
1357 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
1358 {
1359 if (!odr_subtypes_equivalent_p
1360 (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
1361 {
1362 warn_odr (t1, t2, NULL, NULL, warn, warned,
1363 G_("has different parameters in another "
1364 "translation unit"));
1365 if (warn && warned)
1366 warn_types_mismatch (TREE_VALUE (parms1),
1367 TREE_VALUE (parms2));
1368 return false;
1369 }
1370 }
1371
1372 if (parms1 || parms2)
1373 {
1374 warn_odr (t1, t2, NULL, NULL, warn, warned,
1375 G_("has different parameters "
1376 "in another translation unit"));
1377 return false;
1378 }
1379
1380 return true;
1381 }
1382
1383 case RECORD_TYPE:
1384 case UNION_TYPE:
1385 case QUAL_UNION_TYPE:
1386 {
1387 tree f1, f2;
1388
1389 /* For aggregate types, all the fields must be the same. */
1390 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1391 {
1392 if (TYPE_BINFO (t1) && TYPE_BINFO (t2)
1393 && polymorphic_type_binfo_p (TYPE_BINFO (t1))
1394 != polymorphic_type_binfo_p (TYPE_BINFO (t2)))
1395 {
1396 if (polymorphic_type_binfo_p (TYPE_BINFO (t1)))
1397 warn_odr (t1, t2, NULL, NULL, warn, warned,
1398 G_("a type defined in another translation unit "
1399 "is not polymorphic"));
1400 else
1401 warn_odr (t1, t2, NULL, NULL, warn, warned,
1402 G_("a type defined in another translation unit "
1403 "is polymorphic"));
1404 return false;
1405 }
1406 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1407 f1 || f2;
1408 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1409 {
1410 /* Skip non-fields. */
1411 while (f1 && TREE_CODE (f1) != FIELD_DECL)
1412 f1 = TREE_CHAIN (f1);
1413 while (f2 && TREE_CODE (f2) != FIELD_DECL)
1414 f2 = TREE_CHAIN (f2);
1415 if (!f1 || !f2)
1416 break;
1417 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1418 {
1419 warn_odr (t1, t2, NULL, NULL, warn, warned,
1420 G_("a type with different virtual table pointers"
1421 " is defined in another translation unit"));
1422 return false;
1423 }
1424 if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
1425 {
1426 warn_odr (t1, t2, NULL, NULL, warn, warned,
1427 G_("a type with different bases is defined "
1428 "in another translation unit"));
1429 return false;
1430 }
1431 if (DECL_NAME (f1) != DECL_NAME (f2)
1432 && !DECL_ARTIFICIAL (f1))
1433 {
1434 warn_odr (t1, t2, f1, f2, warn, warned,
1435 G_("a field with different name is defined "
1436 "in another translation unit"));
1437 return false;
1438 }
1439 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
1440 TREE_TYPE (f2), visited))
1441 {
1442 /* Do not warn about artificial fields and just go into
1443 generic field mismatch warning. */
1444 if (DECL_ARTIFICIAL (f1))
1445 break;
1446
1447 warn_odr (t1, t2, f1, f2, warn, warned,
1448 G_("a field of same name but different type "
1449 "is defined in another translation unit"));
1450 if (warn && warned)
1451 warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
1452 return false;
1453 }
1454 if (!gimple_compare_field_offset (f1, f2))
1455 {
1456 /* Do not warn about artificial fields and just go into
1457 generic field mismatch warning. */
1458 if (DECL_ARTIFICIAL (f1))
1459 break;
1460 warn_odr (t1, t2, f1, f2, warn, warned,
1461 G_("fields has different layout "
1462 "in another translation unit"));
1463 return false;
1464 }
1465 gcc_assert (DECL_NONADDRESSABLE_P (f1)
1466 == DECL_NONADDRESSABLE_P (f2));
1467 }
1468
1469 /* If one aggregate has more fields than the other, they
1470 are not the same. */
1471 if (f1 || f2)
1472 {
1473 if ((f1 && DECL_VIRTUAL_P (f1)) || (f2 && DECL_VIRTUAL_P (f2)))
1474 warn_odr (t1, t2, NULL, NULL, warn, warned,
1475 G_("a type with different virtual table pointers"
1476 " is defined in another translation unit"));
1477 else if ((f1 && DECL_ARTIFICIAL (f1))
1478 || (f2 && DECL_ARTIFICIAL (f2)))
1479 warn_odr (t1, t2, NULL, NULL, warn, warned,
1480 G_("a type with different bases is defined "
1481 "in another translation unit"));
1482 else
1483 warn_odr (t1, t2, f1, f2, warn, warned,
1484 G_("a type with different number of fields "
1485 "is defined in another translation unit"));
1486
1487 return false;
1488 }
1489 if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
1490 && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
1491 != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
1492 {
1493 for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
1494 f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
1495 f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
1496 {
1497 if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
1498 {
1499 warn_odr (t1, t2, f1, f2, warn, warned,
1500 G_("a different method of same type "
1501 "is defined in another translation unit"));
1502 return false;
1503 }
1504 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1505 {
1506 warn_odr (t1, t2, f1, f2, warn, warned,
1507 G_("s definition that differs by virtual "
1508 "keyword in another translation unit"));
1509 return false;
1510 }
1511 if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
1512 {
1513 warn_odr (t1, t2, f1, f2, warn, warned,
1514 G_("virtual table layout differs in another "
1515 "translation unit"));
1516 return false;
1517 }
1518 if (odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
1519 {
1520 warn_odr (t1, t2, f1, f2, warn, warned,
1521 G_("method with incompatible type is defined "
1522 "in another translation unit"));
1523 return false;
1524 }
1525 }
1526 if (f1 || f2)
1527 {
1528 warn_odr (t1, t2, NULL, NULL, warn, warned,
1529 G_("a type with different number of methods "
1530 "is defined in another translation unit"));
1531 return false;
1532 }
1533 }
1534 }
1535 break;
1536 }
1537 case VOID_TYPE:
1538 break;
1539
1540 default:
1541 debug_tree (t1);
1542 gcc_unreachable ();
1543 }
1544
1545 /* Those are better to come last as they are utterly uninformative. */
1546 if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
1547 && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
1548 {
1549 warn_odr (t1, t2, NULL, NULL, warn, warned,
1550 G_("a type with different size "
1551 "is defined in another translation unit"));
1552 return false;
1553 }
1554 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
1555 && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
1556 {
1557 warn_odr (t1, t2, NULL, NULL, warn, warned,
1558 G_("a type with different alignment "
1559 "is defined in another translation unit"));
1560 return false;
1561 }
1562 gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
1563 || operand_equal_p (TYPE_SIZE_UNIT (t1),
1564 TYPE_SIZE_UNIT (t2), 0));
1565 return true;
1566 }
1567
/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type or due to ODR violation.

   Analyze and report ODR violations and add type to duplicate list.
   If TYPE is more specified than VAL->type, make it prevail as VAL->type.
   Also if this is the first time we see a definition of a class return
   true so the base types are analyzed.  */

static bool
add_type_duplicate (odr_type val, tree type)
{
  bool build_bases = false;
  bool prevail = false;
  bool odr_must_violate = false;

  /* Lazily allocate the set of tree representations seen for VAL.  */
  if (!val->types_set)
    val->types_set = new hash_set<tree>;

  /* Choose polymorphic type as leader (this happens only in case of ODR
     violations).  */
  if ((TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
       && polymorphic_type_binfo_p (TYPE_BINFO (type)))
      && (TREE_CODE (val->type) != RECORD_TYPE || !TYPE_BINFO (val->type)
          || !polymorphic_type_binfo_p (TYPE_BINFO (val->type))))
    {
      prevail = true;
      build_bases = true;
    }
  /* Always prefer complete type to be the leader.  */
  else if (!COMPLETE_TYPE_P (val->type) && COMPLETE_TYPE_P (type))
    {
      prevail = true;
      build_bases = TYPE_BINFO (type);
    }
  else if (COMPLETE_TYPE_P (val->type) && !COMPLETE_TYPE_P (type))
    ;
  /* Prefer an enum whose value list is known.  */
  else if (TREE_CODE (val->type) == ENUMERAL_TYPE
	   && TREE_CODE (type) == ENUMERAL_TYPE
	   && !TYPE_VALUES (val->type) && TYPE_VALUES (type))
    prevail = true;
  /* Prefer a record with binfo (inheritance info) available.  */
  else if (TREE_CODE (val->type) == RECORD_TYPE
	   && TREE_CODE (type) == RECORD_TYPE
	   && TYPE_BINFO (type) && !TYPE_BINFO (val->type))
    {
      gcc_assert (!val->bases.length ());
      build_bases = true;
      prevail = true;
    }

  /* Swap TYPE and VAL->type so the prevailing representation becomes the
     leader; the rest of the function compares TYPE against it.  */
  if (prevail)
    {
      tree tmp = type;

      type = val->type;
      val->type = tmp;
    }

  val->types_set->add (type);

  /* If we now have a mangled name, be sure to record it to val->type
     so ODR hash can work.  */

  if (can_be_name_hashed_p (type) && !can_be_name_hashed_p (val->type))
    SET_DECL_ASSEMBLER_NAME (TYPE_NAME (val->type),
			     DECL_ASSEMBLER_NAME (TYPE_NAME (type)));

  bool merge = true;
  bool base_mismatch = false;
  unsigned int i;
  bool warned = false;
  hash_set<type_pair,pair_traits> visited;

  gcc_assert (in_lto_p);
  vec_safe_push (val->types, type);

  /* If both are class types, compare the bases.  */
  if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
      && TREE_CODE (val->type) == RECORD_TYPE
      && TREE_CODE (type) == RECORD_TYPE
      && TYPE_BINFO (val->type) && TYPE_BINFO (type))
    {
      /* Differing base counts are an immediate mismatch; point the user
	 at the extra base of the longer list.  */
      if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
	  != BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
	{
	  if (!flag_ltrans && !warned && !val->odr_violated)
	    {
	      tree extra_base;
	      warn_odr (type, val->type, NULL, NULL, !warned, &warned,
			"a type with the same name but different "
			"number of polymorphic bases is "
			"defined in another translation unit");
	      if (warned)
		{
		  if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
		      > BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
		    extra_base = BINFO_BASE_BINFO
				 (TYPE_BINFO (type),
				  BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)));
		  else
		    extra_base = BINFO_BASE_BINFO
				 (TYPE_BINFO (val->type),
				  BINFO_N_BASE_BINFOS (TYPE_BINFO (type)));
		  tree extra_base_type = BINFO_TYPE (extra_base);
		  inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type)),
			  "the extra base is defined here");
		}
	    }
	  base_mismatch = true;
	}
      else
	/* Same number of bases: compare them pairwise.  */
	for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	  {
	    tree base1 = BINFO_BASE_BINFO (TYPE_BINFO (type), i);
	    tree base2 = BINFO_BASE_BINFO (TYPE_BINFO (val->type), i);
	    tree type1 = BINFO_TYPE (base1);
	    tree type2 = BINFO_TYPE (base2);

	    if (types_odr_comparable (type1, type2))
	      {
		if (!types_same_for_odr (type1, type2))
		  base_mismatch = true;
	      }
	    else
	      {
		/* Fall back to structural equivalence with a fresh
		   visited set, without emitting warnings here.  */
		hash_set<type_pair,pair_traits> visited;
		if (!odr_types_equivalent_p (type1, type2, false, NULL,
					     &visited))
		  base_mismatch = true;
	      }
	    if (base_mismatch)
	      {
		if (!warned && !val->odr_violated)
		  {
		    warn_odr (type, val->type, NULL, NULL,
			      !warned, &warned,
			      "a type with the same name but different base "
			      "type is defined in another translation unit");
		    if (warned)
		      warn_types_mismatch (type1, type2);
		  }
		break;
	      }
	    if (BINFO_OFFSET (base1) != BINFO_OFFSET (base2))
	      {
		base_mismatch = true;
		if (!warned && !val->odr_violated)
		  warn_odr (type, val->type, NULL, NULL,
			    !warned, &warned,
			    "a type with the same name but different base "
			    "layout is defined in another translation unit");
		break;
	      }
	    /* One of bases is not of complete type.  */
	    if (!TYPE_BINFO (type1) != !TYPE_BINFO (type2))
	      {
		/* If we have a polymorphic type info specified for TYPE1
		   but not for TYPE2 we possibly missed a base when recording
		   VAL->type earlier.
		   Be sure this does not happen.  */
		if (TYPE_BINFO (type1)
		    && polymorphic_type_binfo_p (TYPE_BINFO (type1))
		    && !build_bases)
		  odr_must_violate = true;
		break;
	      }
	    /* One base is polymorphic and the other not.
	       This ought to be diagnosed earlier, but do not ICE in the
	       checking below.  */
	    else if (TYPE_BINFO (type1)
		     && polymorphic_type_binfo_p (TYPE_BINFO (type1))
			!= polymorphic_type_binfo_p (TYPE_BINFO (type2)))
	      {
		if (!warned && !val->odr_violated)
		  warn_odr (type, val->type, NULL, NULL,
			    !warned, &warned,
			    "a base of the type is polymorphic only in one "
			    "translation unit");
		base_mismatch = true;
		break;
	      }
	  }
      if (base_mismatch)
	{
	  merge = false;
	  odr_violation_reported = true;
	  val->odr_violated = true;

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file, "ODR base violation\n");

	      print_node (symtab->dump_file, "", val->type, 0);
	      putc ('\n',symtab->dump_file);
	      print_node (symtab->dump_file, "", type, 0);
	      putc ('\n',symtab->dump_file);
	    }
	}
    }

  /* Next compare memory layout.  Warn only on the first violation and
     only in the non-ltrans stage.  */
  if (!odr_types_equivalent_p (val->type, type,
			       !flag_ltrans && !val->odr_violated && !warned,
			       &warned, &visited))
    {
      merge = false;
      odr_violation_reported = true;
      val->odr_violated = true;
      if (symtab->dump_file)
	{
	  fprintf (symtab->dump_file, "ODR violation\n");

	  print_node (symtab->dump_file, "", val->type, 0);
	  putc ('\n',symtab->dump_file);
	  print_node (symtab->dump_file, "", type, 0);
	  putc ('\n',symtab->dump_file);
	}
    }
  gcc_assert (val->odr_violated || !odr_must_violate);
  /* Sanity check that all bases will be built the same way again.  */
#ifdef ENABLE_CHECKING
  if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
      && TREE_CODE (val->type) == RECORD_TYPE
      && TREE_CODE (type) == RECORD_TYPE
      && TYPE_BINFO (val->type) && TYPE_BINFO (type)
      && !val->odr_violated
      && !base_mismatch && val->bases.length ())
    {
      unsigned int num_poly_bases = 0;
      unsigned int j;

      for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
					 (TYPE_BINFO (type), i)))
	  num_poly_bases++;
      gcc_assert (num_poly_bases == val->bases.length ());
      for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type));
	   i++)
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
				     (TYPE_BINFO (type), i)))
	  {
	    odr_type base = get_odr_type
			       (BINFO_TYPE
				  (BINFO_BASE_BINFO (TYPE_BINFO (type),
						     i)),
				true);
	    gcc_assert (val->bases[j] == base);
	    j++;
	  }
    }
#endif


  /* Regularize things a little.  During LTO same types may come with
     different BINFOs.  Either because their virtual table was
     not merged by tree merging and only later at decl merging or
     because one type comes with external vtable, while other
     with internal.  We want to merge equivalent binfos to conserve
     memory and streaming overhead.

     The external vtables are more harmful: they contain references
     to external declarations of methods that may be defined in the
     merged LTO unit.  For this reason we absolutely need to remove
     them and replace by internal variants. Not doing so will lead
     to incomplete answers from possible_polymorphic_call_targets.

     FIXME: disable for now; because ODR types are now build during
     streaming in, the variants do not need to be linked to the type,
     yet.  We need to do the merging in cleanup pass to be implemented
     soon.  */
  /* The "&& 0" below intentionally disables this code; see FIXME above.  */
  if (!flag_ltrans && merge
      && 0
      && TREE_CODE (val->type) == RECORD_TYPE
      && TREE_CODE (type) == RECORD_TYPE
      && TYPE_BINFO (val->type) && TYPE_BINFO (type)
      && TYPE_MAIN_VARIANT (type) == type
      && TYPE_MAIN_VARIANT (val->type) == val->type
      && BINFO_VTABLE (TYPE_BINFO (val->type))
      && BINFO_VTABLE (TYPE_BINFO (type)))
    {
      tree master_binfo = TYPE_BINFO (val->type);
      tree v1 = BINFO_VTABLE (master_binfo);
      tree v2 = BINFO_VTABLE (TYPE_BINFO (type));

      if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
	{
	  gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
		      && operand_equal_p (TREE_OPERAND (v1, 1),
					  TREE_OPERAND (v2, 1), 0));
	  v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
	  v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
	}
      gcc_assert (DECL_ASSEMBLER_NAME (v1)
		  == DECL_ASSEMBLER_NAME (v2));

      if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
	{
	  unsigned int i;

	  set_type_binfo (val->type, TYPE_BINFO (type));
	  for (i = 0; i < val->types->length (); i++)
	    {
	      if (TYPE_BINFO ((*val->types)[i])
		  == master_binfo)
		set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
	    }
	  BINFO_TYPE (TYPE_BINFO (type)) = val->type;
	}
      else
	set_type_binfo (type, master_binfo);
    }
  return build_bases;
}
1881
/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   possibly new entry.  Returns NULL when INSERT is false and TYPE is
   not recorded yet.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot = NULL;		/* Slot in the name-based hash.  */
  odr_type_d **vtable_slot = NULL;	/* Slot in the vtable-based hash.  */
  odr_type val = NULL;
  hashval_t hash;
  bool build_bases = false;
  bool insert_to_odr_array = false;
  int base_id = -1;

  type = main_odr_variant (type);

  gcc_checking_assert (can_be_name_hashed_p (type)
		       || can_be_vtable_hashed_p (type));

  /* Lookup entry, first try name hash, fallback to vtable hash.  */
  if (can_be_name_hashed_p (type))
    {
      hash = hash_odr_name (type);
      slot = odr_hash->find_slot_with_hash (type, hash,
					    insert ? INSERT : NO_INSERT);
    }
  if ((!slot || !*slot) && in_lto_p && can_be_vtable_hashed_p (type))
    {
      hash = hash_odr_vtable (type);
      vtable_slot = odr_vtable_hash->find_slot_with_hash (type, hash,
					   insert ? INSERT : NO_INSERT);
    }

  /* Neither hash found the type and we did not insert: nothing to do.  */
  if (!slot && !vtable_slot)
    return NULL;

  /* See if we already have entry for type.  */
  if ((slot && *slot) || (vtable_slot && *vtable_slot))
    {
      if (slot && *slot)
	{
	  val = *slot;
#ifdef ENABLE_CHECKING
	  /* Verify the two hashes agree about this type.  */
	  if (in_lto_p && can_be_vtable_hashed_p (type))
	    {
	      hash = hash_odr_vtable (type);
	      vtable_slot = odr_vtable_hash->find_slot_with_hash (type, hash,
						                  NO_INSERT);
	      gcc_assert (!vtable_slot || *vtable_slot == *slot);
	      vtable_slot = NULL;
	    }
#endif
	}
      else if (*vtable_slot)
	val = *vtable_slot;

      /* TYPE is a new tree representation of an already-known ODR type.  */
      if (val->type != type
	  && (!val->types_set || !val->types_set->add (type)))
	{
	  gcc_assert (insert);
	  /* We have type duplicate, but it may introduce vtable name or
 	     mangled name; be sure to keep hashes in sync.  */
	  if (in_lto_p && can_be_vtable_hashed_p (type)
	      && (!vtable_slot || !*vtable_slot))
	    {
	      if (!vtable_slot)
		{
		  hash = hash_odr_vtable (type);
		  vtable_slot = odr_vtable_hash->find_slot_with_hash
			     (type, hash, INSERT);
		  gcc_checking_assert (!*vtable_slot || *vtable_slot == val);
		}
	      *vtable_slot = val;
	    }
	  if (slot && !*slot)
	    *slot = val;
	  build_bases = add_type_duplicate (val, type);
	}
    }
  else
    {
      /* First time we see this ODR type: allocate a new entry.  */
      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      build_bases = COMPLETE_TYPE_P (val->type);
      insert_to_odr_array = true;
      if (slot)
        *slot = val;
      if (vtable_slot)
	*vtable_slot = val;
    }

  if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
      && type == TYPE_MAIN_VARIANT (type))
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);
  
      val->all_derivations_known = type_all_derivations_known_p (type);
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types. other are
	   pointless for devirtualization and we can not precisely
	   determine ODR equivalency of these during LTO.  */
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
	  {
	    tree base_type= BINFO_TYPE (BINFO_BASE_BINFO (binfo, i));
	    odr_type base = get_odr_type (base_type, true);
	    gcc_assert (TYPE_MAIN_VARIANT (base_type) == base_type);
	    base->derived_types.safe_push (val);
	    val->bases.safe_push (base);
	    if (base->id > base_id)
	      base_id = base->id;
	  }
    }
  /* Ensure that type always appears after bases.  */
  if (insert_to_odr_array)
    {
      if (odr_types_ptr)
        val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  else if (base_id > val->id)
    {
      /* A base got a larger id than us: move this entry to the end of the
	 array so it again appears after all its bases.  */
      odr_types[val->id] = 0;
      /* Be sure we did not record any derived types; these may need
	 renumbering too.  */
      gcc_assert (val->derived_types.length() == 0);
      if (odr_types_ptr)
	val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}
2019
2020 /* Add TYPE od ODR type hash. */
2021
2022 void
2023 register_odr_type (tree type)
2024 {
2025 if (!odr_hash)
2026 {
2027 odr_hash = new odr_hash_type (23);
2028 if (in_lto_p)
2029 odr_vtable_hash = new odr_vtable_hash_type (23);
2030 }
2031 /* Arrange things to be nicer and insert main variants first. */
2032 if (odr_type_p (TYPE_MAIN_VARIANT (type)))
2033 get_odr_type (TYPE_MAIN_VARIANT (type), true);
2034 if (TYPE_MAIN_VARIANT (type) != type)
2035 get_odr_type (type, true);
2036 }
2037
2038 /* Return true if type is known to have no derivations. */
2039
2040 bool
2041 type_known_to_have_no_deriavations_p (tree t)
2042 {
2043 return (type_all_derivations_known_p (t)
2044 && (TYPE_FINAL_P (t)
2045 || (odr_hash
2046 && !get_odr_type (t, true)->derived_types.length())));
2047 }
2048
2049 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
2050 recursive printing. */
2051
2052 static void
2053 dump_odr_type (FILE *f, odr_type t, int indent=0)
2054 {
2055 unsigned int i;
2056 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
2057 print_generic_expr (f, t->type, TDF_SLIM);
2058 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
2059 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
2060 if (TYPE_NAME (t->type))
2061 {
2062 /*fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
2063 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
2064 DECL_SOURCE_LINE (TYPE_NAME (t->type)));*/
2065 if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t->type)))
2066 fprintf (f, "%*s mangled name: %s\n", indent * 2, "",
2067 IDENTIFIER_POINTER
2068 (DECL_ASSEMBLER_NAME (TYPE_NAME (t->type))));
2069 }
2070 if (t->bases.length ())
2071 {
2072 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
2073 for (i = 0; i < t->bases.length (); i++)
2074 fprintf (f, " %i", t->bases[i]->id);
2075 fprintf (f, "\n");
2076 }
2077 if (t->derived_types.length ())
2078 {
2079 fprintf (f, "%*s derived types:\n", indent * 2, "");
2080 for (i = 0; i < t->derived_types.length (); i++)
2081 dump_odr_type (f, t->derived_types[i], indent + 1);
2082 }
2083 fprintf (f, "\n");
2084 }
2085
2086 /* Dump the type inheritance graph. */
2087
2088 static void
2089 dump_type_inheritance_graph (FILE *f)
2090 {
2091 unsigned int i;
2092 if (!odr_types_ptr)
2093 return;
2094 fprintf (f, "\n\nType inheritance graph:\n");
2095 for (i = 0; i < odr_types.length (); i++)
2096 {
2097 if (odr_types[i] && odr_types[i]->bases.length () == 0)
2098 dump_odr_type (f, odr_types[i]);
2099 }
2100 for (i = 0; i < odr_types.length (); i++)
2101 {
2102 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
2103 {
2104 unsigned int j;
2105 fprintf (f, "Duplicate tree types for odr type %i\n", i);
2106 print_node (f, "", odr_types[i]->type, 0);
2107 for (j = 0; j < odr_types[i]->types->length (); j++)
2108 {
2109 tree t;
2110 fprintf (f, "duplicate #%i\n", j);
2111 print_node (f, "", (*odr_types[i]->types)[j], 0);
2112 t = (*odr_types[i]->types)[j];
2113 while (TYPE_P (t) && TYPE_CONTEXT (t))
2114 {
2115 t = TYPE_CONTEXT (t);
2116 print_node (f, "", t, 0);
2117 }
2118 putc ('\n',f);
2119 }
2120 }
2121 }
2122 }
2123
2124 /* Given method type T, return type of class it belongs to.
2125 Look up this pointer and get its type. */
2126
2127 tree
2128 method_class_type (const_tree t)
2129 {
2130 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
2131 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
2132
2133 return TREE_TYPE (first_parm_type);
2134 }
2135
/* Initialize IPA devirt and build inheritance tree graph.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  /* Already initialized; nothing to do.  */
  if (odr_hash)
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);
  if (in_lto_p)
    odr_vtable_hash = new odr_vtable_hash_type (23);

  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& n->real_symbol_p ())
      get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
		    true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variant - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node *> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}
2194
2195 /* Return true if N has reference from live virtual table
2196 (and thus can be a destination of polymorphic call).
2197 Be conservatively correct when callgraph is not built or
2198 if the method may be referred externally. */
2199
2200 static bool
2201 referenced_from_vtable_p (struct cgraph_node *node)
2202 {
2203 int i;
2204 struct ipa_ref *ref;
2205 bool found = false;
2206
2207 if (node->externally_visible
2208 || DECL_EXTERNAL (node->decl)
2209 || node->used_from_other_partition)
2210 return true;
2211
2212 /* Keep this test constant time.
2213 It is unlikely this can happen except for the case where speculative
2214 devirtualization introduced many speculative edges to this node.
2215 In this case the target is very likely alive anyway. */
2216 if (node->ref_list.referring.length () > 100)
2217 return true;
2218
2219 /* We need references built. */
2220 if (symtab->state <= CONSTRUCTION)
2221 return true;
2222
2223 for (i = 0; node->iterate_referring (i, ref); i++)
2224 if ((ref->use == IPA_REF_ALIAS
2225 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
2226 || (ref->use == IPA_REF_ADDR
2227 && TREE_CODE (ref->referring->decl) == VAR_DECL
2228 && DECL_VIRTUAL_P (ref->referring->decl)))
2229 {
2230 found = true;
2231 break;
2232 }
2233 return found;
2234 }
2235
/* If TARGET has associated node, record it in the NODES array.
   CAN_REFER specify if program can refer to the target directly.
   if TARGET is unknown (NULL) or it can not be inserted (for example because
   its body was already removed and there is no way to refer to it), clear
   COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, hash_set<tree> *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;

  /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer alias target over aliases, so we do not get confused by
     fake duplicates.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      if (target_node != alias_target
	  && avail >= AVAIL_AVAILABLE
	  && target_node->get_availability ())
	target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
	  || !referenced_from_vtable_p (target_node)))
    ;	/* Deliberately dropped: no live vtable refers to this method.  */
  /* See if TARGET is useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      /* add returns false when TARGET was not yet in the set; record
	 each target only once.  */
      if (!inserted->add (target))
	{
	  cached_polymorphic_call_targets->add (target_node);
	  nodes.safe_push (target_node);
	}
    }
  /* Otherwise the target is unusable; outside of the anonymous-namespace
     exception this makes the target list incomplete.  */
  else if (completep
	   && (!type_in_anonymous_namespace_p
		 (DECL_CONTEXT (target))
	       || flag_ltrans))
    *completep = false;
}
2322
/* See if BINFO's type matches OUTER_TYPE.  If so, look up
   BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
   method in vtable and insert method to NODES array
   or BASES_TO_CONSIDER if this array is non-NULL.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOS of types with defined
   virtual table seen on way from class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for virtual function in. INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.

   Clear COMPLETEP when we hit unreferable target.
 */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  hash_set<tree> *inserted,
			  hash_set<tree> *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  /* Maintain the stack of BINFOs with virtual tables; popped again
     before every return path below that was preceded by this push.  */
  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      if (!inner_binfo)
	{
	  /* Missing BINFO can only legitimately happen after an ODR
	     violation was diagnosed.  */
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_node::get (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      /* Look up the vtable only once: 'contains' when merely collecting
	 bases to consider (the caller will walk them again), 'add' when
	 recording for real.  */
      if (bases_to_consider
	  ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
	  : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction vtables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
2433
2434 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
2435 of TYPE, insert them to NODES, recurse into derived nodes.
2436 INSERTED is used to avoid duplicate insertions of methods into NODES.
2437 MATCHED_VTABLES are used to avoid duplicate walking vtables.
2438 Clear COMPLETEP if unreferable target is found.
2439
2440 If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
2441 all cases where BASE_SKIPPED is true (because the base is abstract
2442 class). */
2443
2444 static void
2445 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
2446 hash_set<tree> *inserted,
2447 hash_set<tree> *matched_vtables,
2448 tree otr_type,
2449 odr_type type,
2450 HOST_WIDE_INT otr_token,
2451 tree outer_type,
2452 HOST_WIDE_INT offset,
2453 bool *completep,
2454 vec <tree> &bases_to_consider,
2455 bool consider_construction)
2456 {
2457 tree binfo = TYPE_BINFO (type->type);
2458 unsigned int i;
2459 auto_vec <tree, 8> type_binfos;
2460 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
2461
2462 /* We may need to consider types w/o instances because of possible derived
2463 types using their methods either directly or via construction vtables.
2464 We are safe to skip them when all derivations are known, since we will
2465 handle them later.
2466 This is done by recording them to BASES_TO_CONSIDER array. */
2467 if (possibly_instantiated || consider_construction)
2468 {
2469 record_target_from_binfo (nodes,
2470 (!possibly_instantiated
2471 && type_all_derivations_known_p (type->type))
2472 ? &bases_to_consider : NULL,
2473 binfo, otr_type, type_binfos, otr_token,
2474 outer_type, offset,
2475 inserted, matched_vtables,
2476 type->anonymous_namespace, completep);
2477 }
2478 for (i = 0; i < type->derived_types.length (); i++)
2479 possible_polymorphic_call_targets_1 (nodes, inserted,
2480 matched_vtables,
2481 otr_type,
2482 type->derived_types[i],
2483 otr_token, outer_type, offset, completep,
2484 bases_to_consider, consider_construction);
2485 }
2486
/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

struct polymorphic_call_target_d
{
  /* Vtable slot of the call being queried.  */
  HOST_WIDE_INT otr_token;
  /* Context (outer type, offsets, speculation) of the query.  */
  ipa_polymorphic_call_context context;
  /* ODR type of the call.  */
  odr_type type;
  /* Resolved list of possible targets; released in
     polymorphic_call_target_hasher::remove.  */
  vec <cgraph_node *> targets;
  /* Keys into final_warning_records used when replaying cached
     -Wsuggest-final-* statistics; see possible_polymorphic_call_targets.  */
  tree decl_warning;
  int type_warning;
  /* True when TARGETS enumerates every possible target.  */
  bool complete;
  /* True when this entry answers a speculative query.  */
  bool speculative;
};
2504
/* Polymorphic call target cache helpers.
   Hash-table traits for entries of polymorphic_call_target_d.  */

struct polymorphic_call_target_hasher
{
  typedef polymorphic_call_target_d value_type;
  typedef polymorphic_call_target_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
2515
/* Return the computed hashcode for ODR_QUERY.  */

inline hashval_t
polymorphic_call_target_hasher::hash (const value_type *odr_query)
{
  /* Seed with the vtable token; mix in every field that
     polymorphic_call_target_hasher::equal compares.  */
  inchash::hash hstate (odr_query->otr_token);

  hstate.add_wide_int (odr_query->type->id);
  hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
  hstate.add_wide_int (odr_query->context.offset);

  /* Speculative part of the context is hashed only when present.  */
  if (odr_query->context.speculative_outer_type)
    {
      hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
      hstate.add_wide_int (odr_query->context.speculative_offset);
    }
  hstate.add_flag (odr_query->speculative);
  hstate.add_flag (odr_query->context.maybe_in_construction);
  hstate.add_flag (odr_query->context.maybe_derived_type);
  hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
  hstate.commit_flag ();
  return hstate.end ();
}
2539
2540 /* Compare cache entries T1 and T2. */
2541
2542 inline bool
2543 polymorphic_call_target_hasher::equal (const value_type *t1,
2544 const compare_type *t2)
2545 {
2546 return (t1->type == t2->type && t1->otr_token == t2->otr_token
2547 && t1->speculative == t2->speculative
2548 && t1->context.offset == t2->context.offset
2549 && t1->context.speculative_offset == t2->context.speculative_offset
2550 && t1->context.outer_type == t2->context.outer_type
2551 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
2552 && t1->context.maybe_in_construction
2553 == t2->context.maybe_in_construction
2554 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
2555 && (t1->context.speculative_maybe_derived_type
2556 == t2->context.speculative_maybe_derived_type));
2557 }
2558
/* Remove entry in polymorphic call target cache hash.  */

inline void
polymorphic_call_target_hasher::remove (value_type *v)
{
  /* Entries are XCNEWed and their target vectors heap-allocated
     (see possible_polymorphic_call_targets); release both here.  */
  v->targets.release ();
  free (v);
}
2567
/* Polymorphic call target query cache.  Allocated lazily in
   possible_polymorphic_call_targets and flushed by the removal hooks.  */

typedef hash_table<polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
2573
2574 /* Destroy polymorphic call target query cache. */
2575
2576 static void
2577 free_polymorphic_call_targets_hash ()
2578 {
2579 if (cached_polymorphic_call_targets)
2580 {
2581 delete polymorphic_call_target_hash;
2582 polymorphic_call_target_hash = NULL;
2583 delete cached_polymorphic_call_targets;
2584 cached_polymorphic_call_targets = NULL;
2585 }
2586 }
2587
2588 /* When virtual function is removed, we may need to flush the cache. */
2589
2590 static void
2591 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
2592 {
2593 if (cached_polymorphic_call_targets
2594 && cached_polymorphic_call_targets->contains (n))
2595 free_polymorphic_call_targets_hash ();
2596 }
2597
2598 /* Look up base of BINFO that has virtual table VTABLE with OFFSET. */
2599
2600 tree
2601 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2602 tree vtable)
2603 {
2604 tree v = BINFO_VTABLE (binfo);
2605 int i;
2606 tree base_binfo;
2607 unsigned HOST_WIDE_INT this_offset;
2608
2609 if (v)
2610 {
2611 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2612 gcc_unreachable ();
2613
2614 if (offset == this_offset
2615 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2616 return binfo;
2617 }
2618
2619 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2620 if (polymorphic_type_binfo_p (base_binfo))
2621 {
2622 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2623 if (base_binfo)
2624 return base_binfo;
2625 }
2626 return NULL;
2627 }
2628
/* T is known constant value of virtual table pointer.
   Store virtual table to V and its offset to OFFSET.
   Return false if T does not look like virtual table reference.  */

bool
vtable_pointer_value_to_vtable (const_tree t, tree *v,
				unsigned HOST_WIDE_INT *offset)
{
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain object's BINFO from the context of the virtual table.
     This one contains pointer to virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer matches what
     we propagated through.

     In the case of virtual inheritance, the virtual tables may
     be nested, i.e. the offset may be different from 16 and we may
     need to dive into the type representation.  */
  if (TREE_CODE (t) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
	  == VAR_DECL)
      && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
					 (TREE_OPERAND (t, 0), 0), 0)))
    {
      /* Matched &MEM[&vtable + cst]: extract the VAR_DECL and the
	 constant byte offset.  */
      *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
      *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
      return true;
    }

  /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
     We need to handle it when T comes from static variable initializer or
     BINFO.  */
  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
    {
      *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
      t = TREE_OPERAND (t, 0);
    }
  else
    *offset = 0;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;
  *v = TREE_OPERAND (t, 0);
  return true;
}
2676
2677 /* T is known constant value of virtual table pointer. Return BINFO of the
2678 instance type. */
2679
2680 tree
2681 vtable_pointer_value_to_binfo (const_tree t)
2682 {
2683 tree vtable;
2684 unsigned HOST_WIDE_INT offset;
2685
2686 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2687 return NULL_TREE;
2688
2689 /* FIXME: for stores of construction vtables we return NULL,
2690 because we do not have BINFO for those. Eventually we should fix
2691 our representation to allow this case to be handled, too.
2692 In the case we see store of BINFO we however may assume
2693 that standard folding will be able to cope with it. */
2694 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2695 offset, vtable);
2696 }
2697
2698 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2699 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2700 and insert them in NODES.
2701
2702 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
2703
2704 static void
2705 record_targets_from_bases (tree otr_type,
2706 HOST_WIDE_INT otr_token,
2707 tree outer_type,
2708 HOST_WIDE_INT offset,
2709 vec <cgraph_node *> &nodes,
2710 hash_set<tree> *inserted,
2711 hash_set<tree> *matched_vtables,
2712 bool *completep)
2713 {
2714 while (true)
2715 {
2716 HOST_WIDE_INT pos, size;
2717 tree base_binfo;
2718 tree fld;
2719
2720 if (types_same_for_odr (outer_type, otr_type))
2721 return;
2722
2723 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2724 {
2725 if (TREE_CODE (fld) != FIELD_DECL)
2726 continue;
2727
2728 pos = int_bit_position (fld);
2729 size = tree_to_shwi (DECL_SIZE (fld));
2730 if (pos <= offset && (pos + size) > offset
2731 /* Do not get confused by zero sized bases. */
2732 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2733 break;
2734 }
2735 /* Within a class type we should always find corresponding fields. */
2736 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2737
2738 /* Nonbase types should have been stripped by outer_class_type. */
2739 gcc_assert (DECL_ARTIFICIAL (fld));
2740
2741 outer_type = TREE_TYPE (fld);
2742 offset -= pos;
2743
2744 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2745 offset, otr_type);
2746 if (!base_binfo)
2747 {
2748 gcc_assert (odr_violation_reported);
2749 return;
2750 }
2751 gcc_assert (base_binfo);
2752 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2753 {
2754 bool can_refer;
2755 tree target = gimple_get_virt_method_for_binfo (otr_token,
2756 base_binfo,
2757 &can_refer);
2758 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2759 maybe_record_node (nodes, target, inserted, can_refer, completep);
2760 matched_vtables->add (BINFO_VTABLE (base_binfo));
2761 }
2762 }
2763 }
2764
2765 /* When virtual table is removed, we may need to flush the cache. */
2766
2767 static void
2768 devirt_variable_node_removal_hook (varpool_node *n,
2769 void *d ATTRIBUTE_UNUSED)
2770 {
2771 if (cached_polymorphic_call_targets
2772 && DECL_VIRTUAL_P (n->decl)
2773 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2774 free_polymorphic_call_targets_hash ();
2775 }
2776
/* Record about how many calls would benefit from given type to be final.  */

struct odr_type_warn_count
{
  /* Type that could be declared final.  */
  tree type;
  /* Number of polymorphic calls that would benefit.  */
  int count;
  /* Accumulated profile count of those calls.  */
  gcov_type dyn_count;
};
2785
/* Record about how many calls would benefit from given method to be final.  */

struct decl_warn_count
{
  /* Method that could be declared final.  */
  tree decl;
  /* Number of polymorphic calls that would benefit.  */
  int count;
  /* Accumulated profile count of those calls.  */
  gcov_type dyn_count;
};
2794
/* Information about type and decl warnings.  */

struct final_warning_record
{
  /* Profile count of the call currently being analyzed; added into the
     per-type/per-decl dyn_count accumulators.  */
  gcov_type dyn_count;
  /* Indexed by odr type id (see polymorphic_call_target_d::type_warning).  */
  vec<odr_type_warn_count> type_warnings;
  /* Keyed by method decl (see polymorphic_call_target_d::decl_warning).  */
  hash_map<tree, decl_warn_count> decl_warnings;
};
/* Non-NULL only while the -Wsuggest-final-* statistics are being
   collected.  */
struct final_warning_record *final_warning_records;
2804
2805 /* Return vector containing possible targets of polymorphic call of type
2806 OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
2807 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
2808 OTR_TYPE and include their virtual method. This is useful for types
2809 possibly in construction or destruction where the virtual table may
2810 temporarily change to one of base types. INCLUDE_DERIVER_TYPES make
2811 us to walk the inheritance graph for all derivations.
2812
2813 If COMPLETEP is non-NULL, store true if the list is complete.
2814 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
2815 in the target cache. If user needs to visit every target list
2816 just once, it can memoize them.
2817
2818 If SPECULATIVE is set, the list will not contain targets that
2819 are not speculatively taken.
2820
2821 Returned vector is placed into cache. It is NOT caller's responsibility
2822 to free it. The vector can be freed on cgraph_remove_node call if
2823 the particular node is a virtual function present in the cache. */
2824
2825 vec <cgraph_node *>
2826 possible_polymorphic_call_targets (tree otr_type,
2827 HOST_WIDE_INT otr_token,
2828 ipa_polymorphic_call_context context,
2829 bool *completep,
2830 void **cache_token,
2831 bool speculative)
2832 {
2833 static struct cgraph_node_hook_list *node_removal_hook_holder;
2834 vec <cgraph_node *> nodes = vNULL;
2835 auto_vec <tree, 8> bases_to_consider;
2836 odr_type type, outer_type;
2837 polymorphic_call_target_d key;
2838 polymorphic_call_target_d **slot;
2839 unsigned int i;
2840 tree binfo, target;
2841 bool complete;
2842 bool can_refer = false;
2843 bool skipped = false;
2844
2845 otr_type = TYPE_MAIN_VARIANT (otr_type);
2846
2847 /* If ODR is not initialized or the context is invalid, return empty
2848 incomplete list. */
2849 if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
2850 {
2851 if (completep)
2852 *completep = context.invalid;
2853 if (cache_token)
2854 *cache_token = NULL;
2855 return nodes;
2856 }
2857
2858 /* Do not bother to compute speculative info when user do not asks for it. */
2859 if (!speculative || !context.speculative_outer_type)
2860 context.clear_speculation ();
2861
2862 type = get_odr_type (otr_type, true);
2863
2864 /* Recording type variants would waste results cache. */
2865 gcc_assert (!context.outer_type
2866 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
2867
2868 /* Look up the outer class type we want to walk.
2869 If we fail to do so, the context is invalid. */
2870 if ((context.outer_type || context.speculative_outer_type)
2871 && !context.restrict_to_inner_class (otr_type))
2872 {
2873 if (completep)
2874 *completep = true;
2875 if (cache_token)
2876 *cache_token = NULL;
2877 return nodes;
2878 }
2879 gcc_assert (!context.invalid);
2880
2881 /* Check that restrict_to_inner_class kept the main variant. */
2882 gcc_assert (!context.outer_type
2883 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
2884
2885 /* We canonicalize our query, so we do not need extra hashtable entries. */
2886
2887 /* Without outer type, we have no use for offset. Just do the
2888 basic search from inner type. */
2889 if (!context.outer_type)
2890 context.clear_outer_type (otr_type);
2891 /* We need to update our hierarchy if the type does not exist. */
2892 outer_type = get_odr_type (context.outer_type, true);
2893 /* If the type is complete, there are no derivations. */
2894 if (TYPE_FINAL_P (outer_type->type))
2895 context.maybe_derived_type = false;
2896
2897 /* Initialize query cache. */
2898 if (!cached_polymorphic_call_targets)
2899 {
2900 cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
2901 polymorphic_call_target_hash
2902 = new polymorphic_call_target_hash_type (23);
2903 if (!node_removal_hook_holder)
2904 {
2905 node_removal_hook_holder =
2906 symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
2907 symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
2908 NULL);
2909 }
2910 }
2911
2912 if (in_lto_p)
2913 {
2914 if (context.outer_type != otr_type)
2915 context.outer_type
2916 = get_odr_type (context.outer_type, true)->type;
2917 if (context.speculative_outer_type)
2918 context.speculative_outer_type
2919 = get_odr_type (context.speculative_outer_type, true)->type;
2920 }
2921
2922 /* Look up cached answer. */
2923 key.type = type;
2924 key.otr_token = otr_token;
2925 key.speculative = speculative;
2926 key.context = context;
2927 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
2928 if (cache_token)
2929 *cache_token = (void *)*slot;
2930 if (*slot)
2931 {
2932 if (completep)
2933 *completep = (*slot)->complete;
2934 if ((*slot)->type_warning && final_warning_records)
2935 {
2936 final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
2937 final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
2938 += final_warning_records->dyn_count;
2939 }
2940 if (!speculative && (*slot)->decl_warning && final_warning_records)
2941 {
2942 struct decl_warn_count *c =
2943 final_warning_records->decl_warnings.get ((*slot)->decl_warning);
2944 c->count++;
2945 c->dyn_count += final_warning_records->dyn_count;
2946 }
2947 return (*slot)->targets;
2948 }
2949
2950 complete = true;
2951
2952 /* Do actual search. */
2953 timevar_push (TV_IPA_VIRTUAL_CALL);
2954 *slot = XCNEW (polymorphic_call_target_d);
2955 if (cache_token)
2956 *cache_token = (void *)*slot;
2957 (*slot)->type = type;
2958 (*slot)->otr_token = otr_token;
2959 (*slot)->context = context;
2960 (*slot)->speculative = speculative;
2961
2962 hash_set<tree> inserted;
2963 hash_set<tree> matched_vtables;
2964
2965 /* First insert targets we speculatively identified as likely. */
2966 if (context.speculative_outer_type)
2967 {
2968 odr_type speculative_outer_type;
2969 bool speculation_complete = true;
2970
2971 /* First insert target from type itself and check if it may have
2972 derived types. */
2973 speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
2974 if (TYPE_FINAL_P (speculative_outer_type->type))
2975 context.speculative_maybe_derived_type = false;
2976 binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
2977 context.speculative_offset, otr_type);
2978 if (binfo)
2979 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
2980 &can_refer);
2981 else
2982 target = NULL;
2983
2984 /* In the case we get complete method, we don't need
2985 to walk derivations. */
2986 if (target && DECL_FINAL_P (target))
2987 context.speculative_maybe_derived_type = false;
2988 if (type_possibly_instantiated_p (speculative_outer_type->type))
2989 maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
2990 if (binfo)
2991 matched_vtables.add (BINFO_VTABLE (binfo));
2992
2993
2994 /* Next walk recursively all derived types. */
2995 if (context.speculative_maybe_derived_type)
2996 for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
2997 possible_polymorphic_call_targets_1 (nodes, &inserted,
2998 &matched_vtables,
2999 otr_type,
3000 speculative_outer_type->derived_types[i],
3001 otr_token, speculative_outer_type->type,
3002 context.speculative_offset,
3003 &speculation_complete,
3004 bases_to_consider,
3005 false);
3006 }
3007
3008 if (!speculative || !nodes.length ())
3009 {
3010 /* First see virtual method of type itself. */
3011 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
3012 context.offset, otr_type);
3013 if (binfo)
3014 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3015 &can_refer);
3016 else
3017 {
3018 gcc_assert (odr_violation_reported);
3019 target = NULL;
3020 }
3021
3022 /* Destructors are never called through construction virtual tables,
3023 because the type is always known. */
3024 if (target && DECL_CXX_DESTRUCTOR_P (target))
3025 context.maybe_in_construction = false;
3026
3027 if (target)
3028 {
3029 /* In the case we get complete method, we don't need
3030 to walk derivations. */
3031 if (DECL_FINAL_P (target))
3032 context.maybe_derived_type = false;
3033 }
3034
3035 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
3036 if (type_possibly_instantiated_p (outer_type->type))
3037 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3038 else
3039 skipped = true;
3040
3041 if (binfo)
3042 matched_vtables.add (BINFO_VTABLE (binfo));
3043
3044 /* Next walk recursively all derived types. */
3045 if (context.maybe_derived_type)
3046 {
3047 for (i = 0; i < outer_type->derived_types.length(); i++)
3048 possible_polymorphic_call_targets_1 (nodes, &inserted,
3049 &matched_vtables,
3050 otr_type,
3051 outer_type->derived_types[i],
3052 otr_token, outer_type->type,
3053 context.offset, &complete,
3054 bases_to_consider,
3055 context.maybe_in_construction);
3056
3057 if (!outer_type->all_derivations_known)
3058 {
3059 if (!speculative && final_warning_records)
3060 {
3061 if (complete
3062 && nodes.length () == 1
3063 && warn_suggest_final_types
3064 && !outer_type->derived_types.length ())
3065 {
3066 if (outer_type->id >= (int)final_warning_records->type_warnings.length ())
3067 final_warning_records->type_warnings.safe_grow_cleared
3068 (odr_types.length ());
3069 final_warning_records->type_warnings[outer_type->id].count++;
3070 final_warning_records->type_warnings[outer_type->id].dyn_count
3071 += final_warning_records->dyn_count;
3072 final_warning_records->type_warnings[outer_type->id].type
3073 = outer_type->type;
3074 (*slot)->type_warning = outer_type->id + 1;
3075 }
3076 if (complete
3077 && warn_suggest_final_methods
3078 && nodes.length () == 1
3079 && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
3080 outer_type->type))
3081 {
3082 bool existed;
3083 struct decl_warn_count &c =
3084 final_warning_records->decl_warnings.get_or_insert
3085 (nodes[0]->decl, &existed);
3086
3087 if (existed)
3088 {
3089 c.count++;
3090 c.dyn_count += final_warning_records->dyn_count;
3091 }
3092 else
3093 {
3094 c.count = 1;
3095 c.dyn_count = final_warning_records->dyn_count;
3096 c.decl = nodes[0]->decl;
3097 }
3098 (*slot)->decl_warning = nodes[0]->decl;
3099 }
3100 }
3101 complete = false;
3102 }
3103 }
3104
3105 if (!speculative)
3106 {
3107 /* Destructors are never called through construction virtual tables,
3108 because the type is always known. One of entries may be
3109 cxa_pure_virtual so look to at least two of them. */
3110 if (context.maybe_in_construction)
3111 for (i =0 ; i < MIN (nodes.length (), 2); i++)
3112 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
3113 context.maybe_in_construction = false;
3114 if (context.maybe_in_construction)
3115 {
3116 if (type != outer_type
3117 && (!skipped
3118 || (context.maybe_derived_type
3119 && !type_all_derivations_known_p (outer_type->type))))
3120 record_targets_from_bases (otr_type, otr_token, outer_type->type,
3121 context.offset, nodes, &inserted,
3122 &matched_vtables, &complete);
3123 if (skipped)
3124 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3125 for (i = 0; i < bases_to_consider.length(); i++)
3126 maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
3127 }
3128 }
3129 }
3130
3131 (*slot)->targets = nodes;
3132 (*slot)->complete = complete;
3133 if (completep)
3134 *completep = complete;
3135
3136 timevar_pop (TV_IPA_VIRTUAL_CALL);
3137 return nodes;
3138 }
3139
3140 bool
3141 add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
3142 vec<const decl_warn_count*> *vec)
3143 {
3144 vec->safe_push (&value);
3145 return true;
3146 }
3147
3148 /* Dump target list TARGETS into FILE. */
3149
3150 static void
3151 dump_targets (FILE *f, vec <cgraph_node *> targets)
3152 {
3153 unsigned int i;
3154
3155 for (i = 0; i < targets.length (); i++)
3156 {
3157 char *name = NULL;
3158 if (in_lto_p)
3159 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
3160 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
3161 if (in_lto_p)
3162 free (name);
3163 if (!targets[i]->definition)
3164 fprintf (f, " (no definition%s)",
3165 DECL_DECLARED_INLINE_P (targets[i]->decl)
3166 ? " inline" : "");
3167 }
3168 fprintf (f, "\n");
3169 }
3170
3171 /* Dump all possible targets of a polymorphic call. */
3172
3173 void
3174 dump_possible_polymorphic_call_targets (FILE *f,
3175 tree otr_type,
3176 HOST_WIDE_INT otr_token,
3177 const ipa_polymorphic_call_context &ctx)
3178 {
3179 vec <cgraph_node *> targets;
3180 bool final;
3181 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
3182 unsigned int len;
3183
3184 if (!type)
3185 return;
3186 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3187 ctx,
3188 &final, NULL, false);
3189 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
3190 print_generic_expr (f, type->type, TDF_SLIM);
3191 fprintf (f, " token %i\n", (int)otr_token);
3192
3193 ctx.dump (f);
3194
3195 fprintf (f, " %s%s%s%s\n ",
3196 final ? "This is a complete list." :
3197 "This is partial list; extra targets may be defined in other units.",
3198 ctx.maybe_in_construction ? " (base types included)" : "",
3199 ctx.maybe_derived_type ? " (derived types included)" : "",
3200 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
3201 len = targets.length ();
3202 dump_targets (f, targets);
3203
3204 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3205 ctx,
3206 &final, NULL, true);
3207 if (targets.length () != len)
3208 {
3209 fprintf (f, " Speculative targets:");
3210 dump_targets (f, targets);
3211 }
3212 gcc_assert (targets.length () <= len);
3213 fprintf (f, "\n");
3214 }
3215
3216
3217 /* Return true if N can be possibly target of a polymorphic call of
3218 OTR_TYPE/OTR_TOKEN. */
3219
3220 bool
3221 possible_polymorphic_call_target_p (tree otr_type,
3222 HOST_WIDE_INT otr_token,
3223 const ipa_polymorphic_call_context &ctx,
3224 struct cgraph_node *n)
3225 {
3226 vec <cgraph_node *> targets;
3227 unsigned int i;
3228 enum built_in_function fcode;
3229 bool final;
3230
3231 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
3232 && ((fcode = DECL_FUNCTION_CODE (n->decl))
3233 == BUILT_IN_UNREACHABLE
3234 || fcode == BUILT_IN_TRAP))
3235 return true;
3236
3237 if (!odr_hash)
3238 return true;
3239 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
3240 for (i = 0; i < targets.length (); i++)
3241 if (n->semantically_equivalent_p (targets[i]))
3242 return true;
3243
3244 /* At a moment we allow middle end to dig out new external declarations
3245 as a targets of polymorphic calls. */
3246 if (!final && !n->definition)
3247 return true;
3248 return false;
3249 }
3250
3251
3252
3253 /* Return true if N can be possibly target of a polymorphic call of
3254 OBJ_TYPE_REF expression REF in STMT. */
3255
3256 bool
3257 possible_polymorphic_call_target_p (tree ref,
3258 gimple stmt,
3259 struct cgraph_node *n)
3260 {
3261 ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
3262 tree call_fn = gimple_call_fn (stmt);
3263
3264 return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
3265 tree_to_uhwi
3266 (OBJ_TYPE_REF_TOKEN (call_fn)),
3267 context,
3268 n);
3269 }
3270
3271
3272 /* After callgraph construction new external nodes may appear.
3273 Add them into the graph. */
3274
3275 void
3276 update_type_inheritance_graph (void)
3277 {
3278 struct cgraph_node *n;
3279
3280 if (!odr_hash)
3281 return;
3282 free_polymorphic_call_targets_hash ();
3283 timevar_push (TV_IPA_INHERITANCE);
3284 /* We reconstruct the graph starting from types of all methods seen in the
3285 the unit. */
3286 FOR_EACH_FUNCTION (n)
3287 if (DECL_VIRTUAL_P (n->decl)
3288 && !n->definition
3289 && n->real_symbol_p ())
3290 get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
3291 true);
3292 timevar_pop (TV_IPA_INHERITANCE);
3293 }
3294
3295
3296 /* Return true if N looks like likely target of a polymorphic call.
3297 Rule out cxa_pure_virtual, noreturns, function declared cold and
3298 other obvious cases. */
3299
3300 bool
3301 likely_target_p (struct cgraph_node *n)
3302 {
3303 int flags;
3304 /* cxa_pure_virtual and similar things are not likely. */
3305 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
3306 return false;
3307 flags = flags_from_decl_or_type (n->decl);
3308 if (flags & ECF_NORETURN)
3309 return false;
3310 if (lookup_attribute ("cold",
3311 DECL_ATTRIBUTES (n->decl)))
3312 return false;
3313 if (n->frequency < NODE_FREQUENCY_NORMAL)
3314 return false;
3315 /* If there are no live virtual tables referring the target,
3316 the only way the target can be called is an instance coming from other
3317 compilation unit; speculative devirtualization is built around an
3318 assumption that won't happen. */
3319 if (!referenced_from_vtable_p (n))
3320 return false;
3321 return true;
3322 }
3323
3324 /* Compare type warning records P1 and P2 and choose one with larger count;
3325 helper for qsort. */
3326
3327 int
3328 type_warning_cmp (const void *p1, const void *p2)
3329 {
3330 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3331 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3332
3333 if (t1->dyn_count < t2->dyn_count)
3334 return 1;
3335 if (t1->dyn_count > t2->dyn_count)
3336 return -1;
3337 return t2->count - t1->count;
3338 }
3339
3340 /* Compare decl warning records P1 and P2 and choose one with larger count;
3341 helper for qsort. */
3342
3343 int
3344 decl_warning_cmp (const void *p1, const void *p2)
3345 {
3346 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3347 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3348
3349 if (t1->dyn_count < t2->dyn_count)
3350 return 1;
3351 if (t1->dyn_count > t2->dyn_count)
3352 return -1;
3353 return t2->count - t1->count;
3354 }
3355
3356
3357 /* Try to speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
3358 context CTX. */
3359
3360 struct cgraph_node *
3361 try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
3362 ipa_polymorphic_call_context ctx)
3363 {
3364 vec <cgraph_node *>targets
3365 = possible_polymorphic_call_targets
3366 (otr_type, otr_token, ctx, NULL, NULL, true);
3367 unsigned int i;
3368 struct cgraph_node *likely_target = NULL;
3369
3370 for (i = 0; i < targets.length (); i++)
3371 if (likely_target_p (targets[i]))
3372 {
3373 if (likely_target)
3374 return NULL;
3375 likely_target = targets[i];
3376 }
3377 if (!likely_target
3378 ||!likely_target->definition
3379 || DECL_EXTERNAL (likely_target->decl))
3380 return NULL;
3381
3382 /* Don't use an implicitly-declared destructor (c++/58678). */
3383 struct cgraph_node *non_thunk_target
3384 = likely_target->function_symbol ();
3385 if (DECL_ARTIFICIAL (non_thunk_target->decl))
3386 return NULL;
3387 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3388 && likely_target->can_be_discarded_p ())
3389 return NULL;
3390 return likely_target;
3391 }
3392
/* The ipa-devirt pass.
   When polymorphic call has only one likely target in the unit,
   turn it into a speculative call.  */

static unsigned int
ipa_devirt (void)
{
  struct cgraph_node *n;
  hash_set<void *> bad_call_targets;
  struct cgraph_edge *e;

  /* Statistics reported in the dump file at the end of the pass.  */
  int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
  int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
  int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
  int ndropped = 0;

  /* Nothing to do when no ODR types were recorded.  */
  if (!odr_types_ptr)
    return 0;

  if (dump_file)
    dump_type_inheritance_graph (dump_file);

  /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
     This is implemented by setting up final_warning_records that are updated
     by get_polymorphic_call_targets.
     We need to clear cache in this case to trigger recomputation of all
     entries.  */
  if (warn_suggest_final_methods || warn_suggest_final_types)
    {
      final_warning_records = new (final_warning_record);
      final_warning_records->type_warnings = vNULL;
      final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
      free_polymorphic_call_targets_hash ();
    }

  FOR_EACH_DEFINED_FUNCTION (n)
    {
      bool update = false;
      if (!opt_for_fn (n->decl, flag_devirtualize))
	continue;
      if (dump_file && n->indirect_calls)
	fprintf (dump_file, "\n\nProcesing function %s/%i\n",
		 n->name (), n->order);
      for (e = n->indirect_calls; e; e = e->next_callee)
	if (e->indirect_info->polymorphic)
	  {
	    struct cgraph_node *likely_target = NULL;
	    void *cache_token;
	    bool final;

	    /* Make the profile count of this call available to the
	       warning machinery in get_polymorphic_call_targets.  */
	    if (final_warning_records)
	      final_warning_records->dyn_count = e->count;

	    vec <cgraph_node *> targets
	       = possible_polymorphic_call_targets
		  (e, &final, &cache_token, true);
	    unsigned int i;

	    /* Trigger warnings by calculating non-speculative targets.  */
	    if (warn_suggest_final_methods || warn_suggest_final_types)
	      possible_polymorphic_call_targets (e);

	    if (dump_file)
	      dump_possible_polymorphic_call_targets
		(dump_file, e);

	    npolymorphic++;

	    /* See if the call can be devirtualized by means of ipa-prop's
	       polymorphic call context propagation.  If not, we can just
	       forget about this call being polymorphic and avoid some heavy
	       lifting in remove_unreachable_nodes that will otherwise try to
	       keep all possible targets alive until inlining and in the
	       inliner itself.

	       This may need to be revisited once we add further ways to use
	       the may edges, but it is a reasonable thing to do right now.  */

	    if ((e->indirect_info->param_index == -1
		|| (!opt_for_fn (n->decl, flag_devirtualize_speculatively)
		    && e->indirect_info->vptr_changed))
		&& !flag_ltrans_devirtualize)
	      {
		e->indirect_info->polymorphic = false;
		ndropped++;
		if (dump_file)
		  fprintf (dump_file, "Dropping polymorphic call info;"
			   " it can not be used by ipa-prop\n");
	      }

	    if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
	      continue;

	    if (!e->maybe_hot_p ())
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is cold\n\n");
		ncold++;
		continue;
	      }
	    if (e->speculative)
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is already speculated\n\n");
		nspeculated++;

		/* When dumping see if we agree with speculation.  */
		if (!dump_file)
		  continue;
	      }
	    if (bad_call_targets.contains (cache_token))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target list is known to be useless\n\n");
		nmultiple++;
		continue;
	      }
	    /* Pick the unique likely target; two or more of them means we
	       can not guess which one will be called.  */
	    for (i = 0; i < targets.length (); i++)
	      if (likely_target_p (targets[i]))
		{
		  if (likely_target)
		    {
		      likely_target = NULL;
		      if (dump_file)
			fprintf (dump_file, "More than one likely target\n\n");
		      nmultiple++;
		      break;
		    }
		  likely_target = targets[i];
		}
	    if (!likely_target)
	      {
		/* Remember the cache token so the same useless target list
		   is rejected cheaply next time it is seen.  */
		bad_call_targets.add (cache_token);
		continue;
	      }
	    /* This is reached only when dumping; check if we agree or disagree
	       with the speculation.  */
	    if (e->speculative)
	      {
		struct cgraph_edge *e2;
		struct ipa_ref *ref;
		e->speculative_call_info (e2, e, ref);
		if (e2->callee->ultimate_alias_target ()
		    == likely_target->ultimate_alias_target ())
		  {
		    fprintf (dump_file, "We agree with speculation\n\n");
		    nok++;
		  }
		else
		  {
		    fprintf (dump_file, "We disagree with speculation\n\n");
		    nwrong++;
		  }
		continue;
	      }
	    if (!likely_target->definition)
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is not a definition\n\n");
		nnotdefined++;
		continue;
	      }
	    /* Do not introduce new references to external symbols.  While we
	       can handle these just well, it is common for programs to be
	       built inconsistently, with headers defining methods that the
	       program is not actually linked with.  */
	    if (DECL_EXTERNAL (likely_target->decl))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is external\n\n");
		nexternal++;
		continue;
	      }
	    /* Don't use an implicitly-declared destructor (c++/58678).  */
	    struct cgraph_node *non_thunk_target
	      = likely_target->function_symbol ();
	    if (DECL_ARTIFICIAL (non_thunk_target->decl))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is artificial\n\n");
		nartificial++;
		continue;
	      }
	    if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
		&& likely_target->can_be_discarded_p ())
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is overwritable\n\n");
		noverwritable++;
		continue;
	      }
	    else if (dbg_cnt (devirt))
	      {
		if (dump_enabled_p ())
		  {
		    location_t locus = gimple_location_safe (e->call_stmt);
		    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
				     "speculatively devirtualizing call in %s/%i to %s/%i\n",
				     n->name (), n->order,
				     likely_target->name (),
				     likely_target->order);
		  }
		/* Redirect to a non-discardable alias when possible so the
		   new reference we introduce can not vanish.  */
		if (!likely_target->can_be_discarded_p ())
		  {
		    cgraph_node *alias;
		    alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
		    if (alias)
		      likely_target = alias;
		  }
		nconverted++;
		update = true;
		/* Turn the indirect call into a speculative direct call,
		   giving it 80% of the original edge's profile weight.  */
		e->make_speculative
		  (likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
	      }
	  }
      if (update)
	inline_update_overall_summary (n);
    }

  /* Emit the queued -Wsuggest-final-types and -Wsuggest-final-methods
     warnings, ordered by decreasing profile weight.  */
  if (warn_suggest_final_methods || warn_suggest_final_types)
    {
      if (warn_suggest_final_types)
	{
	  final_warning_records->type_warnings.qsort (type_warning_cmp);
	  for (unsigned int i = 0;
	       i < final_warning_records->type_warnings.length (); i++)
	    if (final_warning_records->type_warnings[i].count)
	      {
		tree type = final_warning_records->type_warnings[i].type;
		int count = final_warning_records->type_warnings[i].count;
		long long dyn_count
		  = final_warning_records->type_warnings[i].dyn_count;

		if (!dyn_count)
		  warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
			     OPT_Wsuggest_final_types, count,
			     "Declaring type %qD final "
			     "would enable devirtualization of %i call",
			     "Declaring type %qD final "
			     "would enable devirtualization of %i calls",
			     type,
			     count);
		else
		  warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
			     OPT_Wsuggest_final_types, count,
			     "Declaring type %qD final "
			     "would enable devirtualization of %i call "
			     "executed %lli times",
			     "Declaring type %qD final "
			     "would enable devirtualization of %i calls "
			     "executed %lli times",
			     type,
			     count,
			     dyn_count);
	      }
	}

      if (warn_suggest_final_methods)
	{
	  vec<const decl_warn_count*> decl_warnings_vec = vNULL;

	  final_warning_records->decl_warnings.traverse
	    <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
	  decl_warnings_vec.qsort (decl_warning_cmp);
	  for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
	    {
	      tree decl = decl_warnings_vec[i]->decl;
	      int count = decl_warnings_vec[i]->count;
	      long long dyn_count = decl_warnings_vec[i]->dyn_count;

	      /* Four variants: with/without profile counts, crossed with
		 destructor/ordinary method phrasing.  */
	      if (!dyn_count)
		if (DECL_CXX_DESTRUCTOR_P (decl))
		  warning_n (DECL_SOURCE_LOCATION (decl),
			     OPT_Wsuggest_final_methods, count,
			     "Declaring virtual destructor of %qD final "
			     "would enable devirtualization of %i call",
			     "Declaring virtual destructor of %qD final "
			     "would enable devirtualization of %i calls",
			     DECL_CONTEXT (decl), count);
		else
		  warning_n (DECL_SOURCE_LOCATION (decl),
			     OPT_Wsuggest_final_methods, count,
			     "Declaring method %qD final "
			     "would enable devirtualization of %i call",
			     "Declaring method %qD final "
			     "would enable devirtualization of %i calls",
			     decl, count);
	      else if (DECL_CXX_DESTRUCTOR_P (decl))
		warning_n (DECL_SOURCE_LOCATION (decl),
			   OPT_Wsuggest_final_methods, count,
			   "Declaring virtual destructor of %qD final "
			   "would enable devirtualization of %i call "
			   "executed %lli times",
			   "Declaring virtual destructor of %qD final "
			   "would enable devirtualization of %i calls "
			   "executed %lli times",
			   DECL_CONTEXT (decl), count, dyn_count);
	      else
		warning_n (DECL_SOURCE_LOCATION (decl),
			   OPT_Wsuggest_final_methods, count,
			   "Declaring method %qD final "
			   "would enable devirtualization of %i call "
			   "executed %lli times",
			   "Declaring method %qD final "
			   "would enable devirtualization of %i calls "
			   "executed %lli times",
			   decl, count, dyn_count);
	    }
	}

      delete (final_warning_records);
      final_warning_records = 0;
    }

  if (dump_file)
    fprintf (dump_file,
	     "%i polymorphic calls, %i devirtualized,"
	     " %i speculatively devirtualized, %i cold\n"
	     "%i have multiple targets, %i overwritable,"
	     " %i already speculated (%i agree, %i disagree),"
	     " %i external, %i not defined, %i artificial, %i infos dropped\n",
	     npolymorphic, ndevirtualized, nconverted, ncold,
	     nmultiple, noverwritable, nspeculated, nok, nwrong,
	     nexternal, nnotdefined, nartificial, ndropped);
  return ndevirtualized || ndropped ? TODO_remove_functions : 0;
}
3718
3719 namespace {
3720
/* Pass descriptor for the ipa-devirt pass; consumed by the
   pass_ipa_devirt constructor below.  */

const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};
3733
3734 class pass_ipa_devirt : public ipa_opt_pass_d
3735 {
3736 public:
3737 pass_ipa_devirt (gcc::context *ctxt)
3738 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3739 NULL, /* generate_summary */
3740 NULL, /* write_summary */
3741 NULL, /* read_summary */
3742 NULL, /* write_optimization_summary */
3743 NULL, /* read_optimization_summary */
3744 NULL, /* stmt_fixup */
3745 0, /* function_transform_todo_flags_start */
3746 NULL, /* function_transform */
3747 NULL) /* variable_transform */
3748 {}
3749
3750 /* opt_pass methods: */
3751 virtual bool gate (function *)
3752 {
3753 /* In LTO, always run the IPA passes and decide on function basis if the
3754 pass is enabled. */
3755 if (in_lto_p)
3756 return true;
3757 return (flag_devirtualize
3758 && (flag_devirtualize_speculatively
3759 || (warn_suggest_final_methods
3760 || warn_suggest_final_types))
3761 && optimize);
3762 }
3763
3764 virtual unsigned int execute (function *) { return ipa_devirt (); }
3765
3766 }; // class pass_ipa_devirt
3767
3768 } // anon namespace
3769
/* Factory used by the pass manager to instantiate the ipa-devirt pass.  */

ipa_opt_pass_d *
make_pass_ipa_devirt (gcc::context *ctxt)
{
  return new pass_ipa_devirt (ctxt);
}
3775
3776 #include "gt-ipa-devirt.h"