]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/ipa-devirt.c
2015-07-07 Andrew MacLeod <amacleod@redhat.com>
[thirdparty/gcc.git] / gcc / ipa-devirt.c
1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2015 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
44
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vector are not the BINFOs associated
56 with the base types. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of the base binfos (which differs from BINFO_VTABLE of
65 the binfo associated with the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
76
77 polymorphic (indirect) call
78 This is callgraph representation of virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
97
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given the parameters found in an
103 indirect polymorphic edge, all possible polymorphic call targets of the call.
104
105 pass_ipa_devirt performs simple speculative devirtualization.
106 */
107
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "backend.h"
112 #include "tree.h"
113 #include "gimple.h"
114 #include "rtl.h"
115 #include "alias.h"
116 #include "fold-const.h"
117 #include "print-tree.h"
118 #include "calls.h"
119 #include "cgraph.h"
120 #include "flags.h"
121 #include "insn-config.h"
122 #include "expmed.h"
123 #include "dojump.h"
124 #include "explow.h"
125 #include "emit-rtl.h"
126 #include "varasm.h"
127 #include "stmt.h"
128 #include "expr.h"
129 #include "tree-pass.h"
130 #include "target.h"
131 #include "tree-pretty-print.h"
132 #include "ipa-utils.h"
133 #include "internal-fn.h"
134 #include "gimple-fold.h"
135 #include "alloc-pool.h"
136 #include "symbol-summary.h"
137 #include "ipa-prop.h"
138 #include "ipa-inline.h"
139 #include "diagnostic.h"
140 #include "tree-dfa.h"
141 #include "demangle.h"
142 #include "dbgcnt.h"
143 #include "gimple-pretty-print.h"
144 #include "stor-layout.h"
145 #include "intl.h"
146 #include "streamer-hooks.h"
147 #include "lto-streamer.h"
148
149 /* Hash based set of pairs of types. */
150 typedef struct
151 {
152 tree first;
153 tree second;
154 } type_pair;
155
156 template <>
157 struct default_hash_traits <type_pair> : typed_noop_remove <type_pair>
158 {
159 typedef type_pair value_type;
160 typedef type_pair compare_type;
161 static hashval_t
162 hash (type_pair p)
163 {
164 return TYPE_UID (p.first) ^ TYPE_UID (p.second);
165 }
166 static bool
167 is_empty (type_pair p)
168 {
169 return p.first == NULL;
170 }
171 static bool
172 is_deleted (type_pair p ATTRIBUTE_UNUSED)
173 {
174 return false;
175 }
176 static bool
177 equal (const type_pair &a, const type_pair &b)
178 {
179 return a.first==b.first && a.second == b.second;
180 }
181 static void
182 mark_empty (type_pair &e)
183 {
184 e.first = NULL;
185 }
186 };
187
/* Forward declaration; defined later in this file.  Performs the structural
   ODR equivalence comparison of two types.  */
188 static bool odr_types_equivalent_p (tree, tree, bool, bool *,
189 hash_set<type_pair> *,
190 location_t, location_t);
191 
/* Set to true whenever an ODR violation is diagnosed.  */
192 static bool odr_violation_reported = false;
193 
194 
195 /* Pointer set of all call targets appearing in the cache. */
196 static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
197
198 /* The node of type inheritance graph. For each type unique in
199 One Definition Rule (ODR) sense, we produce one node linking all
200 main variants of types equivalent to it, bases and derived types. */
201 
202 struct GTY(()) odr_type_d
203 {
204 /* Leader type of this ODR equivalence class. */
205 tree type;
206 /* All bases; built only for main variants of types. */
207 vec<odr_type> GTY((skip)) bases;
208 /* All derived types with virtual methods seen in unit;
209 built only for main variants of types. */
210 vec<odr_type> GTY((skip)) derived_types;
211 
212 /* All equivalent types, if more than one. */
213 vec<tree, va_gc> *types;
214 /* Set of all equivalent types, if NON-NULL; mirrors TYPES for fast lookup. */
215 hash_set<tree> * GTY((skip)) types_set;
216 
217 /* Unique ID indexing the type in odr_types array. */
218 int id;
219 /* Is it in anonymous namespace? */
220 bool anonymous_namespace;
221 /* Do we know about all derivations of given type? */
222 bool all_derivations_known;
223 /* Did we report ODR violation here? */
224 bool odr_violated;
225 /* Set when a virtual table without RTTI prevailed over one with it. */
226 bool rtti_broken;
227 };
228
229 /* Return true if T is a type with linkage defined. */
230
231 bool
232 type_with_linkage_p (const_tree t)
233 {
234 /* Builtin types do not define linkage, their TYPE_CONTEXT is NULL. */
235 if (!TYPE_CONTEXT (t)
236 || !TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL
237 || !TYPE_STUB_DECL (t))
238 return false;
239
240 /* In LTO do not get confused by non-C++ produced types or types built
241 with -fno-lto-odr-type-merigng. */
242 if (in_lto_p)
243 {
244 /* To support -fno-lto-odr-type-merigng recognize types with vtables
245 to have linkage. */
246 if (RECORD_OR_UNION_TYPE_P (t)
247 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
248 return true;
249 /* Do not accept any other types - we do not know if they were produced
250 by C++ FE. */
251 if (!DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)))
252 return false;
253 }
254
255 return (RECORD_OR_UNION_TYPE_P (t)
256 || TREE_CODE (t) == ENUMERAL_TYPE);
257 }
258
259 /* Return true if T is in anonymous namespace.
260 This works only on those C++ types with linkage defined. */
261
262 bool
263 type_in_anonymous_namespace_p (const_tree t)
264 {
265 gcc_assert (type_with_linkage_p (t));
266
267 /* Keep -fno-lto-odr-type-merging working by recognizing classes with vtables
268 properly into anonymous namespaces. */
269 if (RECORD_OR_UNION_TYPE_P (t)
270 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
271 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
272
273 if (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)))
274 {
275 /* C++ FE uses magic <anon> as assembler names of anonymous types.
276 verify that this match with type_in_anonymous_namespace_p. */
277 #ifdef ENABLE_CHECKING
278 if (in_lto_p)
279 gcc_assert (!strcmp ("<anon>",
280 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t)))));
281 #endif
282 return true;
283 }
284 return false;
285 }
286
287 /* Return true if T is a type with One Definition Rule info attached.
288 That means that it either is an anonymous type or it has its assembler
289 name set. */
290 
291 bool
292 odr_type_p (const_tree t)
293 {
294 /* We do not have this information when not in LTO, but we do not need
295 to care, since it is used only for type merging. */
296 gcc_checking_assert (in_lto_p || flag_lto);
297 
298 /* To support -fno-lto-odr-type-merging consider types with vtables ODR. */
299 if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
300 return true;
301 
302 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
303 && (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t))))
304 {
305 #ifdef ENABLE_CHECKING
306 /* The C++ FE uses the magic string "<anon>" as assembler name of
307 anonymous types; verify that this matches type_in_anonymous_namespace_p. */
308 gcc_assert (!type_with_linkage_p (t)
309 || strcmp ("<anon>",
310 IDENTIFIER_POINTER
311 (DECL_ASSEMBLER_NAME (TYPE_NAME (t))))
312 || type_in_anonymous_namespace_p (t));
313 #endif
314 return true;
315 }
316 return false;
317 }
318
319 /* Return TRUE if all derived types of T are known and thus
320 we may consider the walk of derived type complete.
321
322 This is typically true only for final anonymous namespace types and types
323 defined within functions (that may be COMDAT and thus shared across units,
324 but with the same set of derived types). */
325
326 bool
327 type_all_derivations_known_p (const_tree t)
328 {
329 if (TYPE_FINAL_P (t))
330 return true;
331 if (flag_ltrans)
332 return false;
333 /* Non-C++ types may have IDENTIFIER_NODE here, do not crash. */
334 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
335 return true;
336 if (type_in_anonymous_namespace_p (t))
337 return true;
338 return (decl_function_context (TYPE_NAME (t)) != NULL);
339 }
340
341 /* Return TRUE if type's constructors are all visible. */
342
343 static bool
344 type_all_ctors_visible_p (tree t)
345 {
346 return !flag_ltrans
347 && symtab->state >= CONSTRUCTION
348 /* We can not always use type_all_derivations_known_p.
349 For function local types we must assume case where
350 the function is COMDAT and shared in between units.
351
352 TODO: These cases are quite easy to get, but we need
353 to keep track of C++ privatizing via -Wno-weak
354 as well as the IPA privatizing. */
355 && type_in_anonymous_namespace_p (t);
356 }
357
358 /* Return TRUE if type may have instance. */
359
360 static bool
361 type_possibly_instantiated_p (tree t)
362 {
363 tree vtable;
364 varpool_node *vnode;
365
366 /* TODO: Add abstract types here. */
367 if (!type_all_ctors_visible_p (t))
368 return true;
369
370 vtable = BINFO_VTABLE (TYPE_BINFO (t));
371 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
372 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
373 vnode = varpool_node::get (vtable);
374 return vnode && vnode->definition;
375 }
376
377 /* Hash used to unify ODR types based on their mangled name and for anonymous
378 namespace types. */
379 
380 struct odr_name_hasher : pointer_hash <odr_type_d>
381 {
/* Lookups are keyed directly by a tree type node rather than by odr_type_d. */
382 typedef union tree_node *compare_type;
383 static inline hashval_t hash (const odr_type_d *);
384 static inline bool equal (const odr_type_d *, const tree_node *);
385 static inline void remove (odr_type_d *);
386 };
387
388 /* Hash used to unify ODR types based on their associated virtual table.
389 This hash is needed to keep -fno-lto-odr-type-merging working and contains
390 only polymorphic types. Types with mangled names are inserted into both hashes. */
391 
392 struct odr_vtable_hasher:odr_name_hasher
393 {
394 static inline hashval_t hash (const odr_type_d *);
395 static inline bool equal (const odr_type_d *, const tree_node *);
396 };
397
398 /* Return type that was declared with T's name so that T is an
399 qualified variant of it. */
400
401 static inline tree
402 main_odr_variant (const_tree t)
403 {
404 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
405 return TREE_TYPE (TYPE_NAME (t));
406 /* Unnamed types and non-C++ produced types can be compared by variants. */
407 else
408 return TYPE_MAIN_VARIANT (t);
409 }
410
411 static bool
412 can_be_name_hashed_p (tree t)
413 {
414 return (!in_lto_p || odr_type_p (t));
415 }
416
417 /* Hash type by its ODR name. */
418
419 static hashval_t
420 hash_odr_name (const_tree t)
421 {
422 gcc_checking_assert (main_odr_variant (t) == t);
423
424 /* If not in LTO, all main variants are unique, so we can do
425 pointer hash. */
426 if (!in_lto_p)
427 return htab_hash_pointer (t);
428
429 /* Anonymous types are unique. */
430 if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
431 return htab_hash_pointer (t);
432
433 gcc_checking_assert (TYPE_NAME (t)
434 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)));
435 return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
436 }
437
438 /* Return the computed hashcode for ODR_TYPE, derived from its ODR name. */
439 
440 inline hashval_t
441 odr_name_hasher::hash (const odr_type_d *odr_type)
442 {
443 return hash_odr_name (odr_type->type);
444 }
445
446 static bool
447 can_be_vtable_hashed_p (tree t)
448 {
449 /* vtable hashing can distinguish only main variants. */
450 if (TYPE_MAIN_VARIANT (t) != t)
451 return false;
452 /* Anonymous namespace types are always handled by name hash. */
453 if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
454 return false;
455 return (TREE_CODE (t) == RECORD_TYPE
456 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)));
457 }
458
459 /* Hash type by assembler name of its vtable. */
460 
461 static hashval_t
462 hash_odr_vtable (const_tree t)
463 {
464 tree v = BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (t)));
465 inchash::hash hstate;
466 
467 gcc_checking_assert (in_lto_p);
468 gcc_checking_assert (!type_in_anonymous_namespace_p (t));
469 gcc_checking_assert (TREE_CODE (t) == RECORD_TYPE
470 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)));
471 gcc_checking_assert (main_odr_variant (t) == t);
472 
/* BINFO_VTABLE may be a POINTER_PLUS_EXPR pointing into the vtable; mix in
   the offset and strip down to the underlying vtable decl.  */
473 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
474 {
475 add_expr (TREE_OPERAND (v, 1), hstate);
476 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
477 }
478 
/* Finally mix in the hash of the vtable's assembler name.  */
479 hstate.add_wide_int (IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (v)));
480 return hstate.end ();
481 }
482
483 /* Return the computed hashcode for ODR_TYPE, derived from its vtable name. */
484 
485 inline hashval_t
486 odr_vtable_hasher::hash (const odr_type_d *odr_type)
487 {
488 return hash_odr_vtable (odr_type->type);
489 }
490
491 /* For languages with One Definition Rule, work out if
492 types are the same based on their name.
493
494 This is non-trivial for LTO where minor differences in
495 the type representation may have prevented type merging
496 to merge two copies of otherwise equivalent type.
497
498 Until we start streaming mangled type names, this function works
499 only for polymorphic types.
500
501 When STRICT is true, we compare types by their names for purposes of
502 ODR violation warnings. When strict is false, we consider variants
503 equivalent, becuase it is all that matters for devirtualization machinery.
504 */
505
506 bool
507 types_same_for_odr (const_tree type1, const_tree type2, bool strict)
508 {
509 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
510
511 type1 = main_odr_variant (type1);
512 type2 = main_odr_variant (type2);
513 if (!strict)
514 {
515 type1 = TYPE_MAIN_VARIANT (type1);
516 type2 = TYPE_MAIN_VARIANT (type2);
517 }
518
519 if (type1 == type2)
520 return true;
521
522 if (!in_lto_p)
523 return false;
524
525 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
526 on the corresponding TYPE_STUB_DECL. */
527 if ((type_with_linkage_p (type1) && type_in_anonymous_namespace_p (type1))
528 || (type_with_linkage_p (type2) && type_in_anonymous_namespace_p (type2)))
529 return false;
530
531
532 /* ODR name of the type is set in DECL_ASSEMBLER_NAME of its TYPE_NAME.
533
534 Ideally we should never need types without ODR names here. It can however
535 happen in two cases:
536
537 1) for builtin types that are not streamed but rebuilt in lto/lto-lang.c
538 Here testing for equivalence is safe, since their MAIN_VARIANTs are
539 unique.
540 2) for units streamed with -fno-lto-odr-type-merging. Here we can't
541 establish precise ODR equivalency, but for correctness we care only
542 about equivalency on complete polymorphic types. For these we can
543 compare assembler names of their virtual tables. */
544 if ((!TYPE_NAME (type1) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type1)))
545 || (!TYPE_NAME (type2) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type2))))
546 {
547 /* See if types are obviously different (i.e. different codes
548 or polymorphic wrt non-polymorphic). This is not strictly correct
549 for ODR violating programs, but we can't do better without streaming
550 ODR names. */
551 if (TREE_CODE (type1) != TREE_CODE (type2))
552 return false;
553 if (TREE_CODE (type1) == RECORD_TYPE
554 && (TYPE_BINFO (type1) == NULL_TREE)
555 != (TYPE_BINFO (type1) == NULL_TREE))
556 return false;
557 if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
558 && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
559 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
560 return false;
561
562 /* At the moment we have no way to establish ODR equivalence at LTO
563 other than comparing virtual table pointers of polymorphic types.
564 Eventually we should start saving mangled names in TYPE_NAME.
565 Then this condition will become non-trivial. */
566
567 if (TREE_CODE (type1) == RECORD_TYPE
568 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
569 && BINFO_VTABLE (TYPE_BINFO (type1))
570 && BINFO_VTABLE (TYPE_BINFO (type2)))
571 {
572 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
573 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
574 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
575 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
576 return (operand_equal_p (TREE_OPERAND (v1, 1),
577 TREE_OPERAND (v2, 1), 0)
578 && DECL_ASSEMBLER_NAME
579 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
580 == DECL_ASSEMBLER_NAME
581 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
582 }
583 gcc_unreachable ();
584 }
585 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
586 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
587 }
588
589 /* Return true if we can decide on ODR equivalency.
590 
591 In non-LTO this is always decidable; in LTO it depends on whether the
592 type has ODR info attached.
593 
594 When STRICT is false, compare main variants. */
595 
596 bool
597 types_odr_comparable (tree t1, tree t2, bool strict)
598 {
599 return (!in_lto_p
600 || (strict ? (main_odr_variant (t1) == main_odr_variant (t2)
601 && main_odr_variant (t1))
602 : TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
603 || (odr_type_p (t1) && odr_type_p (t2))
604 || (TREE_CODE (t1) == RECORD_TYPE && TREE_CODE (t2) == RECORD_TYPE
605 && TYPE_BINFO (t1) && TYPE_BINFO (t2)
606 && polymorphic_type_binfo_p (TYPE_BINFO (t1))
607 && polymorphic_type_binfo_p (TYPE_BINFO (t2))));
608 }
609
610 /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
611 known, be conservative and return false. */
612
613 bool
614 types_must_be_same_for_odr (tree t1, tree t2)
615 {
616 if (types_odr_comparable (t1, t2))
617 return types_same_for_odr (t1, t2);
618 else
619 return TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2);
620 }
621
622 /* If T is compound type, return type it is based on. */
623
624 static tree
625 compound_type_base (const_tree t)
626 {
627 if (TREE_CODE (t) == ARRAY_TYPE
628 || POINTER_TYPE_P (t)
629 || TREE_CODE (t) == COMPLEX_TYPE
630 || VECTOR_TYPE_P (t))
631 return TREE_TYPE (t);
632 if (TREE_CODE (t) == METHOD_TYPE)
633 return TYPE_METHOD_BASETYPE (t);
634 if (TREE_CODE (t) == OFFSET_TYPE)
635 return TYPE_OFFSET_BASETYPE (t);
636 return NULL_TREE;
637 }
638
639 /* Return true if T is either an ODR type or a compound type derived from it.
640 If the function returns true, we know that T is a type originating from C++
641 source even at link-time. */
642 
643 bool
644 odr_or_derived_type_p (const_tree t)
645 {
646 do
647 {
648 if (odr_type_p (t))
649 return true;
650 /* Function type is a tricky one. Basically we can consider it
651 ODR derived if return type or any of the parameters is.
652 We need to check all parameters because LTO streaming merges
653 common types (such as void) and they are not considered ODR then. */
654 if (TREE_CODE (t) == FUNCTION_TYPE)
655 {
656 if (TYPE_METHOD_BASETYPE (t))
657 t = TYPE_METHOD_BASETYPE (t);
658 else
659 {
660 if (TREE_TYPE (t) && odr_or_derived_type_p (TREE_TYPE (t)))
661 return true;
662 for (t = TYPE_ARG_TYPES (t); t; t = TREE_CHAIN (t))
663 if (odr_or_derived_type_p (TREE_VALUE (t)))
664 return true;
665 return false;
666 }
667 }
668 else
669 t = compound_type_base (t);
670 }
671 while (t);
/* The loop only exits when T became NULL, so this returns false. */
672 return t;
673 }
674
675 /* Compare types T1 and T2 and return true if they are
676 equivalent. */
677
678 inline bool
679 odr_name_hasher::equal (const odr_type_d *o1, const tree_node *t2)
680 {
681 tree t1 = o1->type;
682
683 gcc_checking_assert (main_odr_variant (t2) == t2);
684 gcc_checking_assert (main_odr_variant (t1) == t1);
685 if (t1 == t2)
686 return true;
687 if (!in_lto_p)
688 return false;
689 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
690 on the corresponding TYPE_STUB_DECL. */
691 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
692 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
693 return false;
694 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1)));
695 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
696 return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
697 == DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
698 }
699
700 /* Compare types T1 and T2 and return true if they are
701 equivalent, judged by offset and assembler name of their vtables. */
702 
703 inline bool
704 odr_vtable_hasher::equal (const odr_type_d *o1, const tree_node *t2)
705 {
706 tree t1 = o1->type;
707 
708 gcc_checking_assert (main_odr_variant (t2) == t2);
709 gcc_checking_assert (main_odr_variant (t1) == t1);
710 gcc_checking_assert (in_lto_p);
711 t1 = TYPE_MAIN_VARIANT (t1);
712 t2 = TYPE_MAIN_VARIANT (t2);
713 if (t1 == t2)
714 return true;
/* NOTE(review): both BINFO_VTABLEs are accessed as POINTER_PLUS_EXPRs
   without checking their code first -- presumably entries in this hash
   always have that canonical form; confirm against the insertion path. */
715 tree v1 = BINFO_VTABLE (TYPE_BINFO (t1));
716 tree v2 = BINFO_VTABLE (TYPE_BINFO (t2));
717 return (operand_equal_p (TREE_OPERAND (v1, 1),
718 TREE_OPERAND (v2, 1), 0)
719 && DECL_ASSEMBLER_NAME
720 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
721 == DECL_ASSEMBLER_NAME
722 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
723 }
724
725 /* Free ODR type V. */
726
727 inline void
728 odr_name_hasher::remove (odr_type_d *v)
729 {
730 v->bases.release ();
731 v->derived_types.release ();
732 if (v->types_set)
733 delete v->types_set;
734 ggc_free (v);
735 }
736
737 /* ODR type hash used to look up ODR type based on tree type node. */
738 
739 typedef hash_table<odr_name_hasher> odr_hash_type;
740 static odr_hash_type *odr_hash;
/* Secondary hash keyed by virtual table, see odr_vtable_hasher above. */
741 typedef hash_table<odr_vtable_hasher> odr_vtable_hash_type;
742 static odr_vtable_hash_type *odr_vtable_hash;
743 
744 /* ODR types are also stored into ODR_TYPE vector to allow consistent
745 walking. Bases appear before derived types. Vector is garbage collected
746 so we won't end up visiting empty types. */
747 
748 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
/* Convenience accessor so code can simply write odr_types[i]. */
749 #define odr_types (*odr_types_ptr)
750
751 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
752 void
753 set_type_binfo (tree type, tree binfo)
754 {
755 for (; type; type = TYPE_NEXT_VARIANT (type))
756 if (COMPLETE_TYPE_P (type))
757 TYPE_BINFO (type) = binfo;
758 else
759 gcc_assert (!TYPE_BINFO (type));
760 }
761
762 /* Compare T1 and T2 based on name or structure. */
763 
764 static bool
765 odr_subtypes_equivalent_p (tree t1, tree t2,
766 hash_set<type_pair> *visited,
767 location_t loc1, location_t loc2)
768 {
769 
770 /* This can happen in incomplete types that should be handled earlier. */
771 gcc_assert (t1 && t2);
772 
773 t1 = main_odr_variant (t1);
774 t2 = main_odr_variant (t2);
775 if (t1 == t2)
776 return true;
777 
778 /* Anonymous namespace types must match exactly. */
779 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
780 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
781 return false;
782 
783 /* For ODR types be sure to compare their names.
784 To support -Wno-odr-type-merging we allow one type to be non-ODR
785 and other ODR even though it is a violation. */
786 if (types_odr_comparable (t1, t2, true))
787 {
788 if (!types_same_for_odr (t1, t2, true))
789 return false;
790 /* Limit recursion: If subtypes are ODR types and we know
791 that they are same, be happy. */
792 if (!odr_type_p (t1) || !get_odr_type (t1, true)->odr_violated)
793 return true;
794 }
795 
796 /* Component types, builtins and possibly violating ODR types
797 have to be compared structurally. */
798 if (TREE_CODE (t1) != TREE_CODE (t2))
799 return false;
800 if (AGGREGATE_TYPE_P (t1)
801 && (TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
802 return false;
803 
/* Canonicalize the pair by TYPE_UID so (a,b) and (b,a) hash the same. */
804 type_pair pair={t1,t2};
805 if (TYPE_UID (t1) > TYPE_UID (t2))
806 {
807 pair.first = t2;
808 pair.second = t1;
809 }
/* If this pair is already being compared, assume equivalence to break
   recursion cycles. */
810 if (visited->add (pair))
811 return true;
812 return odr_types_equivalent_p (t1, t2, false, NULL, visited, loc1, loc2);
813 }
814
815 /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
816 violation warnings. */
817
818 void
819 compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
820 {
821 int n1, n2;
822
823 if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
824 {
825 odr_violation_reported = true;
826 if (DECL_VIRTUAL_P (prevailing->decl))
827 {
828 varpool_node *tmp = prevailing;
829 prevailing = vtable;
830 vtable = tmp;
831 }
832 if (warning_at (DECL_SOURCE_LOCATION
833 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
834 OPT_Wodr,
835 "virtual table of type %qD violates one definition rule",
836 DECL_CONTEXT (vtable->decl)))
837 inform (DECL_SOURCE_LOCATION (prevailing->decl),
838 "variable of same assembler name as the virtual table is "
839 "defined in another translation unit");
840 return;
841 }
842 if (!prevailing->definition || !vtable->definition)
843 return;
844
845 /* If we do not stream ODR type info, do not bother to do useful compare. */
846 if (!TYPE_BINFO (DECL_CONTEXT (vtable->decl))
847 || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable->decl))))
848 return;
849
850 odr_type class_type = get_odr_type (DECL_CONTEXT (vtable->decl), true);
851
852 if (class_type->odr_violated)
853 return;
854
855 for (n1 = 0, n2 = 0; true; n1++, n2++)
856 {
857 struct ipa_ref *ref1, *ref2;
858 bool end1, end2;
859
860 end1 = !prevailing->iterate_reference (n1, ref1);
861 end2 = !vtable->iterate_reference (n2, ref2);
862
863 /* !DECL_VIRTUAL_P means RTTI entry;
864 We warn when RTTI is lost because non-RTTI previals; we silently
865 accept the other case. */
866 while (!end2
867 && (end1
868 || (DECL_ASSEMBLER_NAME (ref1->referred->decl)
869 != DECL_ASSEMBLER_NAME (ref2->referred->decl)
870 && TREE_CODE (ref1->referred->decl) == FUNCTION_DECL))
871 && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
872 {
873 if (!class_type->rtti_broken
874 && warning_at (DECL_SOURCE_LOCATION
875 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
876 OPT_Wodr,
877 "virtual table of type %qD contains RTTI "
878 "information",
879 DECL_CONTEXT (vtable->decl)))
880 {
881 inform (DECL_SOURCE_LOCATION
882 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
883 "but is prevailed by one without from other translation "
884 "unit");
885 inform (DECL_SOURCE_LOCATION
886 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
887 "RTTI will not work on this type");
888 class_type->rtti_broken = true;
889 }
890 n2++;
891 end2 = !vtable->iterate_reference (n2, ref2);
892 }
893 while (!end1
894 && (end2
895 || (DECL_ASSEMBLER_NAME (ref2->referred->decl)
896 != DECL_ASSEMBLER_NAME (ref1->referred->decl)
897 && TREE_CODE (ref2->referred->decl) == FUNCTION_DECL))
898 && TREE_CODE (ref1->referred->decl) != FUNCTION_DECL)
899 {
900 n1++;
901 end1 = !prevailing->iterate_reference (n1, ref1);
902 }
903
904 /* Finished? */
905 if (end1 && end2)
906 {
907 /* Extra paranoia; compare the sizes. We do not have information
908 about virtual inheritance offsets, so just be sure that these
909 match.
910 Do this as very last check so the not very informative error
911 is not output too often. */
912 if (DECL_SIZE (prevailing->decl) != DECL_SIZE (vtable->decl))
913 {
914 class_type->odr_violated = true;
915 if (warning_at (DECL_SOURCE_LOCATION
916 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
917 OPT_Wodr,
918 "virtual table of type %qD violates "
919 "one definition rule ",
920 DECL_CONTEXT (vtable->decl)))
921 {
922 inform (DECL_SOURCE_LOCATION
923 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
924 "the conflicting type defined in another translation "
925 "unit has virtual table of different size");
926 }
927 }
928 return;
929 }
930
931 if (!end1 && !end2)
932 {
933 if (DECL_ASSEMBLER_NAME (ref1->referred->decl)
934 == DECL_ASSEMBLER_NAME (ref2->referred->decl))
935 continue;
936
937 class_type->odr_violated = true;
938
939 /* If the loops above stopped on non-virtual pointer, we have
940 mismatch in RTTI information mangling. */
941 if (TREE_CODE (ref1->referred->decl) != FUNCTION_DECL
942 && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
943 {
944 if (warning_at (DECL_SOURCE_LOCATION
945 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
946 OPT_Wodr,
947 "virtual table of type %qD violates "
948 "one definition rule ",
949 DECL_CONTEXT (vtable->decl)))
950 {
951 inform (DECL_SOURCE_LOCATION
952 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
953 "the conflicting type defined in another translation "
954 "unit with different RTTI information");
955 }
956 return;
957 }
958 /* At this point both REF1 and REF2 points either to virtual table
959 or virtual method. If one points to virtual table and other to
960 method we can complain the same way as if one table was shorter
961 than other pointing out the extra method. */
962 if (TREE_CODE (ref1->referred->decl)
963 != TREE_CODE (ref2->referred->decl))
964 {
965 if (TREE_CODE (ref1->referred->decl) == VAR_DECL)
966 end1 = true;
967 else if (TREE_CODE (ref2->referred->decl) == VAR_DECL)
968 end2 = true;
969 }
970 }
971
972 class_type->odr_violated = true;
973
974 /* Complain about size mismatch. Either we have too many virutal
975 functions or too many virtual table pointers. */
976 if (end1 || end2)
977 {
978 if (end1)
979 {
980 varpool_node *tmp = prevailing;
981 prevailing = vtable;
982 vtable = tmp;
983 ref1 = ref2;
984 }
985 if (warning_at (DECL_SOURCE_LOCATION
986 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
987 OPT_Wodr,
988 "virtual table of type %qD violates "
989 "one definition rule",
990 DECL_CONTEXT (vtable->decl)))
991 {
992 if (TREE_CODE (ref1->referring->decl) == FUNCTION_DECL)
993 {
994 inform (DECL_SOURCE_LOCATION
995 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
996 "the conflicting type defined in another translation "
997 "unit");
998 inform (DECL_SOURCE_LOCATION
999 (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
1000 "contains additional virtual method %qD",
1001 ref1->referred->decl);
1002 }
1003 else
1004 {
1005 inform (DECL_SOURCE_LOCATION
1006 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
1007 "the conflicting type defined in another translation "
1008 "unit has virtual table table with more entries");
1009 }
1010 }
1011 return;
1012 }
1013
1014 /* And in the last case we have either mistmatch in between two virtual
1015 methods or two virtual table pointers. */
1016 if (warning_at (DECL_SOURCE_LOCATION
1017 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), OPT_Wodr,
1018 "virtual table of type %qD violates "
1019 "one definition rule ",
1020 DECL_CONTEXT (vtable->decl)))
1021 {
1022 if (TREE_CODE (ref1->referred->decl) == FUNCTION_DECL)
1023 {
1024 inform (DECL_SOURCE_LOCATION
1025 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
1026 "the conflicting type defined in another translation "
1027 "unit");
1028 gcc_assert (TREE_CODE (ref2->referred->decl)
1029 == FUNCTION_DECL);
1030 inform (DECL_SOURCE_LOCATION (ref1->referred->decl),
1031 "virtual method %qD", ref1->referred->decl);
1032 inform (DECL_SOURCE_LOCATION (ref2->referred->decl),
1033 "ought to match virtual method %qD but does not",
1034 ref2->referred->decl);
1035 }
1036 else
1037 inform (DECL_SOURCE_LOCATION
1038 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
1039 "the conflicting type defined in another translation "
1040 "unit has virtual table table with different contents");
1041 return;
1042 }
1043 }
1044 }
1045
1046 /* Output ODR violation warning about T1 and T2 with REASON.
1047 Display location of ST1 and ST2 if REASON speaks about field or
1048 method of the type.
1049 If WARN is false, do nothing. Set WARNED if warning was indeed
1050 output. */
1051
void
warn_odr (tree t1, tree t2, tree st1, tree st2,
	  bool warn, bool *warned, const char *reason)
{
  /* DECL2 tracks the declaration whose location receives the final
     REASON note; it may be redirected to ST2 below so the note points
     at the mismatching member rather than the whole type.  */
  tree decl2 = TYPE_NAME (t2);
  if (warned)
    *warned = false;

  /* Without a TYPE_NAME on T1 we have no location to anchor the warning.  */
  if (!warn || !TYPE_NAME(t1))
    return;

  /* ODR warnings are output during LTO streaming; we must apply location
     cache for potential warnings to be output correctly.  */
  if (lto_location_cache::current_cache)
    lto_location_cache::current_cache->apply_location_cache ();

  /* Main warning; if the user suppressed it (-Wno-odr or pragma), skip
     all the follow-up notes too.  */
  if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
		   "type %qT violates the C++ One Definition Rule",
		   t1))
    return;
  if (!st1 && !st2)
    ;
  /* For FIELD_DECL support also case where one of fields is
     NULL - this is used when the structures have mismatching number of
     elements.  */
  else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      /* Normalize so ST1 is the non-NULL field we point at.  */
      if (!st1)
	{
	  st1 = st2;
	  st2 = NULL;
	}
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is field %qD",
	      st1);
      /* If both fields exist, let the REASON note point at the other one.  */
      if (st2)
	decl2 = st2;
    }
  else if (TREE_CODE (st1) == FUNCTION_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is method %qD",
	      st1);
      decl2 = st2;
    }
  else
    return;
  /* Finally explain why the types were considered different.  */
  inform (DECL_SOURCE_LOCATION (decl2), reason);

  if (warned)
    *warned = true;
}
1108
/* Return true if T1 and T2 are incompatible and we want to recursively
   dive into them from warn_type_mismatch to give a sensible answer.  */
1111
1112 static bool
1113 type_mismatch_p (tree t1, tree t2)
1114 {
1115 if (odr_or_derived_type_p (t1) && odr_or_derived_type_p (t2)
1116 && !odr_types_equivalent_p (t1, t2))
1117 return true;
1118 return !types_compatible_p (t1, t2);
1119 }
1120
1121
/* Types T1 and T2 were found to be incompatible in a context they can't be
   (either used to declare a symbol of the same assembler name or unified by
   the ODR rule).  We already output a warning about this, but if possible,
   output extra information on how the types mismatch.

   This is hard to do in general.  We basically handle the common cases.

   If LOC1 and LOC2 are meaningful locations, use them in the case the types
   themselves do not have one.  */
1131
1132 void
1133 warn_types_mismatch (tree t1, tree t2, location_t loc1, location_t loc2)
1134 {
1135 /* Location of type is known only if it has TYPE_NAME and the name is
1136 TYPE_DECL. */
1137 location_t loc_t1 = TYPE_NAME (t1) && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1138 ? DECL_SOURCE_LOCATION (TYPE_NAME (t1))
1139 : UNKNOWN_LOCATION;
1140 location_t loc_t2 = TYPE_NAME (t2) && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
1141 ? DECL_SOURCE_LOCATION (TYPE_NAME (t2))
1142 : UNKNOWN_LOCATION;
1143 bool loc_t2_useful = false;
1144
1145 /* With LTO it is a common case that the location of both types match.
1146 See if T2 has a location that is different from T1. If so, we will
1147 inform user about the location.
1148 Do not consider the location passed to us in LOC1/LOC2 as those are
1149 already output. */
1150 if (loc_t2 > BUILTINS_LOCATION && loc_t2 != loc_t1)
1151 {
1152 if (loc_t1 <= BUILTINS_LOCATION)
1153 loc_t2_useful = true;
1154 else
1155 {
1156 expanded_location xloc1 = expand_location (loc_t1);
1157 expanded_location xloc2 = expand_location (loc_t2);
1158
1159 if (strcmp (xloc1.file, xloc2.file)
1160 || xloc1.line != xloc2.line
1161 || xloc1.column != xloc2.column)
1162 loc_t2_useful = true;
1163 }
1164 }
1165
1166 if (loc_t1 <= BUILTINS_LOCATION)
1167 loc_t1 = loc1;
1168 if (loc_t2 <= BUILTINS_LOCATION)
1169 loc_t2 = loc2;
1170
1171 location_t loc = loc_t1 <= BUILTINS_LOCATION ? loc_t2 : loc_t1;
1172
1173 /* It is a quite common bug to reference anonymous namespace type in
1174 non-anonymous namespace class. */
1175 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
1176 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
1177 {
1178 if (type_with_linkage_p (t1) && !type_in_anonymous_namespace_p (t1))
1179 {
1180 std::swap (t1, t2);
1181 std::swap (loc_t1, loc_t2);
1182 }
1183 gcc_assert (TYPE_NAME (t1) && TYPE_NAME (t2)
1184 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1185 && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL);
1186 /* Most of the time, the type names will match, do not be unnecesarily
1187 verbose. */
1188 if (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (t1)))
1189 != IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (t2))))
1190 inform (loc_t1,
1191 "type %qT defined in anonymous namespace can not match "
1192 "type %qT across the translation unit boundary",
1193 t1, t2);
1194 else
1195 inform (loc_t1,
1196 "type %qT defined in anonymous namespace can not match "
1197 "across the translation unit boundary",
1198 t1);
1199 if (loc_t2_useful)
1200 inform (loc_t2,
1201 "the incompatible type defined in another translation unit");
1202 return;
1203 }
1204 /* If types have mangled ODR names and they are different, it is most
1205 informative to output those.
1206 This also covers types defined in different namespaces. */
1207 if (TYPE_NAME (t1) && TYPE_NAME (t2)
1208 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1209 && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
1210 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t1))
1211 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t2))
1212 && DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
1213 != DECL_ASSEMBLER_NAME (TYPE_NAME (t2)))
1214 {
1215 char *name1 = xstrdup (cplus_demangle
1216 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))),
1217 DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES));
1218 char *name2 = cplus_demangle
1219 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t2))),
1220 DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES);
1221 if (name1 && name2 && strcmp (name1, name2))
1222 {
1223 inform (loc_t1,
1224 "type name %<%s%> should match type name %<%s%>",
1225 name1, name2);
1226 if (loc_t2_useful)
1227 inform (loc_t2,
1228 "the incompatible type is defined here");
1229 free (name1);
1230 return;
1231 }
1232 free (name1);
1233 }
1234 /* A tricky case are compound types. Often they appear the same in source
1235 code and the mismatch is dragged in by type they are build from.
1236 Look for those differences in subtypes and try to be informative. In other
1237 cases just output nothing because the source code is probably different
1238 and in this case we already output a all necessary info. */
1239 if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
1240 {
1241 if (TREE_CODE (t1) == TREE_CODE (t2))
1242 {
1243 if (TREE_CODE (t1) == ARRAY_TYPE
1244 && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1245 {
1246 tree i1 = TYPE_DOMAIN (t1);
1247 tree i2 = TYPE_DOMAIN (t2);
1248
1249 if (i1 && i2
1250 && TYPE_MAX_VALUE (i1)
1251 && TYPE_MAX_VALUE (i2)
1252 && !operand_equal_p (TYPE_MAX_VALUE (i1),
1253 TYPE_MAX_VALUE (i2), 0))
1254 {
1255 inform (loc,
1256 "array types have different bounds");
1257 return;
1258 }
1259 }
1260 if ((POINTER_TYPE_P (t1) || TREE_CODE (t1) == ARRAY_TYPE)
1261 && type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1262 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1, loc_t2);
1263 else if (TREE_CODE (t1) == METHOD_TYPE
1264 || TREE_CODE (t1) == FUNCTION_TYPE)
1265 {
1266 tree parms1 = NULL, parms2 = NULL;
1267 int count = 1;
1268
1269 if (type_mismatch_p (TREE_TYPE (t1), TREE_TYPE (t2)))
1270 {
1271 inform (loc, "return value type mismatch");
1272 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc_t1,
1273 loc_t2);
1274 return;
1275 }
1276 if (prototype_p (t1) && prototype_p (t2))
1277 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1278 parms1 && parms2;
1279 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2),
1280 count++)
1281 {
1282 if (type_mismatch_p (TREE_VALUE (parms1), TREE_VALUE (parms2)))
1283 {
1284 if (count == 1 && TREE_CODE (t1) == METHOD_TYPE)
1285 inform (loc,
1286 "implicit this pointer type mismatch");
1287 else
1288 inform (loc,
1289 "type mismatch in parameter %i",
1290 count - (TREE_CODE (t1) == METHOD_TYPE));
1291 warn_types_mismatch (TREE_VALUE (parms1),
1292 TREE_VALUE (parms2),
1293 loc_t1, loc_t2);
1294 return;
1295 }
1296 }
1297 if (parms1 || parms2)
1298 {
1299 inform (loc,
1300 "types have different parameter counts");
1301 return;
1302 }
1303 }
1304 }
1305 return;
1306 }
1307
1308 if (types_odr_comparable (t1, t2, true)
1309 && types_same_for_odr (t1, t2, true))
1310 inform (loc_t1,
1311 "type %qT itself violate the C++ One Definition Rule", t1);
1312 /* Prevent pointless warnings like "struct aa" should match "struct aa". */
1313 else if (TYPE_NAME (t1) == TYPE_NAME (t2)
1314 && TREE_CODE (t1) == TREE_CODE (t2) && !loc_t2_useful)
1315 return;
1316 else
1317 inform (loc_t1, "type %qT should match type %qT",
1318 t1, t2);
1319 if (loc_t2_useful)
1320 inform (loc_t2, "the incompatible type is defined here");
1321 }
1322
/* Compare T1 and T2, report ODR violations if WARN is true and set
   WARNED to true if anything is reported.  Return true if types match.
   If true is returned, the types are also compatible in the sense of
   gimple_canonical_types_compatible_p.
   If LOC1 and LOC2 are not UNKNOWN_LOCATION they may be used to output a
   warning about the type if the type itself does not have a location.  */
1329
1330 static bool
1331 odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
1332 hash_set<type_pair> *visited,
1333 location_t loc1, location_t loc2)
1334 {
1335 /* Check first for the obvious case of pointer identity. */
1336 if (t1 == t2)
1337 return true;
1338 gcc_assert (!type_with_linkage_p (t1) || !type_in_anonymous_namespace_p (t1));
1339 gcc_assert (!type_with_linkage_p (t2) || !type_in_anonymous_namespace_p (t2));
1340
1341 /* Can't be the same type if the types don't have the same code. */
1342 if (TREE_CODE (t1) != TREE_CODE (t2))
1343 {
1344 warn_odr (t1, t2, NULL, NULL, warn, warned,
1345 G_("a different type is defined in another translation unit"));
1346 return false;
1347 }
1348
1349 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
1350 {
1351 warn_odr (t1, t2, NULL, NULL, warn, warned,
1352 G_("a type with different qualifiers is defined in another "
1353 "translation unit"));
1354 return false;
1355 }
1356
1357 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
1358 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
1359 {
1360 /* We can not trip this when comparing ODR types, only when trying to
1361 match different ODR derivations from different declarations.
1362 So WARN should be always false. */
1363 gcc_assert (!warn);
1364 return false;
1365 }
1366
1367 if (comp_type_attributes (t1, t2) != 1)
1368 {
1369 warn_odr (t1, t2, NULL, NULL, warn, warned,
1370 G_("a type with different attributes "
1371 "is defined in another translation unit"));
1372 return false;
1373 }
1374
1375 if (TREE_CODE (t1) == ENUMERAL_TYPE
1376 && TYPE_VALUES (t1) && TYPE_VALUES (t2))
1377 {
1378 tree v1, v2;
1379 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
1380 v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
1381 {
1382 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
1383 {
1384 warn_odr (t1, t2, NULL, NULL, warn, warned,
1385 G_("an enum with different value name"
1386 " is defined in another translation unit"));
1387 return false;
1388 }
1389 if (TREE_VALUE (v1) != TREE_VALUE (v2)
1390 && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
1391 DECL_INITIAL (TREE_VALUE (v2)), 0))
1392 {
1393 warn_odr (t1, t2, NULL, NULL, warn, warned,
1394 G_("an enum with different values is defined"
1395 " in another translation unit"));
1396 return false;
1397 }
1398 }
1399 if (v1 || v2)
1400 {
1401 warn_odr (t1, t2, NULL, NULL, warn, warned,
1402 G_("an enum with mismatching number of values "
1403 "is defined in another translation unit"));
1404 return false;
1405 }
1406 }
1407
1408 /* Non-aggregate types can be handled cheaply. */
1409 if (INTEGRAL_TYPE_P (t1)
1410 || SCALAR_FLOAT_TYPE_P (t1)
1411 || FIXED_POINT_TYPE_P (t1)
1412 || TREE_CODE (t1) == VECTOR_TYPE
1413 || TREE_CODE (t1) == COMPLEX_TYPE
1414 || TREE_CODE (t1) == OFFSET_TYPE
1415 || POINTER_TYPE_P (t1))
1416 {
1417 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
1418 {
1419 warn_odr (t1, t2, NULL, NULL, warn, warned,
1420 G_("a type with different precision is defined "
1421 "in another translation unit"));
1422 return false;
1423 }
1424 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
1425 {
1426 warn_odr (t1, t2, NULL, NULL, warn, warned,
1427 G_("a type with different signedness is defined "
1428 "in another translation unit"));
1429 return false;
1430 }
1431
1432 if (TREE_CODE (t1) == INTEGER_TYPE
1433 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
1434 {
1435 /* char WRT uint_8? */
1436 warn_odr (t1, t2, NULL, NULL, warn, warned,
1437 G_("a different type is defined in another "
1438 "translation unit"));
1439 return false;
1440 }
1441
1442 /* For canonical type comparisons we do not want to build SCCs
1443 so we cannot compare pointed-to types. But we can, for now,
1444 require the same pointed-to type kind and match what
1445 useless_type_conversion_p would do. */
1446 if (POINTER_TYPE_P (t1))
1447 {
1448 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
1449 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
1450 {
1451 warn_odr (t1, t2, NULL, NULL, warn, warned,
1452 G_("it is defined as a pointer in different address "
1453 "space in another translation unit"));
1454 return false;
1455 }
1456
1457 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1458 visited, loc1, loc2))
1459 {
1460 warn_odr (t1, t2, NULL, NULL, warn, warned,
1461 G_("it is defined as a pointer to different type "
1462 "in another translation unit"));
1463 if (warn && warned)
1464 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2),
1465 loc1, loc2);
1466 return false;
1467 }
1468 }
1469
1470 if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
1471 && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1472 visited, loc1, loc2))
1473 {
1474 /* Probably specific enough. */
1475 warn_odr (t1, t2, NULL, NULL, warn, warned,
1476 G_("a different type is defined "
1477 "in another translation unit"));
1478 if (warn && warned)
1479 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1480 return false;
1481 }
1482 }
1483 /* Do type-specific comparisons. */
1484 else switch (TREE_CODE (t1))
1485 {
1486 case ARRAY_TYPE:
1487 {
1488 /* Array types are the same if the element types are the same and
1489 the number of elements are the same. */
1490 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1491 visited, loc1, loc2))
1492 {
1493 warn_odr (t1, t2, NULL, NULL, warn, warned,
1494 G_("a different type is defined in another "
1495 "translation unit"));
1496 if (warn && warned)
1497 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1498 }
1499 gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
1500 gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
1501 == TYPE_NONALIASED_COMPONENT (t2));
1502
1503 tree i1 = TYPE_DOMAIN (t1);
1504 tree i2 = TYPE_DOMAIN (t2);
1505
1506 /* For an incomplete external array, the type domain can be
1507 NULL_TREE. Check this condition also. */
1508 if (i1 == NULL_TREE || i2 == NULL_TREE)
1509 return true;
1510
1511 tree min1 = TYPE_MIN_VALUE (i1);
1512 tree min2 = TYPE_MIN_VALUE (i2);
1513 tree max1 = TYPE_MAX_VALUE (i1);
1514 tree max2 = TYPE_MAX_VALUE (i2);
1515
1516 /* In C++, minimums should be always 0. */
1517 gcc_assert (min1 == min2);
1518 if (!operand_equal_p (max1, max2, 0))
1519 {
1520 warn_odr (t1, t2, NULL, NULL, warn, warned,
1521 G_("an array of different size is defined "
1522 "in another translation unit"));
1523 return false;
1524 }
1525 }
1526 break;
1527
1528 case METHOD_TYPE:
1529 case FUNCTION_TYPE:
1530 /* Function types are the same if the return type and arguments types
1531 are the same. */
1532 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1533 visited, loc1, loc2))
1534 {
1535 warn_odr (t1, t2, NULL, NULL, warn, warned,
1536 G_("has different return value "
1537 "in another translation unit"));
1538 if (warn && warned)
1539 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2), loc1, loc2);
1540 return false;
1541 }
1542
1543 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
1544 || !prototype_p (t1) || !prototype_p (t2))
1545 return true;
1546 else
1547 {
1548 tree parms1, parms2;
1549
1550 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1551 parms1 && parms2;
1552 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
1553 {
1554 if (!odr_subtypes_equivalent_p
1555 (TREE_VALUE (parms1), TREE_VALUE (parms2), visited,
1556 loc1, loc2))
1557 {
1558 warn_odr (t1, t2, NULL, NULL, warn, warned,
1559 G_("has different parameters in another "
1560 "translation unit"));
1561 if (warn && warned)
1562 warn_types_mismatch (TREE_VALUE (parms1),
1563 TREE_VALUE (parms2), loc1, loc2);
1564 return false;
1565 }
1566 }
1567
1568 if (parms1 || parms2)
1569 {
1570 warn_odr (t1, t2, NULL, NULL, warn, warned,
1571 G_("has different parameters "
1572 "in another translation unit"));
1573 return false;
1574 }
1575
1576 return true;
1577 }
1578
1579 case RECORD_TYPE:
1580 case UNION_TYPE:
1581 case QUAL_UNION_TYPE:
1582 {
1583 tree f1, f2;
1584
1585 /* For aggregate types, all the fields must be the same. */
1586 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1587 {
1588 if (TYPE_BINFO (t1) && TYPE_BINFO (t2)
1589 && polymorphic_type_binfo_p (TYPE_BINFO (t1))
1590 != polymorphic_type_binfo_p (TYPE_BINFO (t2)))
1591 {
1592 if (polymorphic_type_binfo_p (TYPE_BINFO (t1)))
1593 warn_odr (t1, t2, NULL, NULL, warn, warned,
1594 G_("a type defined in another translation unit "
1595 "is not polymorphic"));
1596 else
1597 warn_odr (t1, t2, NULL, NULL, warn, warned,
1598 G_("a type defined in another translation unit "
1599 "is polymorphic"));
1600 return false;
1601 }
1602 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1603 f1 || f2;
1604 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1605 {
1606 /* Skip non-fields. */
1607 while (f1 && TREE_CODE (f1) != FIELD_DECL)
1608 f1 = TREE_CHAIN (f1);
1609 while (f2 && TREE_CODE (f2) != FIELD_DECL)
1610 f2 = TREE_CHAIN (f2);
1611 if (!f1 || !f2)
1612 break;
1613 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1614 {
1615 warn_odr (t1, t2, NULL, NULL, warn, warned,
1616 G_("a type with different virtual table pointers"
1617 " is defined in another translation unit"));
1618 return false;
1619 }
1620 if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
1621 {
1622 warn_odr (t1, t2, NULL, NULL, warn, warned,
1623 G_("a type with different bases is defined "
1624 "in another translation unit"));
1625 return false;
1626 }
1627 if (DECL_NAME (f1) != DECL_NAME (f2)
1628 && !DECL_ARTIFICIAL (f1))
1629 {
1630 warn_odr (t1, t2, f1, f2, warn, warned,
1631 G_("a field with different name is defined "
1632 "in another translation unit"));
1633 return false;
1634 }
1635 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
1636 TREE_TYPE (f2), visited,
1637 loc1, loc2))
1638 {
1639 /* Do not warn about artificial fields and just go into
1640 generic field mismatch warning. */
1641 if (DECL_ARTIFICIAL (f1))
1642 break;
1643
1644 warn_odr (t1, t2, f1, f2, warn, warned,
1645 G_("a field of same name but different type "
1646 "is defined in another translation unit"));
1647 if (warn && warned)
1648 warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2), loc1, loc2);
1649 return false;
1650 }
1651 if (!gimple_compare_field_offset (f1, f2))
1652 {
1653 /* Do not warn about artificial fields and just go into
1654 generic field mismatch warning. */
1655 if (DECL_ARTIFICIAL (f1))
1656 break;
1657 warn_odr (t1, t2, f1, f2, warn, warned,
1658 G_("fields has different layout "
1659 "in another translation unit"));
1660 return false;
1661 }
1662 gcc_assert (DECL_NONADDRESSABLE_P (f1)
1663 == DECL_NONADDRESSABLE_P (f2));
1664 }
1665
1666 /* If one aggregate has more fields than the other, they
1667 are not the same. */
1668 if (f1 || f2)
1669 {
1670 if ((f1 && DECL_VIRTUAL_P (f1)) || (f2 && DECL_VIRTUAL_P (f2)))
1671 warn_odr (t1, t2, NULL, NULL, warn, warned,
1672 G_("a type with different virtual table pointers"
1673 " is defined in another translation unit"));
1674 else if ((f1 && DECL_ARTIFICIAL (f1))
1675 || (f2 && DECL_ARTIFICIAL (f2)))
1676 warn_odr (t1, t2, NULL, NULL, warn, warned,
1677 G_("a type with different bases is defined "
1678 "in another translation unit"));
1679 else
1680 warn_odr (t1, t2, f1, f2, warn, warned,
1681 G_("a type with different number of fields "
1682 "is defined in another translation unit"));
1683
1684 return false;
1685 }
1686 if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
1687 && COMPLETE_TYPE_P (TYPE_MAIN_VARIANT (t1))
1688 && COMPLETE_TYPE_P (TYPE_MAIN_VARIANT (t2))
1689 && odr_type_p (TYPE_MAIN_VARIANT (t1))
1690 && odr_type_p (TYPE_MAIN_VARIANT (t2))
1691 && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
1692 != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
1693 {
1694 /* Currently free_lang_data sets TYPE_METHODS to error_mark_node
1695 if it is non-NULL so this loop will never realy execute. */
1696 if (TYPE_METHODS (TYPE_MAIN_VARIANT (t1)) != error_mark_node
1697 && TYPE_METHODS (TYPE_MAIN_VARIANT (t2)) != error_mark_node)
1698 for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
1699 f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
1700 f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
1701 {
1702 if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
1703 {
1704 warn_odr (t1, t2, f1, f2, warn, warned,
1705 G_("a different method of same type "
1706 "is defined in another "
1707 "translation unit"));
1708 return false;
1709 }
1710 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1711 {
1712 warn_odr (t1, t2, f1, f2, warn, warned,
1713 G_("s definition that differs by virtual "
1714 "keyword in another translation unit"));
1715 return false;
1716 }
1717 if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
1718 {
1719 warn_odr (t1, t2, f1, f2, warn, warned,
1720 G_("virtual table layout differs "
1721 "in another translation unit"));
1722 return false;
1723 }
1724 if (odr_subtypes_equivalent_p (TREE_TYPE (f1),
1725 TREE_TYPE (f2), visited,
1726 loc1, loc2))
1727 {
1728 warn_odr (t1, t2, f1, f2, warn, warned,
1729 G_("method with incompatible type is "
1730 "defined in another translation unit"));
1731 return false;
1732 }
1733 }
1734 if ((f1 == NULL) != (f2 == NULL))
1735 {
1736 warn_odr (t1, t2, NULL, NULL, warn, warned,
1737 G_("a type with different number of methods "
1738 "is defined in another translation unit"));
1739 return false;
1740 }
1741 }
1742 }
1743 break;
1744 }
1745 case VOID_TYPE:
1746 case NULLPTR_TYPE:
1747 break;
1748
1749 default:
1750 debug_tree (t1);
1751 gcc_unreachable ();
1752 }
1753
1754 /* Those are better to come last as they are utterly uninformative. */
1755 if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
1756 && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
1757 {
1758 warn_odr (t1, t2, NULL, NULL, warn, warned,
1759 G_("a type with different size "
1760 "is defined in another translation unit"));
1761 return false;
1762 }
1763 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
1764 && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
1765 {
1766 warn_odr (t1, t2, NULL, NULL, warn, warned,
1767 G_("a type with different alignment "
1768 "is defined in another translation unit"));
1769 return false;
1770 }
1771 gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
1772 || operand_equal_p (TYPE_SIZE_UNIT (t1),
1773 TYPE_SIZE_UNIT (t2), 0));
1774 return true;
1775 }
1776
1777 /* Return true if TYPE1 and TYPE2 are equivalent for One Definition Rule. */
1778
1779 bool
1780 odr_types_equivalent_p (tree type1, tree type2)
1781 {
1782 hash_set<type_pair> visited;
1783
1784 #ifdef ENABLE_CHECKING
1785 gcc_assert (odr_or_derived_type_p (type1) && odr_or_derived_type_p (type2));
1786 #endif
1787 return odr_types_equivalent_p (type1, type2, false, NULL,
1788 &visited, UNKNOWN_LOCATION, UNKNOWN_LOCATION);
1789 }
1790
/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type or due to ODR violation.

   Analyze and report ODR violations and add the type to the duplicate list.
   If TYPE is more specified than VAL->type, let VAL->type prevail.  Also if
   this is the first time we see a definition of a class, return true so the
   base types are analyzed.  */
1799
1800 static bool
1801 add_type_duplicate (odr_type val, tree type)
1802 {
1803 bool build_bases = false;
1804 bool prevail = false;
1805 bool odr_must_violate = false;
1806
1807 if (!val->types_set)
1808 val->types_set = new hash_set<tree>;
1809
1810 /* Chose polymorphic type as leader (this happens only in case of ODR
1811 violations. */
1812 if ((TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1813 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1814 && (TREE_CODE (val->type) != RECORD_TYPE || !TYPE_BINFO (val->type)
1815 || !polymorphic_type_binfo_p (TYPE_BINFO (val->type))))
1816 {
1817 prevail = true;
1818 build_bases = true;
1819 }
1820 /* Always prefer complete type to be the leader. */
1821 else if (!COMPLETE_TYPE_P (val->type) && COMPLETE_TYPE_P (type))
1822 {
1823 prevail = true;
1824 build_bases = TYPE_BINFO (type);
1825 }
1826 else if (COMPLETE_TYPE_P (val->type) && !COMPLETE_TYPE_P (type))
1827 ;
1828 else if (TREE_CODE (val->type) == ENUMERAL_TYPE
1829 && TREE_CODE (type) == ENUMERAL_TYPE
1830 && !TYPE_VALUES (val->type) && TYPE_VALUES (type))
1831 prevail = true;
1832 else if (TREE_CODE (val->type) == RECORD_TYPE
1833 && TREE_CODE (type) == RECORD_TYPE
1834 && TYPE_BINFO (type) && !TYPE_BINFO (val->type))
1835 {
1836 gcc_assert (!val->bases.length ());
1837 build_bases = true;
1838 prevail = true;
1839 }
1840
1841 if (prevail)
1842 std::swap (val->type, type);
1843
1844 val->types_set->add (type);
1845
1846 /* If we now have a mangled name, be sure to record it to val->type
1847 so ODR hash can work. */
1848
1849 if (can_be_name_hashed_p (type) && !can_be_name_hashed_p (val->type))
1850 SET_DECL_ASSEMBLER_NAME (TYPE_NAME (val->type),
1851 DECL_ASSEMBLER_NAME (TYPE_NAME (type)));
1852
1853 bool merge = true;
1854 bool base_mismatch = false;
1855 unsigned int i;
1856 bool warned = false;
1857 hash_set<type_pair> visited;
1858
1859 gcc_assert (in_lto_p);
1860 vec_safe_push (val->types, type);
1861
1862 /* If both are class types, compare the bases. */
1863 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1864 && TREE_CODE (val->type) == RECORD_TYPE
1865 && TREE_CODE (type) == RECORD_TYPE
1866 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
1867 {
1868 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1869 != BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1870 {
1871 if (!flag_ltrans && !warned && !val->odr_violated)
1872 {
1873 tree extra_base;
1874 warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1875 "a type with the same name but different "
1876 "number of polymorphic bases is "
1877 "defined in another translation unit");
1878 if (warned)
1879 {
1880 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1881 > BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1882 extra_base = BINFO_BASE_BINFO
1883 (TYPE_BINFO (type),
1884 BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)));
1885 else
1886 extra_base = BINFO_BASE_BINFO
1887 (TYPE_BINFO (val->type),
1888 BINFO_N_BASE_BINFOS (TYPE_BINFO (type)));
1889 tree extra_base_type = BINFO_TYPE (extra_base);
1890 inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type)),
1891 "the extra base is defined here");
1892 }
1893 }
1894 base_mismatch = true;
1895 }
1896 else
1897 for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1898 {
1899 tree base1 = BINFO_BASE_BINFO (TYPE_BINFO (type), i);
1900 tree base2 = BINFO_BASE_BINFO (TYPE_BINFO (val->type), i);
1901 tree type1 = BINFO_TYPE (base1);
1902 tree type2 = BINFO_TYPE (base2);
1903
1904 if (types_odr_comparable (type1, type2))
1905 {
1906 if (!types_same_for_odr (type1, type2))
1907 base_mismatch = true;
1908 }
1909 else
1910 if (!odr_types_equivalent_p (type1, type2))
1911 base_mismatch = true;
1912 if (base_mismatch)
1913 {
1914 if (!warned && !val->odr_violated)
1915 {
1916 warn_odr (type, val->type, NULL, NULL,
1917 !warned, &warned,
1918 "a type with the same name but different base "
1919 "type is defined in another translation unit");
1920 if (warned)
1921 warn_types_mismatch (type1, type2,
1922 UNKNOWN_LOCATION, UNKNOWN_LOCATION);
1923 }
1924 break;
1925 }
1926 if (BINFO_OFFSET (base1) != BINFO_OFFSET (base2))
1927 {
1928 base_mismatch = true;
1929 if (!warned && !val->odr_violated)
1930 warn_odr (type, val->type, NULL, NULL,
1931 !warned, &warned,
1932 "a type with the same name but different base "
1933 "layout is defined in another translation unit");
1934 break;
1935 }
1936 /* One of bases is not of complete type. */
1937 if (!TYPE_BINFO (type1) != !TYPE_BINFO (type2))
1938 {
1939 /* If we have a polymorphic type info specified for TYPE1
1940 but not for TYPE2 we possibly missed a base when recording
1941 VAL->type earlier.
1942 Be sure this does not happen. */
1943 if (TYPE_BINFO (type1)
1944 && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1945 && !build_bases)
1946 odr_must_violate = true;
1947 break;
1948 }
1949 /* One base is polymorphic and the other not.
1950 This ought to be diagnosed earlier, but do not ICE in the
1951 checking bellow. */
1952 else if (TYPE_BINFO (type1)
1953 && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1954 != polymorphic_type_binfo_p (TYPE_BINFO (type2)))
1955 {
1956 if (!warned && !val->odr_violated)
1957 warn_odr (type, val->type, NULL, NULL,
1958 !warned, &warned,
1959 "a base of the type is polymorphic only in one "
1960 "translation unit");
1961 base_mismatch = true;
1962 break;
1963 }
1964 }
1965 if (base_mismatch)
1966 {
1967 merge = false;
1968 odr_violation_reported = true;
1969 val->odr_violated = true;
1970
1971 if (symtab->dump_file)
1972 {
1973 fprintf (symtab->dump_file, "ODR base violation\n");
1974
1975 print_node (symtab->dump_file, "", val->type, 0);
1976 putc ('\n',symtab->dump_file);
1977 print_node (symtab->dump_file, "", type, 0);
1978 putc ('\n',symtab->dump_file);
1979 }
1980 }
1981 }
1982
1983 /* Next compare memory layout. */
1984 if (!odr_types_equivalent_p (val->type, type,
1985 !flag_ltrans && !val->odr_violated && !warned,
1986 &warned, &visited,
1987 DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
1988 DECL_SOURCE_LOCATION (TYPE_NAME (type))))
1989 {
1990 merge = false;
1991 odr_violation_reported = true;
1992 val->odr_violated = true;
1993 if (symtab->dump_file)
1994 {
1995 fprintf (symtab->dump_file, "ODR violation\n");
1996
1997 print_node (symtab->dump_file, "", val->type, 0);
1998 putc ('\n',symtab->dump_file);
1999 print_node (symtab->dump_file, "", type, 0);
2000 putc ('\n',symtab->dump_file);
2001 }
2002 }
2003 gcc_assert (val->odr_violated || !odr_must_violate);
2004 /* Sanity check that all bases will be build same way again. */
2005 #ifdef ENABLE_CHECKING
2006 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
2007 && TREE_CODE (val->type) == RECORD_TYPE
2008 && TREE_CODE (type) == RECORD_TYPE
2009 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
2010 && !val->odr_violated
2011 && !base_mismatch && val->bases.length ())
2012 {
2013 unsigned int num_poly_bases = 0;
2014 unsigned int j;
2015
2016 for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
2017 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
2018 (TYPE_BINFO (type), i)))
2019 num_poly_bases++;
2020 gcc_assert (num_poly_bases == val->bases.length ());
2021 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type));
2022 i++)
2023 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
2024 (TYPE_BINFO (type), i)))
2025 {
2026 odr_type base = get_odr_type
2027 (BINFO_TYPE
2028 (BINFO_BASE_BINFO (TYPE_BINFO (type),
2029 i)),
2030 true);
2031 gcc_assert (val->bases[j] == base);
2032 j++;
2033 }
2034 }
2035 #endif
2036
2037
2038 /* Regularize things a little. During LTO same types may come with
2039 different BINFOs. Either because their virtual table was
2040 not merged by tree merging and only later at decl merging or
2041 because one type comes with external vtable, while other
2042 with internal. We want to merge equivalent binfos to conserve
2043 memory and streaming overhead.
2044
2045 The external vtables are more harmful: they contain references
2046 to external declarations of methods that may be defined in the
2047 merged LTO unit. For this reason we absolutely need to remove
2048 them and replace by internal variants. Not doing so will lead
2049 to incomplete answers from possible_polymorphic_call_targets.
2050
2051 FIXME: disable for now; because ODR types are now build during
2052 streaming in, the variants do not need to be linked to the type,
2053 yet. We need to do the merging in cleanup pass to be implemented
2054 soon. */
2055 if (!flag_ltrans && merge
2056 && 0
2057 && TREE_CODE (val->type) == RECORD_TYPE
2058 && TREE_CODE (type) == RECORD_TYPE
2059 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
2060 && TYPE_MAIN_VARIANT (type) == type
2061 && TYPE_MAIN_VARIANT (val->type) == val->type
2062 && BINFO_VTABLE (TYPE_BINFO (val->type))
2063 && BINFO_VTABLE (TYPE_BINFO (type)))
2064 {
2065 tree master_binfo = TYPE_BINFO (val->type);
2066 tree v1 = BINFO_VTABLE (master_binfo);
2067 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
2068
2069 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
2070 {
2071 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
2072 && operand_equal_p (TREE_OPERAND (v1, 1),
2073 TREE_OPERAND (v2, 1), 0));
2074 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
2075 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
2076 }
2077 gcc_assert (DECL_ASSEMBLER_NAME (v1)
2078 == DECL_ASSEMBLER_NAME (v2));
2079
2080 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
2081 {
2082 unsigned int i;
2083
2084 set_type_binfo (val->type, TYPE_BINFO (type));
2085 for (i = 0; i < val->types->length (); i++)
2086 {
2087 if (TYPE_BINFO ((*val->types)[i])
2088 == master_binfo)
2089 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
2090 }
2091 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
2092 }
2093 else
2094 set_type_binfo (type, master_binfo);
2095 }
2096 return build_bases;
2097 }
2098
/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   possibly new entry.

   Types are hashed in two ways: by mangled name (odr_hash) and, during
   LTO, by the assembler name of their virtual table (odr_vtable_hash).
   A duplicate definition of the same ODR type coming from another
   translation unit is merged into the existing entry via
   add_type_duplicate.  Returns NULL only when INSERT is false and the
   type was never recorded.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot = NULL;
  odr_type_d **vtable_slot = NULL;
  odr_type val = NULL;
  hashval_t hash;
  bool build_bases = false;
  bool insert_to_odr_array = false;
  int base_id = -1;

  /* All variants of a type share one ODR entry; canonicalize first.  */
  type = main_odr_variant (type);

  gcc_checking_assert (can_be_name_hashed_p (type)
		       || can_be_vtable_hashed_p (type));

  /* Lookup entry, first try name hash, fallback to vtable hash.  */
  if (can_be_name_hashed_p (type))
    {
      hash = hash_odr_name (type);
      slot = odr_hash->find_slot_with_hash (type, hash,
					    insert ? INSERT : NO_INSERT);
    }
  if ((!slot || !*slot) && in_lto_p && can_be_vtable_hashed_p (type))
    {
      hash = hash_odr_vtable (type);
      vtable_slot = odr_vtable_hash->find_slot_with_hash (type, hash,
					   insert ? INSERT : NO_INSERT);
    }

  /* NO_INSERT lookup that found nothing in either hash.  */
  if (!slot && !vtable_slot)
    return NULL;

  /* See if we already have entry for type.  */
  if ((slot && *slot) || (vtable_slot && *vtable_slot))
    {
      if (slot && *slot)
	{
	  val = *slot;
#ifdef ENABLE_CHECKING
	  /* When both hashes apply, they must agree on the entry.  */
	  if (in_lto_p && can_be_vtable_hashed_p (type))
	    {
	      hash = hash_odr_vtable (type);
	      vtable_slot = odr_vtable_hash->find_slot_with_hash (type, hash,
								  NO_INSERT);
	      gcc_assert (!vtable_slot || *vtable_slot == *slot);
	      vtable_slot = NULL;
	    }
#endif
	}
      else if (*vtable_slot)
	val = *vtable_slot;

      /* TYPE is a duplicate tree of an already recorded ODR type; the
	 types_set test keeps us from processing the same tree twice.  */
      if (val->type != type
	  && (!val->types_set || !val->types_set->add (type)))
	{
	  gcc_assert (insert);
	  /* We have type duplicate, but it may introduce vtable name or
	     mangled name; be sure to keep hashes in sync.  */
	  if (in_lto_p && can_be_vtable_hashed_p (type)
	      && (!vtable_slot || !*vtable_slot))
	    {
	      if (!vtable_slot)
		{
		  hash = hash_odr_vtable (type);
		  vtable_slot = odr_vtable_hash->find_slot_with_hash
			     (type, hash, INSERT);
		  gcc_checking_assert (!*vtable_slot || *vtable_slot == val);
		}
	      *vtable_slot = val;
	    }
	  if (slot && !*slot)
	    *slot = val;
	  build_bases = add_type_duplicate (val, type);
	}
    }
  else
    {
      /* First time we see this ODR type; allocate a fresh entry.  */
      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      if (type_with_linkage_p (type))
	val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      else
	val->anonymous_namespace = 0;
      /* Bases can only be recorded once the type is complete.  */
      build_bases = COMPLETE_TYPE_P (val->type);
      insert_to_odr_array = true;
      if (slot)
	*slot = val;
      if (vtable_slot)
	*vtable_slot = val;
    }

  if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
      && type_with_linkage_p (type)
      && type == TYPE_MAIN_VARIANT (type))
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);
  
      val->all_derivations_known = type_all_derivations_known_p (type);
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types; others are
	   pointless for devirtualization and we can not precisely
	   determine ODR equivalency of these during LTO.  */
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
	  {
	    tree base_type= BINFO_TYPE (BINFO_BASE_BINFO (binfo, i));
	    /* Recursive call; may renumber and is why we track the
	       largest base id below.  */
	    odr_type base = get_odr_type (base_type, true);
	    gcc_assert (TYPE_MAIN_VARIANT (base_type) == base_type);
	    base->derived_types.safe_push (val);
	    val->bases.safe_push (base);
	    if (base->id > base_id)
	      base_id = base->id;
	  }
    }
  /* Ensure that type always appears after bases.  */
  if (insert_to_odr_array)
    {
      if (odr_types_ptr)
        val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  else if (base_id > val->id)
    {
      /* A base was recorded after us; move this entry to the end of the
	 array so the ordering invariant holds again.  */
      odr_types[val->id] = 0;
      /* Be sure we did not record any derived types; these may need
	 renumbering too.  */
      gcc_assert (val->derived_types.length() == 0);
      if (odr_types_ptr)
	val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}
2240
/* Add TYPE to ODR type hash.

   Lazily allocates the hash tables on first use.  Inserts the main
   variant before the variant itself so that the "type appears after its
   bases / main variant" invariant of the odr_types array holds.  */

void
register_odr_type (tree type)
{
  if (!odr_hash)
    {
      odr_hash = new odr_hash_type (23);
      if (in_lto_p)
	odr_vtable_hash = new odr_vtable_hash_type (23);
    }
  /* Arrange things to be nicer and insert main variants first.
     ??? fundamental prerecorded types do not have mangled names; this
     makes it possible that non-ODR type is main_odr_variant of ODR type.
     Things may get smoother if LTO FE set mangled name of those types same
     way as C++ FE does.  */
  if (odr_type_p (main_odr_variant (TYPE_MAIN_VARIANT (type)))
      && odr_type_p (TYPE_MAIN_VARIANT (type)))
    get_odr_type (TYPE_MAIN_VARIANT (type), true);
  if (TYPE_MAIN_VARIANT (type) != type && odr_type_p (main_odr_variant (type)))
    get_odr_type (type, true);
}
2263
2264 /* Return true if type is known to have no derivations. */
2265
2266 bool
2267 type_known_to_have_no_derivations_p (tree t)
2268 {
2269 return (type_all_derivations_known_p (t)
2270 && (TYPE_FINAL_P (t)
2271 || (odr_hash
2272 && !get_odr_type (t, true)->derived_types.length())));
2273 }
2274
2275 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
2276 recursive printing. */
2277
2278 static void
2279 dump_odr_type (FILE *f, odr_type t, int indent=0)
2280 {
2281 unsigned int i;
2282 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
2283 print_generic_expr (f, t->type, TDF_SLIM);
2284 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
2285 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
2286 if (TYPE_NAME (t->type))
2287 {
2288 /*fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
2289 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
2290 DECL_SOURCE_LINE (TYPE_NAME (t->type)));*/
2291 if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t->type)))
2292 fprintf (f, "%*s mangled name: %s\n", indent * 2, "",
2293 IDENTIFIER_POINTER
2294 (DECL_ASSEMBLER_NAME (TYPE_NAME (t->type))));
2295 }
2296 if (t->bases.length ())
2297 {
2298 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
2299 for (i = 0; i < t->bases.length (); i++)
2300 fprintf (f, " %i", t->bases[i]->id);
2301 fprintf (f, "\n");
2302 }
2303 if (t->derived_types.length ())
2304 {
2305 fprintf (f, "%*s derived types:\n", indent * 2, "");
2306 for (i = 0; i < t->derived_types.length (); i++)
2307 dump_odr_type (f, t->derived_types[i], indent + 1);
2308 }
2309 fprintf (f, "\n");
2310 }
2311
2312 /* Dump the type inheritance graph. */
2313
2314 static void
2315 dump_type_inheritance_graph (FILE *f)
2316 {
2317 unsigned int i;
2318 if (!odr_types_ptr)
2319 return;
2320 fprintf (f, "\n\nType inheritance graph:\n");
2321 for (i = 0; i < odr_types.length (); i++)
2322 {
2323 if (odr_types[i] && odr_types[i]->bases.length () == 0)
2324 dump_odr_type (f, odr_types[i]);
2325 }
2326 for (i = 0; i < odr_types.length (); i++)
2327 {
2328 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
2329 {
2330 unsigned int j;
2331 fprintf (f, "Duplicate tree types for odr type %i\n", i);
2332 print_node (f, "", odr_types[i]->type, 0);
2333 for (j = 0; j < odr_types[i]->types->length (); j++)
2334 {
2335 tree t;
2336 fprintf (f, "duplicate #%i\n", j);
2337 print_node (f, "", (*odr_types[i]->types)[j], 0);
2338 t = (*odr_types[i]->types)[j];
2339 while (TYPE_P (t) && TYPE_CONTEXT (t))
2340 {
2341 t = TYPE_CONTEXT (t);
2342 print_node (f, "", t, 0);
2343 }
2344 putc ('\n',f);
2345 }
2346 }
2347 }
2348 }
2349
/* Initialize IPA devirt and build inheritance tree graph.

   Idempotent: returns immediately when the ODR hash already exists.
   Seeds the graph from two kinds of symbols (the if/else-if below is
   one statement inside the FOR_EACH_SYMBOL loop): types of virtual
   methods, and types owning virtual tables.  Optionally dumps the
   resulting graph to the -fdump-ipa-inheritance file.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  if (odr_hash)
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);
  if (in_lto_p)
    odr_vtable_hash = new odr_vtable_hash_type (23);

  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& n->real_symbol_p ())
      get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);

    /* Look also for virtual tables of types that do not define any methods.
 
       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variant - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node *> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}
2407
2408 /* Return true if N has reference from live virtual table
2409 (and thus can be a destination of polymorphic call).
2410 Be conservatively correct when callgraph is not built or
2411 if the method may be referred externally. */
2412
2413 static bool
2414 referenced_from_vtable_p (struct cgraph_node *node)
2415 {
2416 int i;
2417 struct ipa_ref *ref;
2418 bool found = false;
2419
2420 if (node->externally_visible
2421 || DECL_EXTERNAL (node->decl)
2422 || node->used_from_other_partition)
2423 return true;
2424
2425 /* Keep this test constant time.
2426 It is unlikely this can happen except for the case where speculative
2427 devirtualization introduced many speculative edges to this node.
2428 In this case the target is very likely alive anyway. */
2429 if (node->ref_list.referring.length () > 100)
2430 return true;
2431
2432 /* We need references built. */
2433 if (symtab->state <= CONSTRUCTION)
2434 return true;
2435
2436 for (i = 0; node->iterate_referring (i, ref); i++)
2437 if ((ref->use == IPA_REF_ALIAS
2438 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
2439 || (ref->use == IPA_REF_ADDR
2440 && TREE_CODE (ref->referring->decl) == VAR_DECL
2441 && DECL_VIRTUAL_P (ref->referring->decl)))
2442 {
2443 found = true;
2444 break;
2445 }
2446 return found;
2447 }
2448
/* If TARGET has associated node, record it in the NODES array.
   CAN_REFER specify if program can refer to the target directly.
   if TARGET is unknown (NULL) or it can not be inserted (for example because
   its body was already removed and there is no way to refer to it), clear
   COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, hash_set<tree> *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;

  /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      /* NOTE(review): this path dereferences COMPLETEP unconditionally while
	 the final branch below guards on it being non-NULL — presumably
	 callers that pass NULL always pass can_refer == true; confirm.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer alias target over aliases, so we do not get confused by
     fake duplicates.  */
  /* NOTE(review): the last conjunct tests get_availability () for non-zero
     rather than comparing against an availability threshold; looks
     intentional but worth confirming against the availability enum.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      if (target_node != alias_target
	  && avail >= AVAIL_AVAILABLE
	  && target_node->get_availability ())
	target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
	  || !referenced_from_vtable_p (target_node)))
    ;  /* Anonymous-namespace method with no live vtable: drop silently.  */
  /* See if TARGET is useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      /* INSERTED de-duplicates; hash_set::add returns true when the
	 entry already existed.  */
      if (!inserted->add (target))
	{
	  cached_polymorphic_call_targets->add (target_node);
	  nodes.safe_push (target_node);
	}
    }
  else if (completep
	   && (!type_in_anonymous_namespace_p
		 (DECL_CONTEXT (target))
	       || flag_ltrans))
    *completep = false;
}
2535
/* See if BINFO's type matches OUTER_TYPE.  If so, look up
   BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
   method in vtable and insert method to NODES array
   or BASES_TO_CONSIDER if this array is non-NULL.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOS of types with defined
   virtual table seen on way from class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for virtual function in.  INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.

   Clear COMPLETEP when we hit unreferable target.
   */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  hash_set<tree> *inserted,
			  hash_set<tree> *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  /* Maintain the stack of BINFOs with a vtable; it is popped again both
     on the early-return path below and after the base walk.  */
  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      if (!inner_binfo)
	{
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  /* Strip the &vtable + offset form down to the VAR_DECL.  */
	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_node::get (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      /* When collecting into BASES_TO_CONSIDER only test membership
	 (contains); otherwise record the vtable as matched (add).  */
      if (bases_to_consider
	  ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
	  : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction vtables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
2646
2647 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
2648 of TYPE, insert them to NODES, recurse into derived nodes.
2649 INSERTED is used to avoid duplicate insertions of methods into NODES.
2650 MATCHED_VTABLES are used to avoid duplicate walking vtables.
2651 Clear COMPLETEP if unreferable target is found.
2652
2653 If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
2654 all cases where BASE_SKIPPED is true (because the base is abstract
2655 class). */
2656
2657 static void
2658 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
2659 hash_set<tree> *inserted,
2660 hash_set<tree> *matched_vtables,
2661 tree otr_type,
2662 odr_type type,
2663 HOST_WIDE_INT otr_token,
2664 tree outer_type,
2665 HOST_WIDE_INT offset,
2666 bool *completep,
2667 vec <tree> &bases_to_consider,
2668 bool consider_construction)
2669 {
2670 tree binfo = TYPE_BINFO (type->type);
2671 unsigned int i;
2672 auto_vec <tree, 8> type_binfos;
2673 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
2674
2675 /* We may need to consider types w/o instances because of possible derived
2676 types using their methods either directly or via construction vtables.
2677 We are safe to skip them when all derivations are known, since we will
2678 handle them later.
2679 This is done by recording them to BASES_TO_CONSIDER array. */
2680 if (possibly_instantiated || consider_construction)
2681 {
2682 record_target_from_binfo (nodes,
2683 (!possibly_instantiated
2684 && type_all_derivations_known_p (type->type))
2685 ? &bases_to_consider : NULL,
2686 binfo, otr_type, type_binfos, otr_token,
2687 outer_type, offset,
2688 inserted, matched_vtables,
2689 type->anonymous_namespace, completep);
2690 }
2691 for (i = 0; i < type->derived_types.length (); i++)
2692 possible_polymorphic_call_targets_1 (nodes, inserted,
2693 matched_vtables,
2694 otr_type,
2695 type->derived_types[i],
2696 otr_token, outer_type, offset, completep,
2697 bases_to_consider, consider_construction);
2698 }
2699
/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

struct polymorphic_call_target_d
{
  /* Index of the called method in the vtable.  */
  HOST_WIDE_INT otr_token;
  /* Context of the call (outer type, offset, speculation info).  */
  ipa_polymorphic_call_context context;
  /* ODR type of the call.  */
  odr_type type;
  /* Cached answer: possible call targets.  */
  vec <cgraph_node *> targets;
  /* Data for the -Wsuggest-final-* warnings associated with this query.  */
  tree decl_warning;
  int type_warning;
  /* True when the target list is known to be complete.  */
  bool complete;
  /* True when this entry answers a speculative query.  */
  bool speculative;
};
2717
/* Polymorphic call target cache helpers; hash/equality/removal traits
   used to instantiate the hash_table holding cached queries.  */

struct polymorphic_call_target_hasher
  : pointer_hash <polymorphic_call_target_d>
{
  static inline hashval_t hash (const polymorphic_call_target_d *);
  static inline bool equal (const polymorphic_call_target_d *,
			    const polymorphic_call_target_d *);
  static inline void remove (polymorphic_call_target_d *);
};
2728
/* Return the computed hashcode for ODR_QUERY.

   Mixes every component that participates in equality below: token,
   type id, outer type and offset, speculative context (when present)
   and the boolean flags.  The mixing order is fixed; changing it would
   invalidate nothing semantically but alter hash distribution.  */

inline hashval_t
polymorphic_call_target_hasher::hash (const polymorphic_call_target_d *odr_query)
{
  inchash::hash hstate (odr_query->otr_token);

  hstate.add_wide_int (odr_query->type->id);
  hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
  hstate.add_wide_int (odr_query->context.offset);

  if (odr_query->context.speculative_outer_type)
    {
      hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
      hstate.add_wide_int (odr_query->context.speculative_offset);
    }
  hstate.add_flag (odr_query->speculative);
  hstate.add_flag (odr_query->context.maybe_in_construction);
  hstate.add_flag (odr_query->context.maybe_derived_type);
  hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
  hstate.commit_flag ();
  return hstate.end ();
}
2752
2753 /* Compare cache entries T1 and T2. */
2754
2755 inline bool
2756 polymorphic_call_target_hasher::equal (const polymorphic_call_target_d *t1,
2757 const polymorphic_call_target_d *t2)
2758 {
2759 return (t1->type == t2->type && t1->otr_token == t2->otr_token
2760 && t1->speculative == t2->speculative
2761 && t1->context.offset == t2->context.offset
2762 && t1->context.speculative_offset == t2->context.speculative_offset
2763 && t1->context.outer_type == t2->context.outer_type
2764 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
2765 && t1->context.maybe_in_construction
2766 == t2->context.maybe_in_construction
2767 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
2768 && (t1->context.speculative_maybe_derived_type
2769 == t2->context.speculative_maybe_derived_type));
2770 }
2771
/* Remove entry in polymorphic call target cache hash.  Releases the
   target vector before freeing the entry itself (entries are allocated
   with malloc, hence free rather than ggc).  */

inline void
polymorphic_call_target_hasher::remove (polymorphic_call_target_d *v)
{
  v->targets.release ();
  free (v);
}
2780
/* Polymorphic call target query cache.  Allocated lazily and destroyed
   by free_polymorphic_call_targets_hash below.  */

typedef hash_table<polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
2786
2787 /* Destroy polymorphic call target query cache. */
2788
2789 static void
2790 free_polymorphic_call_targets_hash ()
2791 {
2792 if (cached_polymorphic_call_targets)
2793 {
2794 delete polymorphic_call_target_hash;
2795 polymorphic_call_target_hash = NULL;
2796 delete cached_polymorphic_call_targets;
2797 cached_polymorphic_call_targets = NULL;
2798 }
2799 }
2800
2801 /* When virtual function is removed, we may need to flush the cache. */
2802
2803 static void
2804 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
2805 {
2806 if (cached_polymorphic_call_targets
2807 && cached_polymorphic_call_targets->contains (n))
2808 free_polymorphic_call_targets_hash ();
2809 }
2810
2811 /* Look up base of BINFO that has virtual table VTABLE with OFFSET. */
2812
2813 tree
2814 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2815 tree vtable)
2816 {
2817 tree v = BINFO_VTABLE (binfo);
2818 int i;
2819 tree base_binfo;
2820 unsigned HOST_WIDE_INT this_offset;
2821
2822 if (v)
2823 {
2824 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2825 gcc_unreachable ();
2826
2827 if (offset == this_offset
2828 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2829 return binfo;
2830 }
2831
2832 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2833 if (polymorphic_type_binfo_p (base_binfo))
2834 {
2835 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2836 if (base_binfo)
2837 return base_binfo;
2838 }
2839 return NULL;
2840 }
2841
/* T is known constant value of virtual table pointer.
   Store virtual table to V and its offset to OFFSET.
   Return false if T does not look like virtual table reference.  */

bool
vtable_pointer_value_to_vtable (const_tree t, tree *v,
				unsigned HOST_WIDE_INT *offset)
{
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain object's BINFO from the context of the virtual table. 
     This one contains pointer to virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer matches what
     we propagated through.

     In the case of virtual inheritance, the virtual tables may
     be nested, i.e. the offset may be different from 16 and we may
     need to dive into the type representation.  */
  /* The chain below unwraps: ADDR_EXPR -> MEM_REF whose operand 0 is
     ADDR_EXPR of a virtual VAR_DECL and operand 1 an INTEGER_CST
     offset.  */
  if (TREE_CODE (t) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
	  == VAR_DECL)
      && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
					 (TREE_OPERAND (t, 0), 0), 0)))
    {
      *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
      *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
      return true;
    }

  /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
     We need to handle it when T comes from static variable initializer or
     BINFO.  */
  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
    {
      *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
      t = TREE_OPERAND (t, 0);
    }
  else
    *offset = 0;

  /* After stripping the optional offset we must be left with the address
     of the virtual table itself.  */
  if (TREE_CODE (t) != ADDR_EXPR)
    return false;
  *v = TREE_OPERAND (t, 0);
  return true;
}
2889
/* T is known constant value of virtual table pointer.  Return BINFO of the
   instance type.  */

tree
vtable_pointer_value_to_binfo (const_tree t)
{
  tree vtable;
  unsigned HOST_WIDE_INT offset;

  /* Decompose T into the virtual table variable and the offset within it.  */
  if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
    return NULL_TREE;

  /* FIXME: for stores of construction vtables we return NULL,
     because we do not have BINFO for those.  Eventually we should fix
     our representation to allow this case to be handled, too.
     In the case we see store of BINFO we however may assume
     that standard folding will be able to cope with it.  */
  return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
					 offset, vtable);
}
2910
2911 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2912 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2913 and insert them in NODES.
2914
2915 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
2916
2917 static void
2918 record_targets_from_bases (tree otr_type,
2919 HOST_WIDE_INT otr_token,
2920 tree outer_type,
2921 HOST_WIDE_INT offset,
2922 vec <cgraph_node *> &nodes,
2923 hash_set<tree> *inserted,
2924 hash_set<tree> *matched_vtables,
2925 bool *completep)
2926 {
2927 while (true)
2928 {
2929 HOST_WIDE_INT pos, size;
2930 tree base_binfo;
2931 tree fld;
2932
2933 if (types_same_for_odr (outer_type, otr_type))
2934 return;
2935
2936 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2937 {
2938 if (TREE_CODE (fld) != FIELD_DECL)
2939 continue;
2940
2941 pos = int_bit_position (fld);
2942 size = tree_to_shwi (DECL_SIZE (fld));
2943 if (pos <= offset && (pos + size) > offset
2944 /* Do not get confused by zero sized bases. */
2945 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2946 break;
2947 }
2948 /* Within a class type we should always find corresponding fields. */
2949 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2950
2951 /* Nonbase types should have been stripped by outer_class_type. */
2952 gcc_assert (DECL_ARTIFICIAL (fld));
2953
2954 outer_type = TREE_TYPE (fld);
2955 offset -= pos;
2956
2957 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2958 offset, otr_type);
2959 if (!base_binfo)
2960 {
2961 gcc_assert (odr_violation_reported);
2962 return;
2963 }
2964 gcc_assert (base_binfo);
2965 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2966 {
2967 bool can_refer;
2968 tree target = gimple_get_virt_method_for_binfo (otr_token,
2969 base_binfo,
2970 &can_refer);
2971 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2972 maybe_record_node (nodes, target, inserted, can_refer, completep);
2973 matched_vtables->add (BINFO_VTABLE (base_binfo));
2974 }
2975 }
2976 }
2977
/* When virtual table is removed, we may need to flush the cache.  */

static void
devirt_variable_node_removal_hook (varpool_node *n,
				   void *d ATTRIBUTE_UNUSED)
{
  /* Only vtables of anonymous-namespace types can affect the cached
     target lists here; flush the whole cache when one is removed.  */
  if (cached_polymorphic_call_targets
      && DECL_VIRTUAL_P (n->decl)
      && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
    free_polymorphic_call_targets_hash ();
}
2989
/* Record about how many calls would benefit from given type to be final.  */

struct odr_type_warn_count
{
  /* The ODR type being considered for -Wsuggest-final-types.  */
  tree type;
  /* Number of polymorphic call sites that would devirtualize.  */
  int count;
  /* Accumulated profile (dynamic) count of those call sites.  */
  gcov_type dyn_count;
};
2998
/* Record about how many calls would benefit from given method to be final.  */

struct decl_warn_count
{
  /* The method being considered for -Wsuggest-final-methods.  */
  tree decl;
  /* Number of polymorphic call sites that would devirtualize.  */
  int count;
  /* Accumulated profile (dynamic) count of those call sites.  */
  gcov_type dyn_count;
};
3007
/* Information about type and decl warnings.  */

struct final_warning_record
{
  /* Dynamic count of the current polymorphic call being analyzed;
     set by the caller before querying the target cache.  */
  gcov_type dyn_count;
  /* Per-ODR-type counters, indexed by odr_type id.  */
  vec<odr_type_warn_count> type_warnings;
  /* Per-method counters, keyed by the method decl.  */
  hash_map<tree, decl_warn_count> decl_warnings;
};
/* Non-NULL only while ipa_devirt is collecting -Wsuggest-final-* data.  */
struct final_warning_record *final_warning_records;
3017
/* Return vector containing possible targets of polymorphic call of type
   OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
   If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
   OTR_TYPE and include their virtual method.  This is useful for types
   possibly in construction or destruction where the virtual table may
   temporarily change to one of base types.  INCLUDE_DERIVER_TYPES make
   us to walk the inheritance graph for all derivations.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
   in the target cache.  If user needs to visit every target list
   just once, it can memoize them.

   If SPECULATIVE is set, the list will not contain targets that
   are not speculatively taken.

   Returned vector is placed into cache.  It is NOT caller's responsibility
   to free it.  The vector can be freed on cgraph_remove_node call if
   the particular node is a virtual function present in the cache.  */

vec <cgraph_node *>
possible_polymorphic_call_targets (tree otr_type,
				   HOST_WIDE_INT otr_token,
				   ipa_polymorphic_call_context context,
				   bool *completep,
				   void **cache_token,
				   bool speculative)
{
  static struct cgraph_node_hook_list *node_removal_hook_holder;
  vec <cgraph_node *> nodes = vNULL;
  auto_vec <tree, 8> bases_to_consider;
  odr_type type, outer_type;
  polymorphic_call_target_d key;
  polymorphic_call_target_d **slot;
  unsigned int i;
  tree binfo, target;
  bool complete;
  bool can_refer = false;
  bool skipped = false;

  otr_type = TYPE_MAIN_VARIANT (otr_type);

  /* If ODR is not initialized or the context is invalid, return empty
     incomplete list.  */
  if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
    {
      if (completep)
	*completep = context.invalid;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }

  /* Do not bother to compute speculative info when user do not asks for it.  */
  if (!speculative || !context.speculative_outer_type)
    context.clear_speculation ();

  type = get_odr_type (otr_type, true);

  /* Recording type variants would waste results cache.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* Look up the outer class type we want to walk.
     If we fail to do so, the context is invalid.  */
  if ((context.outer_type || context.speculative_outer_type)
      && !context.restrict_to_inner_class (otr_type))
    {
      if (completep)
	*completep = true;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }
  gcc_assert (!context.invalid);

  /* Check that restrict_to_inner_class kept the main variant.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* We canonicalize our query, so we do not need extra hashtable entries.  */

  /* Without outer type, we have no use for offset.  Just do the
     basic search from inner type.  */
  if (!context.outer_type)
    context.clear_outer_type (otr_type);
  /* We need to update our hierarchy if the type does not exist.  */
  outer_type = get_odr_type (context.outer_type, true);
  /* If the type is complete, there are no derivations.  */
  if (TYPE_FINAL_P (outer_type->type))
    context.maybe_derived_type = false;

  /* Initialize query cache.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
      polymorphic_call_target_hash
	= new polymorphic_call_target_hash_type (23);
      if (!node_removal_hook_holder)
	{
	  /* Cached answers must be flushed when nodes or vtables they
	     refer to disappear from the symbol table.  */
	  node_removal_hook_holder =
	    symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
	  symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
					    NULL);
	}
    }

  /* In LTO, canonicalize outer types to the ODR-merged representatives
     so that equivalent queries hit the same cache entry.  */
  if (in_lto_p)
    {
      if (context.outer_type != otr_type)
	context.outer_type
	  = get_odr_type (context.outer_type, true)->type;
      if (context.speculative_outer_type)
	context.speculative_outer_type
	  = get_odr_type (context.speculative_outer_type, true)->type;
    }

  /* Look up cached answer.  */
  key.type = type;
  key.otr_token = otr_token;
  key.speculative = speculative;
  key.context = context;
  slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
  if (cache_token)
    *cache_token = (void *)*slot;
  if (*slot)
    {
      if (completep)
	*completep = (*slot)->complete;
      /* On a cache hit, still accumulate the -Wsuggest-final-* statistics
	 recorded when the entry was first computed.  */
      if ((*slot)->type_warning && final_warning_records)
	{
	  final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
	  final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
	    += final_warning_records->dyn_count;
	}
      if (!speculative && (*slot)->decl_warning && final_warning_records)
	{
	  struct decl_warn_count *c =
	    final_warning_records->decl_warnings.get ((*slot)->decl_warning);
	  c->count++;
	  c->dyn_count += final_warning_records->dyn_count;
	}
      return (*slot)->targets;
    }

  complete = true;

  /* Do actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *)*slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;
  (*slot)->speculative = speculative;

  hash_set<tree> inserted;
  hash_set<tree> matched_vtables;

  /* First insert targets we speculatively identified as likely.  */
  if (context.speculative_outer_type)
    {
      odr_type speculative_outer_type;
      bool speculation_complete = true;

      /* First insert target from type itself and check if it may have
	 derived types.  */
      speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
      if (TYPE_FINAL_P (speculative_outer_type->type))
	context.speculative_maybe_derived_type = false;
      binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
				   context.speculative_offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	target = NULL;

      /* In the case we get complete method, we don't need
	 to walk derivations.  */
      if (target && DECL_FINAL_P (target))
	context.speculative_maybe_derived_type = false;
      if (type_possibly_instantiated_p (speculative_outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));


      /* Next walk recursively all derived types.  */
      if (context.speculative_maybe_derived_type)
	for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
	  possible_polymorphic_call_targets_1 (nodes, &inserted,
					       &matched_vtables,
					       otr_type,
					       speculative_outer_type->derived_types[i],
					       otr_token, speculative_outer_type->type,
					       context.speculative_offset,
					       &speculation_complete,
					       bases_to_consider,
					       false);
    }

  if (!speculative || !nodes.length ())
    {
      /* First see virtual method of type itself.  */
      binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
				   context.offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	{
	  gcc_assert (odr_violation_reported);
	  target = NULL;
	}

      /* Destructors are never called through construction virtual tables,
	 because the type is always known.  */
      if (target && DECL_CXX_DESTRUCTOR_P (target))
	context.maybe_in_construction = false;

      if (target)
	{
	  /* In the case we get complete method, we don't need
	     to walk derivations.  */
	  if (DECL_FINAL_P (target))
	    context.maybe_derived_type = false;
	}

      /* If OUTER_TYPE is abstract, we know we are not seeing its instance.  */
      if (type_possibly_instantiated_p (outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &complete);
      else
	skipped = true;

      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));

      /* Next walk recursively all derived types.  */
      if (context.maybe_derived_type)
	{
	  for (i = 0; i < outer_type->derived_types.length(); i++)
	    possible_polymorphic_call_targets_1 (nodes, &inserted,
						 &matched_vtables,
						 otr_type,
						 outer_type->derived_types[i],
						 otr_token, outer_type->type,
						 context.offset, &complete,
						 bases_to_consider,
						 context.maybe_in_construction);

	  if (!outer_type->all_derivations_known)
	    {
	      /* Record -Wsuggest-final-types / -Wsuggest-final-methods
		 candidates: the list would have been complete if the type
		 or the single surviving target were final.  */
	      if (!speculative && final_warning_records)
		{
		  if (complete
		      && nodes.length () == 1
		      && warn_suggest_final_types
		      && !outer_type->derived_types.length ())
		    {
		      if (outer_type->id >= (int)final_warning_records->type_warnings.length ())
			final_warning_records->type_warnings.safe_grow_cleared
			  (odr_types.length ());
		      final_warning_records->type_warnings[outer_type->id].count++;
		      final_warning_records->type_warnings[outer_type->id].dyn_count
			+= final_warning_records->dyn_count;
		      final_warning_records->type_warnings[outer_type->id].type
			= outer_type->type;
		      /* Stored off-by-one so that 0 means "no warning".  */
		      (*slot)->type_warning = outer_type->id + 1;
		    }
		  if (complete
		      && warn_suggest_final_methods
		      && nodes.length () == 1
		      && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
					     outer_type->type))
		    {
		      bool existed;
		      struct decl_warn_count &c =
			final_warning_records->decl_warnings.get_or_insert
			  (nodes[0]->decl, &existed);

		      if (existed)
			{
			  c.count++;
			  c.dyn_count += final_warning_records->dyn_count;
			}
		      else
			{
			  c.count = 1;
			  c.dyn_count = final_warning_records->dyn_count;
			  c.decl = nodes[0]->decl;
			}
		      (*slot)->decl_warning = nodes[0]->decl;
		    }
		}
	      complete = false;
	    }
	}

      if (!speculative)
	{
	  /* Destructors are never called through construction virtual tables,
	     because the type is always known.  One of entries may be
	     cxa_pure_virtual so look to at least two of them.  */
	  if (context.maybe_in_construction)
	    for (i =0 ; i < MIN (nodes.length (), 2); i++)
	      if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
		context.maybe_in_construction = false;
	  if (context.maybe_in_construction)
	    {
	      if (type != outer_type
		  && (!skipped
		      || (context.maybe_derived_type
			  && !type_all_derivations_known_p (outer_type->type))))
		record_targets_from_bases (otr_type, otr_token, outer_type->type,
					   context.offset, nodes, &inserted,
					   &matched_vtables, &complete);
	      if (skipped)
		maybe_record_node (nodes, target, &inserted, can_refer, &complete);
	      for (i = 0; i < bases_to_consider.length(); i++)
		maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
	    }
	}
    }

  /* Cache the computed answer before returning.  */
  (*slot)->targets = nodes;
  (*slot)->complete = complete;
  if (completep)
    *completep = complete;

  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}
3352
/* Traversal callback for final_warning_record::decl_warnings: push a
   pointer to each recorded VALUE into VEC.  KEY is unused.  Returns true
   so the traversal visits every entry.  */

bool
add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
		  vec<const decl_warn_count*> *vec)
{
  vec->safe_push (&value);
  return true;
}
3360
3361 /* Dump target list TARGETS into FILE. */
3362
3363 static void
3364 dump_targets (FILE *f, vec <cgraph_node *> targets)
3365 {
3366 unsigned int i;
3367
3368 for (i = 0; i < targets.length (); i++)
3369 {
3370 char *name = NULL;
3371 if (in_lto_p)
3372 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
3373 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
3374 if (in_lto_p)
3375 free (name);
3376 if (!targets[i]->definition)
3377 fprintf (f, " (no definition%s)",
3378 DECL_DECLARED_INLINE_P (targets[i]->decl)
3379 ? " inline" : "");
3380 }
3381 fprintf (f, "\n");
3382 }
3383
/* Dump all possible targets of a polymorphic call.  */

void
dump_possible_polymorphic_call_targets (FILE *f,
					tree otr_type,
					HOST_WIDE_INT otr_token,
					const ipa_polymorphic_call_context &ctx)
{
  vec <cgraph_node *> targets;
  bool final;
  odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
  unsigned int len;

  if (!type)
    return;
  /* First the non-speculative target list.  */
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, false);
  fprintf (f, "  Targets of polymorphic call of type %i:", type->id);
  print_generic_expr (f, type->type, TDF_SLIM);
  fprintf (f, " token %i\n", (int)otr_token);

  ctx.dump (f);

  fprintf (f, "    %s%s%s%s\n      ",
	   final ? "This is a complete list." :
	   "This is partial list; extra targets may be defined in other units.",
	   ctx.maybe_in_construction ? " (base types included)" : "",
	   ctx.maybe_derived_type ? " (derived types included)" : "",
	   ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
  len = targets.length ();
  dump_targets (f, targets);

  /* Then the speculative list; it is a subset of the former, so only
     dump it when it actually differs.  */
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, true);
  if (targets.length () != len)
    {
      fprintf (f, "    Speculative targets:");
      dump_targets (f, targets);
    }
  gcc_assert (targets.length () <= len);
  fprintf (f, "\n");
}
3428
3429
/* Return true if N can be possibly target of a polymorphic call of
   OTR_TYPE/OTR_TOKEN.  */

bool
possible_polymorphic_call_target_p (tree otr_type,
				    HOST_WIDE_INT otr_token,
				    const ipa_polymorphic_call_context &ctx,
				    struct cgraph_node *n)
{
  vec <cgraph_node *> targets;
  unsigned int i;
  enum built_in_function fcode;
  bool final;

  /* __builtin_unreachable and __builtin_trap stand for "impossible"
     targets inserted by earlier devirtualization; accept them always.  */
  if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
      && ((fcode = DECL_FUNCTION_CODE (n->decl))
	  == BUILT_IN_UNREACHABLE
	  || fcode == BUILT_IN_TRAP))
    return true;

  /* Without ODR info we cannot rule anything out.  */
  if (!odr_hash)
    return true;
  targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
  for (i = 0; i < targets.length (); i++)
    if (n->semantically_equivalent_p (targets[i]))
      return true;

  /* At a moment we allow middle end to dig out new external declarations
     as a targets of polymorphic calls.  */
  if (!final && !n->definition)
    return true;
  return false;
}
3463
3464
3465
/* Return true if N can be possibly target of a polymorphic call of
   OBJ_TYPE_REF expression REF in STMT.  */

bool
possible_polymorphic_call_target_p (tree ref,
				    gimple stmt,
				    struct cgraph_node *n)
{
  /* Build the call context from the statement and delegate to the
     type/token overload above.  */
  ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
  tree call_fn = gimple_call_fn (stmt);

  return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
					     tree_to_uhwi
					       (OBJ_TYPE_REF_TOKEN (call_fn)),
					     context,
					     n);
}
3483
3484
/* After callgraph construction new external nodes may appear.
   Add them into the graph.  */

void
update_type_inheritance_graph (void)
{
  struct cgraph_node *n;

  if (!odr_hash)
    return;
  /* New types may change previously computed target lists; drop the cache.  */
  free_polymorphic_call_targets_hash ();
  timevar_push (TV_IPA_INHERITANCE);
  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  */
  FOR_EACH_FUNCTION (n)
    if (DECL_VIRTUAL_P (n->decl)
	&& !n->definition
	&& n->real_symbol_p ())
      get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
  timevar_pop (TV_IPA_INHERITANCE);
}
3506
3507
3508 /* Return true if N looks like likely target of a polymorphic call.
3509 Rule out cxa_pure_virtual, noreturns, function declared cold and
3510 other obvious cases. */
3511
3512 bool
3513 likely_target_p (struct cgraph_node *n)
3514 {
3515 int flags;
3516 /* cxa_pure_virtual and similar things are not likely. */
3517 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
3518 return false;
3519 flags = flags_from_decl_or_type (n->decl);
3520 if (flags & ECF_NORETURN)
3521 return false;
3522 if (lookup_attribute ("cold",
3523 DECL_ATTRIBUTES (n->decl)))
3524 return false;
3525 if (n->frequency < NODE_FREQUENCY_NORMAL)
3526 return false;
3527 /* If there are no live virtual tables referring the target,
3528 the only way the target can be called is an instance coming from other
3529 compilation unit; speculative devirtualization is built around an
3530 assumption that won't happen. */
3531 if (!referenced_from_vtable_p (n))
3532 return false;
3533 return true;
3534 }
3535
3536 /* Compare type warning records P1 and P2 and choose one with larger count;
3537 helper for qsort. */
3538
3539 int
3540 type_warning_cmp (const void *p1, const void *p2)
3541 {
3542 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3543 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3544
3545 if (t1->dyn_count < t2->dyn_count)
3546 return 1;
3547 if (t1->dyn_count > t2->dyn_count)
3548 return -1;
3549 return t2->count - t1->count;
3550 }
3551
3552 /* Compare decl warning records P1 and P2 and choose one with larger count;
3553 helper for qsort. */
3554
3555 int
3556 decl_warning_cmp (const void *p1, const void *p2)
3557 {
3558 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3559 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3560
3561 if (t1->dyn_count < t2->dyn_count)
3562 return 1;
3563 if (t1->dyn_count > t2->dyn_count)
3564 return -1;
3565 return t2->count - t1->count;
3566 }
3567
3568
/* Try to speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
   context CTX.  */

struct cgraph_node *
try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
				  ipa_polymorphic_call_context ctx)
{
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(otr_type, otr_token, ctx, NULL, NULL, true);
  unsigned int i;
  struct cgraph_node *likely_target = NULL;

  /* Speculation only pays off when there is exactly one likely target;
     bail out as soon as a second one is found.  */
  for (i = 0; i < targets.length (); i++)
    if (likely_target_p (targets[i]))
      {
	if (likely_target)
	  return NULL;
	likely_target = targets[i];
      }
  if (!likely_target
      ||!likely_target->definition
      || DECL_EXTERNAL (likely_target->decl))
    return NULL;

  /* Don't use an implicitly-declared destructor (c++/58678).  */
  struct cgraph_node *non_thunk_target
    = likely_target->function_symbol ();
  if (DECL_ARTIFICIAL (non_thunk_target->decl))
    return NULL;
  /* Interposable targets that may also be discarded cannot be safely
     referenced from a speculative edge.  */
  if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
      && likely_target->can_be_discarded_p ())
    return NULL;
  return likely_target;
}
3604
/* The ipa-devirt pass.
   When polymorphic call has only one likely target in the unit,
   turn it into a speculative call.  */

static unsigned int
ipa_devirt (void)
{
  struct cgraph_node *n;
  hash_set<void *> bad_call_targets;
  struct cgraph_edge *e;

  /* Statistics for the final dump.  NOTE(review): ndevirtualized is only
     dumped/returned, never incremented here — this pass only speculates
     (nconverted); confirm against callers of the return value.  */
  int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
  int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
  int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
  int ndropped = 0;

  if (!odr_types_ptr)
    return 0;

  if (dump_file)
    dump_type_inheritance_graph (dump_file);

  /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
     This is implemented by setting up final_warning_records that are updated
     by get_polymorphic_call_targets.
     We need to clear cache in this case to trigger recomputation of all
     entries.  */
  if (warn_suggest_final_methods || warn_suggest_final_types)
    {
      final_warning_records = new (final_warning_record);
      final_warning_records->type_warnings = vNULL;
      final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
      free_polymorphic_call_targets_hash ();
    }

  FOR_EACH_DEFINED_FUNCTION (n)
    {
      bool update = false;
      if (!opt_for_fn (n->decl, flag_devirtualize))
	continue;
      if (dump_file && n->indirect_calls)
	fprintf (dump_file, "\n\nProcesing function %s/%i\n",
		 n->name (), n->order);
      for (e = n->indirect_calls; e; e = e->next_callee)
	if (e->indirect_info->polymorphic)
	  {
	    struct cgraph_node *likely_target = NULL;
	    void *cache_token;
	    bool final;

	    if (final_warning_records)
	      final_warning_records->dyn_count = e->count;

	    vec <cgraph_node *>targets
	       = possible_polymorphic_call_targets
		    (e, &final, &cache_token, true);
	    unsigned int i;

	    /* Trigger warnings by calculating non-speculative targets.  */
	    if (warn_suggest_final_methods || warn_suggest_final_types)
	      possible_polymorphic_call_targets (e);

	    if (dump_file)
	      dump_possible_polymorphic_call_targets
		(dump_file, e);

	    npolymorphic++;

	    /* See if the call can be devirtualized by means of ipa-prop's
	       polymorphic call context propagation.  If not, we can just
	       forget about this call being polymorphic and avoid some heavy
	       lifting in remove_unreachable_nodes that will otherwise try to
	       keep all possible targets alive until inlining and in the inliner
	       itself.

	       This may need to be revisited once we add further ways to use
	       the may edges, but it is a reasonable thing to do right now.  */

	    if ((e->indirect_info->param_index == -1
		 || (!opt_for_fn (n->decl, flag_devirtualize_speculatively)
		     && e->indirect_info->vptr_changed))
		&& !flag_ltrans_devirtualize)
	      {
		e->indirect_info->polymorphic = false;
		ndropped++;
		if (dump_file)
		  fprintf (dump_file, "Dropping polymorphic call info;"
			   " it can not be used by ipa-prop\n");
	      }

	    if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
	      continue;

	    if (!e->maybe_hot_p ())
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is cold\n\n");
		ncold++;
		continue;
	      }
	    if (e->speculative)
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is already speculated\n\n");
		nspeculated++;

		/* When dumping see if we agree with speculation.  */
		if (!dump_file)
		  continue;
	      }
	    if (bad_call_targets.contains (cache_token))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target list is known to be useless\n\n");
		nmultiple++;
		continue;
	      }
	    /* Find the single likely target; more than one means no
	       speculation.  */
	    for (i = 0; i < targets.length (); i++)
	      if (likely_target_p (targets[i]))
		{
		  if (likely_target)
		    {
		      likely_target = NULL;
		      if (dump_file)
			fprintf (dump_file, "More than one likely target\n\n");
		      nmultiple++;
		      break;
		    }
		  likely_target = targets[i];
		}
	    if (!likely_target)
	      {
		/* Remember useless target lists so other edges sharing the
		   same cache entry skip the scan.  */
		bad_call_targets.add (cache_token);
		continue;
	      }
	    /* This is reached only when dumping; check if we agree or disagree
	       with the speculation.  */
	    if (e->speculative)
	      {
		struct cgraph_edge *e2;
		struct ipa_ref *ref;
		e->speculative_call_info (e2, e, ref);
		if (e2->callee->ultimate_alias_target ()
		    == likely_target->ultimate_alias_target ())
		  {
		    fprintf (dump_file, "We agree with speculation\n\n");
		    nok++;
		  }
		else
		  {
		    fprintf (dump_file, "We disagree with speculation\n\n");
		    nwrong++;
		  }
		continue;
	      }
	    if (!likely_target->definition)
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is not a definition\n\n");
		nnotdefined++;
		continue;
	      }
	    /* Do not introduce new references to external symbols.  While we
	       can handle these just well, it is common for programs to be
	       incorrectly linked with headers defining methods they are
	       linked with.  */
	    if (DECL_EXTERNAL (likely_target->decl))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is external\n\n");
		nexternal++;
		continue;
	      }
	    /* Don't use an implicitly-declared destructor (c++/58678).  */
	    struct cgraph_node *non_thunk_target
	      = likely_target->function_symbol ();
	    if (DECL_ARTIFICIAL (non_thunk_target->decl))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is artificial\n\n");
		nartificial++;
		continue;
	      }
	    if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
		&& likely_target->can_be_discarded_p ())
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is overwritable\n\n");
		noverwritable++;
		continue;
	      }
	    else if (dbg_cnt (devirt))
	      {
		if (dump_enabled_p ())
		  {
		    location_t locus = gimple_location_safe (e->call_stmt);
		    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
				     "speculatively devirtualizing call in %s/%i to %s/%i\n",
				     n->name (), n->order,
				     likely_target->name (),
				     likely_target->order);
		  }
		/* Prefer a noninterposable alias so the speculative
		   reference cannot be broken by interposition.  */
		if (!likely_target->can_be_discarded_p ())
		  {
		    cgraph_node *alias;
		    alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
		    if (alias)
		      likely_target = alias;
		  }
		nconverted++;
		update = true;
		/* Assume 80% probability for the speculative edge.  */
		e->make_speculative
		  (likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
	      }
	  }
      if (update)
	inline_update_overall_summary (n);
    }
  /* Emit the accumulated -Wsuggest-final-* warnings, most profitable
     candidates first.  */
  if (warn_suggest_final_methods || warn_suggest_final_types)
    {
      if (warn_suggest_final_types)
	{
	  final_warning_records->type_warnings.qsort (type_warning_cmp);
	  for (unsigned int i = 0;
	       i < final_warning_records->type_warnings.length (); i++)
	    if (final_warning_records->type_warnings[i].count)
	      {
		tree type = final_warning_records->type_warnings[i].type;
		int count = final_warning_records->type_warnings[i].count;
		long long dyn_count
		  = final_warning_records->type_warnings[i].dyn_count;

		if (!dyn_count)
		  warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
			     OPT_Wsuggest_final_types, count,
			     "Declaring type %qD final "
			     "would enable devirtualization of %i call",
			     "Declaring type %qD final "
			     "would enable devirtualization of %i calls",
			     type,
			     count);
		else
		  warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
			     OPT_Wsuggest_final_types, count,
			     "Declaring type %qD final "
			     "would enable devirtualization of %i call "
			     "executed %lli times",
			     "Declaring type %qD final "
			     "would enable devirtualization of %i calls "
			     "executed %lli times",
			     type,
			     count,
			     dyn_count);
	      }
	}

      if (warn_suggest_final_methods)
	{
	  vec<const decl_warn_count*> decl_warnings_vec = vNULL;

	  final_warning_records->decl_warnings.traverse
	    <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
	  decl_warnings_vec.qsort (decl_warning_cmp);
	  for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
	    {
	      tree decl = decl_warnings_vec[i]->decl;
	      int count = decl_warnings_vec[i]->count;
	      long long dyn_count = decl_warnings_vec[i]->dyn_count;

	      if (!dyn_count)
		if (DECL_CXX_DESTRUCTOR_P (decl))
		  warning_n (DECL_SOURCE_LOCATION (decl),
			     OPT_Wsuggest_final_methods, count,
			     "Declaring virtual destructor of %qD final "
			     "would enable devirtualization of %i call",
			     "Declaring virtual destructor of %qD final "
			     "would enable devirtualization of %i calls",
			     DECL_CONTEXT (decl), count);
		else
		  warning_n (DECL_SOURCE_LOCATION (decl),
			     OPT_Wsuggest_final_methods, count,
			     "Declaring method %qD final "
			     "would enable devirtualization of %i call",
			     "Declaring method %qD final "
			     "would enable devirtualization of %i calls",
			     decl, count);
	      else if (DECL_CXX_DESTRUCTOR_P (decl))
		warning_n (DECL_SOURCE_LOCATION (decl),
			   OPT_Wsuggest_final_methods, count,
			   "Declaring virtual destructor of %qD final "
			   "would enable devirtualization of %i call "
			   "executed %lli times",
			   "Declaring virtual destructor of %qD final "
			   "would enable devirtualization of %i calls "
			   "executed %lli times",
			   DECL_CONTEXT (decl), count, dyn_count);
	      else
		warning_n (DECL_SOURCE_LOCATION (decl),
			   OPT_Wsuggest_final_methods, count,
			   "Declaring method %qD final "
			   "would enable devirtualization of %i call "
			   "executed %lli times",
			   "Declaring method %qD final "
			   "would enable devirtualization of %i calls "
			   "executed %lli times",
			   decl, count, dyn_count);
	    }
	}

      delete (final_warning_records);
      final_warning_records = 0;
    }

  if (dump_file)
    fprintf (dump_file,
	     "%i polymorphic calls, %i devirtualized,"
	     " %i speculatively devirtualized, %i cold\n"
	     "%i have multiple targets, %i overwritable,"
	     " %i already speculated (%i agree, %i disagree),"
	     " %i external, %i not defined, %i artificial, %i infos dropped\n",
	     npolymorphic, ndevirtualized, nconverted, ncold,
	     nmultiple, noverwritable, nspeculated, nok, nwrong,
	     nexternal, nnotdefined, nartificial, ndropped);
  return ndevirtualized || ndropped ? TODO_remove_functions : 0;
}
3930
3931 namespace {
3932
3933 const pass_data pass_data_ipa_devirt =
3934 {
3935 IPA_PASS, /* type */
3936 "devirt", /* name */
3937 OPTGROUP_NONE, /* optinfo_flags */
3938 TV_IPA_DEVIRT, /* tv_id */
3939 0, /* properties_required */
3940 0, /* properties_provided */
3941 0, /* properties_destroyed */
3942 0, /* todo_flags_start */
3943 ( TODO_dump_symtab ), /* todo_flags_finish */
3944 };
3945
3946 class pass_ipa_devirt : public ipa_opt_pass_d
3947 {
3948 public:
3949 pass_ipa_devirt (gcc::context *ctxt)
3950 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3951 NULL, /* generate_summary */
3952 NULL, /* write_summary */
3953 NULL, /* read_summary */
3954 NULL, /* write_optimization_summary */
3955 NULL, /* read_optimization_summary */
3956 NULL, /* stmt_fixup */
3957 0, /* function_transform_todo_flags_start */
3958 NULL, /* function_transform */
3959 NULL) /* variable_transform */
3960 {}
3961
3962 /* opt_pass methods: */
3963 virtual bool gate (function *)
3964 {
3965 /* In LTO, always run the IPA passes and decide on function basis if the
3966 pass is enabled. */
3967 if (in_lto_p)
3968 return true;
3969 return (flag_devirtualize
3970 && (flag_devirtualize_speculatively
3971 || (warn_suggest_final_methods
3972 || warn_suggest_final_types))
3973 && optimize);
3974 }
3975
3976 virtual unsigned int execute (function *) { return ipa_devirt (); }
3977
3978 }; // class pass_ipa_devirt
3979
3980 } // anon namespace
3981
3982 ipa_opt_pass_d *
3983 make_pass_ipa_devirt (gcc::context *ctxt)
3984 {
3985 return new pass_ipa_devirt (ctxt);
3986 }
3987
3988 #include "gt-ipa-devirt.h"