]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/ipa-devirt.c
ipa-prop uses symbol_summary class.
[thirdparty/gcc.git] / gcc / ipa-devirt.c
1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
44
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointer to by
64 BINFO_VTABLE of base binfos (that differs of BINFO_VTABLE of
65 binfo associated to the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
76
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
97
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given the parameters found in
103 indirect polymorphic edge all possible polymorphic call targets of the call.
104
105 pass_ipa_devirt performs simple speculative devirtualization.
106 */
107
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "tree.h"
113 #include "print-tree.h"
114 #include "calls.h"
115 #include "predict.h"
116 #include "basic-block.h"
117 #include "hash-map.h"
118 #include "is-a.h"
119 #include "plugin-api.h"
120 #include "vec.h"
121 #include "hashtab.h"
122 #include "hash-set.h"
123 #include "machmode.h"
124 #include "hard-reg-set.h"
125 #include "input.h"
126 #include "function.h"
127 #include "ipa-ref.h"
128 #include "cgraph.h"
129 #include "expr.h"
130 #include "tree-pass.h"
131 #include "target.h"
132 #include "hash-table.h"
133 #include "inchash.h"
134 #include "tree-pretty-print.h"
135 #include "ipa-utils.h"
136 #include "tree-ssa-alias.h"
137 #include "internal-fn.h"
138 #include "gimple-fold.h"
139 #include "gimple-expr.h"
140 #include "gimple.h"
141 #include "alloc-pool.h"
142 #include "symbol-summary.h"
143 #include "ipa-prop.h"
144 #include "ipa-inline.h"
145 #include "diagnostic.h"
146 #include "tree-dfa.h"
147 #include "demangle.h"
148 #include "dbgcnt.h"
149 #include "gimple-pretty-print.h"
150 #include "stor-layout.h"
151 #include "intl.h"
152
153 /* Hash based set of pairs of types. */
typedef struct
{
  /* The two types being paired.  FIRST is NULL for an empty hash slot
     (see pair_traits::is_empty / mark_empty below).  */
  tree first;
  tree second;
} type_pair;
159
struct pair_traits : default_hashset_traits
{
  /* Hash by XOR of the two type UIDs.  The XOR is commutative, but
     callers insert pairs in canonical (smaller TYPE_UID first) order.  */
  static hashval_t
  hash (type_pair p)
  {
    return TYPE_UID (p.first) ^ TYPE_UID (p.second);
  }
  /* An empty slot is marked by a NULL first type.  */
  static bool
  is_empty (type_pair p)
  {
    return p.first == NULL;
  }
  /* Entries are never deleted from this set.  */
  static bool
  is_deleted (type_pair p ATTRIBUTE_UNUSED)
  {
    return false;
  }
  /* Pairs are equal only when both components match exactly.  */
  static bool
  equal (const type_pair &a, const type_pair &b)
  {
    return a.first==b.first && a.second == b.second;
  }
  static void
  mark_empty (type_pair &e)
  {
    e.first = NULL;
  }
};
188
189 static bool odr_types_equivalent_p (tree, tree, bool, bool *,
190 hash_set<type_pair,pair_traits> *);
191
192 static bool odr_violation_reported = false;
193
194
195 /* Pointer set of all call targets appearing in the cache. */
196 static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
197
198 /* The node of type inheritance graph. For each type unique in
199 One Definition Rule (ODR) sense, we produce one node linking all
200 main variants of types equivalent to it, bases and derived types. */
201
struct GTY(()) odr_type_d
{
  /* Leader type; representative tree node of the equivalence class.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type? */
  bool all_derivations_known;
  /* Did we report ODR violation here?  */
  bool odr_violated;
};
226
227 /* Return TRUE if all derived types of T are known and thus
228 we may consider the walk of derived type complete.
229
230 This is typically true only for final anonymous namespace types and types
231 defined within functions (that may be COMDAT and thus shared across units,
232 but with the same set of derived types). */
233
234 bool
235 type_all_derivations_known_p (const_tree t)
236 {
237 if (TYPE_FINAL_P (t))
238 return true;
239 if (flag_ltrans)
240 return false;
241 /* Non-C++ types may have IDENTIFIER_NODE here, do not crash. */
242 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
243 return true;
244 if (type_in_anonymous_namespace_p (t))
245 return true;
246 return (decl_function_context (TYPE_NAME (t)) != NULL);
247 }
248
249 /* Return TRUE if type's constructors are all visible. */
250
251 static bool
252 type_all_ctors_visible_p (tree t)
253 {
254 return !flag_ltrans
255 && symtab->state >= CONSTRUCTION
256 /* We can not always use type_all_derivations_known_p.
257 For function local types we must assume case where
258 the function is COMDAT and shared in between units.
259
260 TODO: These cases are quite easy to get, but we need
261 to keep track of C++ privatizing via -Wno-weak
262 as well as the IPA privatizing. */
263 && type_in_anonymous_namespace_p (t);
264 }
265
266 /* Return TRUE if type may have instance. */
267
static bool
type_possibly_instantiated_p (tree t)
{
  tree vtable;
  varpool_node *vnode;

  /* TODO: Add abstract types here.  */
  /* If some constructor may be hidden in another unit, we must
     conservatively assume an instance exists.  */
  if (!type_all_ctors_visible_p (t))
    return true;

  vtable = BINFO_VTABLE (TYPE_BINFO (t));
  /* BINFO_VTABLE may be an offsetted address of the virtual table
     declaration; strip down to the declaration itself.  */
  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
  vnode = varpool_node::get (vtable);
  /* The type may be instantiated only if its virtual table is defined
     somewhere we can see.  */
  return vnode && vnode->definition;
}
284
285 /* One Definition Rule hashtable helpers. */
286
struct odr_hasher
{
  /* Hash table entries are odr_type_d nodes; lookups are done with
     plain tree type nodes as keys.  */
  typedef odr_type_d value_type;
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
295
296 /* Return type that was declared with T's name so that T is an
297 qualified variant of it. */
298
299 static inline tree
300 main_odr_variant (const_tree t)
301 {
302 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
303 return TREE_TYPE (TYPE_NAME (t));
304 /* Unnamed types and non-C++ produced types can be compared by variants. */
305 else
306 return TYPE_MAIN_VARIANT (t);
307 }
308
309 /* Produce hash based on type name. */
310
static hashval_t
hash_type_name (tree t)
{
  /* Callers must canonicalize to the main ODR variant first.  */
  gcc_checking_assert (main_odr_variant (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* ODR types have name specified.  */
  if (TYPE_NAME (t)
      && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)))
    return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));

  /* For polymorphic types that were compiled with -fno-lto-odr-type-merging
     we can simply hash the virtual table.  */
  if (TREE_CODE (t) == RECORD_TYPE
      && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
    {
      tree v = BINFO_VTABLE (TYPE_BINFO (t));
      hashval_t hash = 0;

      /* BINFO_VTABLE may be an offsetted vtable address; fold the
	 offset into the hash and strip down to the declaration.  */
      if (TREE_CODE (v) == POINTER_PLUS_EXPR)
	{
	  hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
	  v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
	}

      v = DECL_ASSEMBLER_NAME (v);
      hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
      return hash;
    }

  /* Builtin types may appear as main variants of ODR types and are unique.
     Sanity check we do not get anything that looks non-builtin.  */
  gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
		       || TREE_CODE (t) == VOID_TYPE
		       || TREE_CODE (t) == COMPLEX_TYPE
		       || TREE_CODE (t) == REAL_TYPE
		       || TREE_CODE (t) == POINTER_TYPE);
  return htab_hash_pointer (t);
}
358
359 /* Return the computed hashcode for ODR_TYPE. */
360
inline hashval_t
odr_hasher::hash (const value_type *odr_type)
{
  /* Delegate to the ODR name hash of the leader type.  */
  return hash_type_name (odr_type->type);
}
366
367 /* For languages with One Definition Rule, work out if
368 types are the same based on their name.
369
370 This is non-trivial for LTO where minor differences in
371 the type representation may have prevented type merging
372 to merge two copies of otherwise equivalent type.
373
374 Until we start streaming mangled type names, this function works
375 only for polymorphic types. */
376
377 bool
378 types_same_for_odr (const_tree type1, const_tree type2)
379 {
380 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
381
382 type1 = main_odr_variant (type1);
383 type2 = main_odr_variant (type2);
384
385 if (type1 == type2)
386 return true;
387
388 if (!in_lto_p)
389 return false;
390
391 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
392 on the corresponding TYPE_STUB_DECL. */
393 if (type_in_anonymous_namespace_p (type1)
394 || type_in_anonymous_namespace_p (type2))
395 return false;
396
397
398 /* ODR name of the type is set in DECL_ASSEMBLER_NAME of its TYPE_NAME.
399
400 Ideally we should never meed types without ODR names here. It can however
401 happen in two cases:
402
403 1) for builtin types that are not streamed but rebuilt in lto/lto-lang.c
404 Here testing for equivalence is safe, since their MAIN_VARIANTs are
405 unique.
406 2) for units streamed with -fno-lto-odr-type-merging. Here we can't
407 establish precise ODR equivalency, but for correctness we care only
408 about equivalency on complete polymorphic types. For these we can
409 compare assembler names of their virtual tables. */
410 if ((!TYPE_NAME (type1) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type1)))
411 || (!TYPE_NAME (type2) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type2))))
412 {
413 /* See if types are obvoiusly different (i.e. different codes
414 or polymorphis wrt non-polymorphic). This is not strictly correct
415 for ODR violating programs, but we can't do better without streaming
416 ODR names. */
417 if (TREE_CODE (type1) != TREE_CODE (type2))
418 return false;
419 if (TREE_CODE (type1) == RECORD_TYPE
420 && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type1) == NULL_TREE))
421 return false;
422 if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
423 && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
424 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
425 return false;
426
427 /* At the moment we have no way to establish ODR equivlaence at LTO
428 other than comparing virtual table pointrs of polymorphic types.
429 Eventually we should start saving mangled names in TYPE_NAME.
430 Then this condition will become non-trivial. */
431
432 if (TREE_CODE (type1) == RECORD_TYPE
433 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
434 && BINFO_VTABLE (TYPE_BINFO (type1))
435 && BINFO_VTABLE (TYPE_BINFO (type2)))
436 {
437 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
438 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
439 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
440 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
441 return (operand_equal_p (TREE_OPERAND (v1, 1),
442 TREE_OPERAND (v2, 1), 0)
443 && DECL_ASSEMBLER_NAME
444 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
445 == DECL_ASSEMBLER_NAME
446 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
447 }
448 gcc_unreachable ();
449 }
450 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
451 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
452 }
453
454 /* Return true if we can decide on ODR equivalency.
455
456 In non-LTO it is always possible to decide; in LTO however it depends on whether the type has
457 ODR info attached. */
458
459 bool
460 types_odr_comparable (tree t1, tree t2)
461 {
462 return (!in_lto_p
463 || main_odr_variant (t1) == main_odr_variant (t2)
464 || (odr_type_p (t1) && odr_type_p (t2))
465 || (TREE_CODE (t1) == RECORD_TYPE && TREE_CODE (t2) == RECORD_TYPE
466 && TYPE_BINFO (t1) && TYPE_BINFO (t2)
467 && polymorphic_type_binfo_p (TYPE_BINFO (t1))
468 && polymorphic_type_binfo_p (TYPE_BINFO (t2))));
469 }
470
471 /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
472 known, be conservative and return false. */
473
474 bool
475 types_must_be_same_for_odr (tree t1, tree t2)
476 {
477 if (types_odr_comparable (t1, t2))
478 return types_same_for_odr (t1, t2);
479 else
480 return main_odr_variant (t1) == main_odr_variant (t2);
481 }
482
483 /* Compare types T1 and T2 and return true if they are
484 equivalent. */
485
inline bool
odr_hasher::equal (const value_type *t1, const compare_type *ct2)
{
  tree t2 = const_cast <tree> (ct2);

  /* Lookup keys must already be canonicalized to the main ODR
     variant.  */
  gcc_checking_assert (main_odr_variant (t2) == t2);
  if (t1->type == t2)
    return true;
  /* Outside of LTO main variants are unique, so pointer inequality
     is conclusive.  */
  if (!in_lto_p)
    return false;
  return types_same_for_odr (t1->type, t2);
}
498
499 /* Free ODR type V. */
500
501 inline void
502 odr_hasher::remove (value_type *v)
503 {
504 v->bases.release ();
505 v->derived_types.release ();
506 if (v->types_set)
507 delete v->types_set;
508 ggc_free (v);
509 }
510
511 /* ODR type hash used to lookup ODR type based on tree type node. */
512
513 typedef hash_table<odr_hasher> odr_hash_type;
514 static odr_hash_type *odr_hash;
515
516 /* ODR types are also stored into ODR_TYPE vector to allow consistent
517 walking. Bases appear before derived types. Vector is garbage collected
518 so we won't end up visiting empty types. */
519
520 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
521 #define odr_types (*odr_types_ptr)
522
523 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
524 void
525 set_type_binfo (tree type, tree binfo)
526 {
527 for (; type; type = TYPE_NEXT_VARIANT (type))
528 if (COMPLETE_TYPE_P (type))
529 TYPE_BINFO (type) = binfo;
530 else
531 gcc_assert (!TYPE_BINFO (type));
532 }
533
534 /* Compare T1 and T2 based on name or structure. */
535
static bool
odr_subtypes_equivalent_p (tree t1, tree t2, hash_set<type_pair,pair_traits> *visited)
{
  bool an1, an2;

  /* This can happen in incomplete types that should be handled earlier.  */
  gcc_assert (t1 && t2);

  t1 = main_odr_variant (t1);
  t2 = main_odr_variant (t2);
  if (t1 == t2)
    return true;

  /* Anonymous namespace types must match exactly.  */
  an1 = type_in_anonymous_namespace_p (t1);
  an2 = type_in_anonymous_namespace_p (t2);
  if (an1 != an2 || an1)
    return false;

  /* For ODR types be sure to compare their names.
     To support -Wno-odr-type-merging we allow one type to be non-ODR
     and other ODR even though it is a violation.  */
  if (types_odr_comparable (t1, t2))
    {
      if (!types_same_for_odr (t1, t2))
	return false;
      /* Limit recursion: If subtypes are ODR types and we know
	 that they are same, be happy.  */
      if (!get_odr_type (t1, true)->odr_violated)
	return true;
    }

  /* Component types, builtins and possibly violating ODR types
     have to be compared structurally.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;
  if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
    return false;
  if (TYPE_NAME (t1) && DECL_NAME (TYPE_NAME (t1)) != DECL_NAME (TYPE_NAME (t2)))
    return false;

  /* Recurse with a canonically ordered pair (smaller TYPE_UID first) so
     each pair is visited at most once; treating an already-visited pair
     as equivalent breaks recursion cycles.  */
  type_pair pair={t1,t2};
  if (TYPE_UID (t1) > TYPE_UID (t2))
    {
      pair.first = t2;
      pair.second = t1;
    }
  if (visited->add (pair))
    return true;
  return odr_types_equivalent_p (t1, t2, false, NULL, visited);
}
587
588 /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
589 violation warnings. */
590
591 void
592 compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
593 {
594 int n1, n2;
595 if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
596 {
597 odr_violation_reported = true;
598 if (DECL_VIRTUAL_P (prevailing->decl))
599 {
600 varpool_node *tmp = prevailing;
601 prevailing = vtable;
602 vtable = tmp;
603 }
604 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
605 OPT_Wodr,
606 "virtual table of type %qD violates one definition rule",
607 DECL_CONTEXT (vtable->decl)))
608 inform (DECL_SOURCE_LOCATION (prevailing->decl),
609 "variable of same assembler name as the virtual table is "
610 "defined in another translation unit");
611 return;
612 }
613 if (!prevailing->definition || !vtable->definition)
614 return;
615 for (n1 = 0, n2 = 0; true; n1++, n2++)
616 {
617 struct ipa_ref *ref1, *ref2;
618 bool end1, end2;
619 end1 = !prevailing->iterate_reference (n1, ref1);
620 end2 = !vtable->iterate_reference (n2, ref2);
621 if (end1 && end2)
622 return;
623 if (!end1 && !end2
624 && DECL_ASSEMBLER_NAME (ref1->referred->decl)
625 != DECL_ASSEMBLER_NAME (ref2->referred->decl)
626 && !n2
627 && !DECL_VIRTUAL_P (ref2->referred->decl)
628 && DECL_VIRTUAL_P (ref1->referred->decl))
629 {
630 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
631 "virtual table of type %qD contains RTTI information",
632 DECL_CONTEXT (vtable->decl)))
633 {
634 inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
635 "but is prevailed by one without from other translation unit");
636 inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
637 "RTTI will not work on this type");
638 }
639 n2++;
640 end2 = !vtable->iterate_reference (n2, ref2);
641 }
642 if (!end1 && !end2
643 && DECL_ASSEMBLER_NAME (ref1->referred->decl)
644 != DECL_ASSEMBLER_NAME (ref2->referred->decl)
645 && !n1
646 && !DECL_VIRTUAL_P (ref1->referred->decl)
647 && DECL_VIRTUAL_P (ref2->referred->decl))
648 {
649 n1++;
650 end1 = !vtable->iterate_reference (n1, ref1);
651 }
652 if (end1 || end2)
653 {
654 if (end1)
655 {
656 varpool_node *tmp = prevailing;
657 prevailing = vtable;
658 vtable = tmp;
659 ref1 = ref2;
660 }
661 if (warning_at (DECL_SOURCE_LOCATION
662 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
663 "virtual table of type %qD violates "
664 "one definition rule",
665 DECL_CONTEXT (vtable->decl)))
666 {
667 inform (DECL_SOURCE_LOCATION
668 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
669 "the conflicting type defined in another translation "
670 "unit");
671 inform (DECL_SOURCE_LOCATION
672 (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
673 "contains additional virtual method %qD",
674 ref1->referred->decl);
675 }
676 return;
677 }
678 if (DECL_ASSEMBLER_NAME (ref1->referred->decl)
679 != DECL_ASSEMBLER_NAME (ref2->referred->decl))
680 {
681 if (warning_at (DECL_SOURCE_LOCATION
682 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
683 "virtual table of type %qD violates "
684 "one definition rule ",
685 DECL_CONTEXT (vtable->decl)))
686 {
687 inform (DECL_SOURCE_LOCATION
688 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
689 "the conflicting type defined in another translation "
690 "unit");
691 inform (DECL_SOURCE_LOCATION (ref1->referred->decl),
692 "virtual method %qD", ref1->referred->decl);
693 inform (DECL_SOURCE_LOCATION (ref2->referred->decl),
694 "ought to match virtual method %qD but does not",
695 ref2->referred->decl);
696 return;
697 }
698 }
699 }
700 }
701
702 /* Output ODR violation warning about T1 and T2 with REASON.
703 Display location of ST1 and ST2 if REASON speaks about field or
704 method of the type.
705 If WARN is false, do nothing. Set WARNED if warning was indeed
706 output. */
707
void
warn_odr (tree t1, tree t2, tree st1, tree st2,
	  bool warn, bool *warned, const char *reason)
{
  /* Location of the conflicting definition; may be retargeted to the
     mismatching member below.  */
  tree decl2 = TYPE_NAME (t2);

  if (!warn)
    return;
  if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
		   "type %qT violates one definition rule",
		   t1))
    return;
  /* No sub-decls: just report REASON at the other definition.  */
  if (!st1 && !st2)
    ;
  /* For FIELD_DECL support also case where one of fields is
     NULL - this is used when the structures have mismatching number of
     elements.  */
  else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      /* Point at whichever field exists.  */
      if (!st1)
	{
	  st1 = st2;
	  st2 = NULL;
	}
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is field %qD",
	      st1);
      if (st2)
	decl2 = st2;
    }
  else if (TREE_CODE (st1) == FUNCTION_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is method %qD",
	      st1);
      decl2 = st2;
    }
  else
    return;
  inform (DECL_SOURCE_LOCATION (decl2), reason);

  if (warned)
    *warned = true;
}
756
757 /* We already warned about ODR mismatch. T1 and T2 ought to be equivalent
758 because they are used on same place in ODR matching types.
759 They are not; inform the user. */
760
void
warn_types_mismatch (tree t1, tree t2)
{
  /* Without named types there is no useful location to point at.  */
  if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
    return;
  /* In Firefox it is a common bug to have same types but in
     different namespaces.  Be a bit more informative on
     this.  */
  if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
      && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
	    != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
	  || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
	      && (DECL_NAME (TYPE_CONTEXT (t1)) !=
		  DECL_NAME (TYPE_CONTEXT (t2))))))
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
	    "type %qT should match type %qT but is defined "
	    "in different namespace ",
	    t1, t2);
  else
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
	    "type %qT should match type %qT",
	    t1, t2);
  inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
	  "the incompatible type is defined here");
}
786
787 /* Compare T1 and T2, report ODR violations if WARN is true and set
788 WARNED to true if anything is reported. Return true if types match.
789 If true is returned, the types are also compatible in the sense of
790 gimple_canonical_types_compatible_p. */
791
792 static bool
793 odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned, hash_set<type_pair,pair_traits> *visited)
794 {
795 /* Check first for the obvious case of pointer identity. */
796 if (t1 == t2)
797 return true;
798 gcc_assert (!type_in_anonymous_namespace_p (t1));
799 gcc_assert (!type_in_anonymous_namespace_p (t2));
800
801 /* Can't be the same type if the types don't have the same code. */
802 if (TREE_CODE (t1) != TREE_CODE (t2))
803 {
804 warn_odr (t1, t2, NULL, NULL, warn, warned,
805 G_("a different type is defined in another translation unit"));
806 return false;
807 }
808
809 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
810 {
811 warn_odr (t1, t2, NULL, NULL, warn, warned,
812 G_("a type with different qualifiers is defined in another "
813 "translation unit"));
814 return false;
815 }
816
817 if (comp_type_attributes (t1, t2) != 1)
818 {
819 warn_odr (t1, t2, NULL, NULL, warn, warned,
820 G_("a type with attributes "
821 "is defined in another translation unit"));
822 return false;
823 }
824
825 if (TREE_CODE (t1) == ENUMERAL_TYPE)
826 {
827 tree v1, v2;
828 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
829 v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
830 {
831 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
832 {
833 warn_odr (t1, t2, NULL, NULL, warn, warned,
834 G_("an enum with different value name"
835 " is defined in another translation unit"));
836 return false;
837 }
838 if (TREE_VALUE (v1) != TREE_VALUE (v2)
839 && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
840 DECL_INITIAL (TREE_VALUE (v2)), 0))
841 {
842 warn_odr (t1, t2, NULL, NULL, warn, warned,
843 G_("an enum with different values is defined"
844 " in another translation unit"));
845 return false;
846 }
847 }
848 if (v1 || v2)
849 {
850 warn_odr (t1, t2, NULL, NULL, warn, warned,
851 G_("an enum with mismatching number of values "
852 "is defined in another translation unit"));
853 return false;
854 }
855 }
856
857 /* Non-aggregate types can be handled cheaply. */
858 if (INTEGRAL_TYPE_P (t1)
859 || SCALAR_FLOAT_TYPE_P (t1)
860 || FIXED_POINT_TYPE_P (t1)
861 || TREE_CODE (t1) == VECTOR_TYPE
862 || TREE_CODE (t1) == COMPLEX_TYPE
863 || TREE_CODE (t1) == OFFSET_TYPE
864 || POINTER_TYPE_P (t1))
865 {
866 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
867 {
868 warn_odr (t1, t2, NULL, NULL, warn, warned,
869 G_("a type with different precision is defined "
870 "in another translation unit"));
871 return false;
872 }
873 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
874 {
875 warn_odr (t1, t2, NULL, NULL, warn, warned,
876 G_("a type with different signedness is defined "
877 "in another translation unit"));
878 return false;
879 }
880
881 if (TREE_CODE (t1) == INTEGER_TYPE
882 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
883 {
884 /* char WRT uint_8? */
885 warn_odr (t1, t2, NULL, NULL, warn, warned,
886 G_("a different type is defined in another "
887 "translation unit"));
888 return false;
889 }
890
891 /* For canonical type comparisons we do not want to build SCCs
892 so we cannot compare pointed-to types. But we can, for now,
893 require the same pointed-to type kind and match what
894 useless_type_conversion_p would do. */
895 if (POINTER_TYPE_P (t1))
896 {
897 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
898 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
899 {
900 warn_odr (t1, t2, NULL, NULL, warn, warned,
901 G_("it is defined as a pointer in different address "
902 "space in another translation unit"));
903 return false;
904 }
905
906 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
907 {
908 warn_odr (t1, t2, NULL, NULL, warn, warned,
909 G_("it is defined as a pointer to different type "
910 "in another translation unit"));
911 if (warn && warned)
912 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
913 return false;
914 }
915 }
916
917 if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
918 && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
919 {
920 /* Probably specific enough. */
921 warn_odr (t1, t2, NULL, NULL, warn, warned,
922 G_("a different type is defined "
923 "in another translation unit"));
924 if (warn && warned)
925 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
926 return false;
927 }
928 }
929 /* Do type-specific comparisons. */
930 else switch (TREE_CODE (t1))
931 {
932 case ARRAY_TYPE:
933 {
934 /* Array types are the same if the element types are the same and
935 the number of elements are the same. */
936 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
937 {
938 warn_odr (t1, t2, NULL, NULL, warn, warned,
939 G_("a different type is defined in another "
940 "translation unit"));
941 if (warn && warned)
942 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
943 }
944 gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
945 gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
946 == TYPE_NONALIASED_COMPONENT (t2));
947
948 tree i1 = TYPE_DOMAIN (t1);
949 tree i2 = TYPE_DOMAIN (t2);
950
951 /* For an incomplete external array, the type domain can be
952 NULL_TREE. Check this condition also. */
953 if (i1 == NULL_TREE || i2 == NULL_TREE)
954 return true;
955
956 tree min1 = TYPE_MIN_VALUE (i1);
957 tree min2 = TYPE_MIN_VALUE (i2);
958 tree max1 = TYPE_MAX_VALUE (i1);
959 tree max2 = TYPE_MAX_VALUE (i2);
960
961 /* In C++, minimums should be always 0. */
962 gcc_assert (min1 == min2);
963 if (!operand_equal_p (max1, max2, 0))
964 {
965 warn_odr (t1, t2, NULL, NULL, warn, warned,
966 G_("an array of different size is defined "
967 "in another translation unit"));
968 return false;
969 }
970 }
971 break;
972
973 case METHOD_TYPE:
974 case FUNCTION_TYPE:
975 /* Function types are the same if the return type and arguments types
976 are the same. */
977 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
978 {
979 warn_odr (t1, t2, NULL, NULL, warn, warned,
980 G_("has different return value "
981 "in another translation unit"));
982 if (warn && warned)
983 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
984 return false;
985 }
986
987 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
988 return true;
989 else
990 {
991 tree parms1, parms2;
992
993 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
994 parms1 && parms2;
995 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
996 {
997 if (!odr_subtypes_equivalent_p
998 (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
999 {
1000 warn_odr (t1, t2, NULL, NULL, warn, warned,
1001 G_("has different parameters in another "
1002 "translation unit"));
1003 if (warn && warned)
1004 warn_types_mismatch (TREE_VALUE (parms1),
1005 TREE_VALUE (parms2));
1006 return false;
1007 }
1008 }
1009
1010 if (parms1 || parms2)
1011 {
1012 warn_odr (t1, t2, NULL, NULL, warn, warned,
1013 G_("has different parameters "
1014 "in another translation unit"));
1015 return false;
1016 }
1017
1018 return true;
1019 }
1020
1021 case RECORD_TYPE:
1022 case UNION_TYPE:
1023 case QUAL_UNION_TYPE:
1024 {
1025 tree f1, f2;
1026
1027 /* For aggregate types, all the fields must be the same. */
1028 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1029 {
1030 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1031 f1 || f2;
1032 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1033 {
1034 /* Skip non-fields. */
1035 while (f1 && TREE_CODE (f1) != FIELD_DECL)
1036 f1 = TREE_CHAIN (f1);
1037 while (f2 && TREE_CODE (f2) != FIELD_DECL)
1038 f2 = TREE_CHAIN (f2);
1039 if (!f1 || !f2)
1040 break;
1041 if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
1042 break;
1043 if (DECL_NAME (f1) != DECL_NAME (f2)
1044 && !DECL_ARTIFICIAL (f1))
1045 {
1046 warn_odr (t1, t2, f1, f2, warn, warned,
1047 G_("a field with different name is defined "
1048 "in another translation unit"));
1049 return false;
1050 }
1051 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
1052 {
1053 /* Do not warn about artificial fields and just go into generic
1054 field mismatch warning. */
1055 if (DECL_ARTIFICIAL (f1))
1056 break;
1057
1058 warn_odr (t1, t2, f1, f2, warn, warned,
1059 G_("a field of same name but different type "
1060 "is defined in another translation unit"));
1061 if (warn && warned)
1062 warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
1063 return false;
1064 }
1065 if (!gimple_compare_field_offset (f1, f2))
1066 {
1067 /* Do not warn about artificial fields and just go into generic
1068 field mismatch warning. */
1069 if (DECL_ARTIFICIAL (f1))
1070 break;
1071 warn_odr (t1, t2, t1, t2, warn, warned,
1072 G_("fields has different layout "
1073 "in another translation unit"));
1074 return false;
1075 }
1076 gcc_assert (DECL_NONADDRESSABLE_P (f1)
1077 == DECL_NONADDRESSABLE_P (f2));
1078 }
1079
1080 /* If one aggregate has more fields than the other, they
1081 are not the same. */
1082 if (f1 || f2)
1083 {
1084 if (f1 && DECL_ARTIFICIAL (f1))
1085 f1 = NULL;
1086 if (f2 && DECL_ARTIFICIAL (f2))
1087 f2 = NULL;
1088 if (f1 || f2)
1089 warn_odr (t1, t2, f1, f2, warn, warned,
1090 G_("a type with different number of fields "
1091 "is defined in another translation unit"));
1092 /* Ideally we should never get this generic message. */
1093 else
1094 warn_odr (t1, t2, f1, f2, warn, warned,
1095 G_("a type with different memory representation "
1096 "is defined in another translation unit"));
1097
1098 return false;
1099 }
1100 if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
1101 && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
1102 != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
1103 {
1104 for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
1105 f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
1106 f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
1107 {
1108 if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
1109 {
1110 warn_odr (t1, t2, f1, f2, warn, warned,
1111 G_("a different method of same type "
1112 "is defined in another translation unit"));
1113 return false;
1114 }
1115 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1116 {
1117 warn_odr (t1, t2, f1, f2, warn, warned,
1118 G_("s definition that differs by virtual "
1119 "keyword in another translation unit"));
1120 return false;
1121 }
1122 if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
1123 {
1124 warn_odr (t1, t2, f1, f2, warn, warned,
1125 G_("virtual table layout differs in another "
1126 "translation unit"));
1127 return false;
1128 }
1129 if (odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
1130 {
1131 warn_odr (t1, t2, f1, f2, warn, warned,
1132 G_("method with incompatible type is defined "
1133 "in another translation unit"));
1134 return false;
1135 }
1136 }
1137 if (f1 || f2)
1138 {
1139 warn_odr (t1, t2, NULL, NULL, warn, warned,
1140 G_("a type with different number of methods "
1141 "is defined in another translation unit"));
1142 return false;
1143 }
1144 }
1145 }
1146 break;
1147 }
1148 case VOID_TYPE:
1149 break;
1150
1151 default:
1152 debug_tree (t1);
1153 gcc_unreachable ();
1154 }
1155
1156 /* Those are better to come last as they are utterly uninformative. */
1157 if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
1158 && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
1159 {
1160 warn_odr (t1, t2, NULL, NULL, warn, warned,
1161 G_("a type with different size "
1162 "is defined in another translation unit"));
1163 return false;
1164 }
1165 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
1166 && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
1167 {
1168 warn_odr (t1, t2, NULL, NULL, warn, warned,
1169 G_("a type with different alignment "
1170 "is defined in another translation unit"));
1171 return false;
1172 }
1173 gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
1174 || operand_equal_p (TYPE_SIZE_UNIT (t1),
1175 TYPE_SIZE_UNIT (t2), 0));
1176 return true;
1177 }
1178
1179 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1180 from VAL->type. This may happen in LTO where tree merging did not merge
1181 all variants of the same type. It may or may not mean the ODR violation.
1182 Add it to the list of duplicates and warn on some violations. */
1183
1184 static bool
1185 add_type_duplicate (odr_type val, tree type)
1186 {
1187 bool build_bases = false;
1188 if (!val->types_set)
1189 val->types_set = new hash_set<tree>;
1190
1191 /* Always prefer complete type to be the leader. */
1192 if (!COMPLETE_TYPE_P (val->type)
1193 && COMPLETE_TYPE_P (type))
1194 {
1195 tree tmp = type;
1196
1197 build_bases = true;
1198 type = val->type;
1199 val->type = tmp;
1200 }
1201
1202 /* See if this duplicate is new. */
1203 if (!val->types_set->add (type))
1204 {
1205 bool merge = true;
1206 bool base_mismatch = false;
1207 unsigned int i,j;
1208 bool warned = false;
1209 hash_set<type_pair,pair_traits> visited;
1210
1211 gcc_assert (in_lto_p);
1212 vec_safe_push (val->types, type);
1213
1214 /* First we compare memory layout. */
1215 if (!odr_types_equivalent_p (val->type, type, !flag_ltrans && !val->odr_violated,
1216 &warned, &visited))
1217 {
1218 merge = false;
1219 odr_violation_reported = true;
1220 val->odr_violated = true;
1221 if (symtab->dump_file)
1222 {
1223 fprintf (symtab->dump_file, "ODR violation\n");
1224
1225 print_node (symtab->dump_file, "", val->type, 0);
1226 putc ('\n',symtab->dump_file);
1227 print_node (symtab->dump_file, "", type, 0);
1228 putc ('\n',symtab->dump_file);
1229 }
1230 }
1231
1232 /* Next sanity check that bases are the same. If not, we will end
1233 up producing wrong answers. */
1234 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1235 && TREE_CODE (val->type) == RECORD_TYPE
1236 && TREE_CODE (type) == RECORD_TYPE
1237 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
1238 {
1239 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1240 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
1241 {
1242 odr_type base = get_odr_type
1243 (BINFO_TYPE
1244 (BINFO_BASE_BINFO (TYPE_BINFO (type),
1245 i)),
1246 true);
1247 if (val->bases.length () <= j || val->bases[j] != base)
1248 base_mismatch = true;
1249 j++;
1250 }
1251 if (base_mismatch)
1252 {
1253 merge = false;
1254 odr_violation_reported = true;
1255
1256 if (!warned && !val->odr_violated)
1257 warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1258 "a type with the same name but different bases is "
1259 "defined in another translation unit");
1260 val->odr_violated = true;
1261 if (symtab->dump_file)
1262 {
1263 fprintf (symtab->dump_file, "ODR bse violation or merging bug?\n");
1264
1265 print_node (symtab->dump_file, "", val->type, 0);
1266 putc ('\n',symtab->dump_file);
1267 print_node (symtab->dump_file, "", type, 0);
1268 putc ('\n',symtab->dump_file);
1269 }
1270 }
1271 }
1272
1273 /* Regularize things a little. During LTO same types may come with
1274 different BINFOs. Either because their virtual table was
1275 not merged by tree merging and only later at decl merging or
1276 because one type comes with external vtable, while other
1277 with internal. We want to merge equivalent binfos to conserve
1278 memory and streaming overhead.
1279
1280 The external vtables are more harmful: they contain references
1281 to external declarations of methods that may be defined in the
1282 merged LTO unit. For this reason we absolutely need to remove
1283 them and replace by internal variants. Not doing so will lead
1284 to incomplete answers from possible_polymorphic_call_targets.
1285
1286 FIXME: disable for now; because ODR types are now build during
1287 streaming in, the variants do not need to be linked to the type,
1288 yet. We need to do the merging in cleanup pass to be implemented
1289 soon. */
1290 if (!flag_ltrans && merge
1291 && 0
1292 && TREE_CODE (val->type) == RECORD_TYPE
1293 && TREE_CODE (type) == RECORD_TYPE
1294 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1295 && TYPE_MAIN_VARIANT (type) == type
1296 && TYPE_MAIN_VARIANT (val->type) == val->type
1297 && BINFO_VTABLE (TYPE_BINFO (val->type))
1298 && BINFO_VTABLE (TYPE_BINFO (type)))
1299 {
1300 tree master_binfo = TYPE_BINFO (val->type);
1301 tree v1 = BINFO_VTABLE (master_binfo);
1302 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
1303
1304 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
1305 {
1306 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
1307 && operand_equal_p (TREE_OPERAND (v1, 1),
1308 TREE_OPERAND (v2, 1), 0));
1309 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
1310 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
1311 }
1312 gcc_assert (DECL_ASSEMBLER_NAME (v1)
1313 == DECL_ASSEMBLER_NAME (v2));
1314
1315 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
1316 {
1317 unsigned int i;
1318
1319 set_type_binfo (val->type, TYPE_BINFO (type));
1320 for (i = 0; i < val->types->length (); i++)
1321 {
1322 if (TYPE_BINFO ((*val->types)[i])
1323 == master_binfo)
1324 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
1325 }
1326 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
1327 }
1328 else
1329 set_type_binfo (type, master_binfo);
1330 }
1331 }
1332 return build_bases;
1333 }
1334
1335 /* Get ODR type hash entry for TYPE. If INSERT is true, create
1336 possibly new entry. */
1337
1338 odr_type
1339 get_odr_type (tree type, bool insert)
1340 {
1341 odr_type_d **slot;
1342 odr_type val;
1343 hashval_t hash;
1344 bool build_bases = false;
1345 bool insert_to_odr_array = false;
1346 int base_id = -1;
1347
1348 type = main_odr_variant (type);
1349
1350 hash = hash_type_name (type);
1351 slot = odr_hash->find_slot_with_hash (type, hash,
1352 insert ? INSERT : NO_INSERT);
1353 if (!slot)
1354 return NULL;
1355
1356 /* See if we already have entry for type. */
1357 if (*slot)
1358 {
1359 val = *slot;
1360
1361 /* With LTO we need to support multiple tree representation of
1362 the same ODR type. */
1363 if (val->type != type)
1364 build_bases = add_type_duplicate (val, type);
1365 }
1366 else
1367 {
1368 val = ggc_cleared_alloc<odr_type_d> ();
1369 val->type = type;
1370 val->bases = vNULL;
1371 val->derived_types = vNULL;
1372 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
1373 build_bases = COMPLETE_TYPE_P (val->type);
1374 insert_to_odr_array = true;
1375 }
1376
1377 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1378 && type == TYPE_MAIN_VARIANT (type))
1379 {
1380 tree binfo = TYPE_BINFO (type);
1381 unsigned int i;
1382
1383 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) = type);
1384
1385 val->all_derivations_known = type_all_derivations_known_p (type);
1386 *slot = val;
1387 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
1388 /* For now record only polymorphic types. other are
1389 pointless for devirtualization and we can not precisely
1390 determine ODR equivalency of these during LTO. */
1391 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
1392 {
1393 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
1394 i)),
1395 true);
1396 gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
1397 base->derived_types.safe_push (val);
1398 val->bases.safe_push (base);
1399 if (base->id > base_id)
1400 base_id = base->id;
1401 }
1402 }
1403 /* Ensure that type always appears after bases. */
1404 if (insert_to_odr_array)
1405 {
1406 if (odr_types_ptr)
1407 val->id = odr_types.length ();
1408 vec_safe_push (odr_types_ptr, val);
1409 }
1410 else if (base_id > val->id)
1411 {
1412 odr_types[val->id] = 0;
1413 /* Be sure we did not recorded any derived types; these may need
1414 renumbering too. */
1415 gcc_assert (val->derived_types.length() == 0);
1416 if (odr_types_ptr)
1417 val->id = odr_types.length ();
1418 vec_safe_push (odr_types_ptr, val);
1419 }
1420 return val;
1421 }
1422
1423 /* Add TYPE od ODR type hash. */
1424
1425 void
1426 register_odr_type (tree type)
1427 {
1428 if (!odr_hash)
1429 odr_hash = new odr_hash_type (23);
1430 /* Arrange things to be nicer and insert main variants first. */
1431 if (odr_type_p (TYPE_MAIN_VARIANT (type)))
1432 get_odr_type (TYPE_MAIN_VARIANT (type), true);
1433 if (TYPE_MAIN_VARIANT (type) != type)
1434 get_odr_type (type, true);
1435 }
1436
1437 /* Return true if type is known to have no derivations. */
1438
1439 bool
1440 type_known_to_have_no_deriavations_p (tree t)
1441 {
1442 return (type_all_derivations_known_p (t)
1443 && (TYPE_FINAL_P (t)
1444 || (odr_hash
1445 && !get_odr_type (t, true)->derived_types.length())));
1446 }
1447
1448 /* Dump ODR type T and all its derrived type. INDENT specify indentation for
1449 recusive printing. */
1450
1451 static void
1452 dump_odr_type (FILE *f, odr_type t, int indent=0)
1453 {
1454 unsigned int i;
1455 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
1456 print_generic_expr (f, t->type, TDF_SLIM);
1457 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
1458 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
1459 if (TYPE_NAME (t->type))
1460 {
1461 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
1462 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
1463 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
1464 }
1465 if (t->bases.length ())
1466 {
1467 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
1468 for (i = 0; i < t->bases.length (); i++)
1469 fprintf (f, " %i", t->bases[i]->id);
1470 fprintf (f, "\n");
1471 }
1472 if (t->derived_types.length ())
1473 {
1474 fprintf (f, "%*s derived types:\n", indent * 2, "");
1475 for (i = 0; i < t->derived_types.length (); i++)
1476 dump_odr_type (f, t->derived_types[i], indent + 1);
1477 }
1478 fprintf (f, "\n");
1479 }
1480
1481 /* Dump the type inheritance graph. */
1482
1483 static void
1484 dump_type_inheritance_graph (FILE *f)
1485 {
1486 unsigned int i;
1487 if (!odr_types_ptr)
1488 return;
1489 fprintf (f, "\n\nType inheritance graph:\n");
1490 for (i = 0; i < odr_types.length (); i++)
1491 {
1492 if (odr_types[i] && odr_types[i]->bases.length () == 0)
1493 dump_odr_type (f, odr_types[i]);
1494 }
1495 for (i = 0; i < odr_types.length (); i++)
1496 {
1497 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
1498 {
1499 unsigned int j;
1500 fprintf (f, "Duplicate tree types for odr type %i\n", i);
1501 print_node (f, "", odr_types[i]->type, 0);
1502 for (j = 0; j < odr_types[i]->types->length (); j++)
1503 {
1504 tree t;
1505 fprintf (f, "duplicate #%i\n", j);
1506 print_node (f, "", (*odr_types[i]->types)[j], 0);
1507 t = (*odr_types[i]->types)[j];
1508 while (TYPE_P (t) && TYPE_CONTEXT (t))
1509 {
1510 t = TYPE_CONTEXT (t);
1511 print_node (f, "", t, 0);
1512 }
1513 putc ('\n',f);
1514 }
1515 }
1516 }
1517 }
1518
1519 /* Given method type T, return type of class it belongs to.
1520 Lookup this pointer and get its type. */
1521
1522 tree
1523 method_class_type (const_tree t)
1524 {
1525 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
1526 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
1527
1528 return TREE_TYPE (first_parm_type);
1529 }
1530
/* Initialize IPA devirt and build inheritance tree graph.
   Idempotent: returns immediately when ODR hash was already built.
   Populates odr_hash/odr_types by walking all symbols, and optionally
   dumps the graph to the -fdump-ipa-inheritance stream.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  /* Already built; nothing to do.  */
  if (odr_hash)
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);

  /* We reconstruct the graph starting of types of all methods seen in the
     the unit.  Note: the `if' below and the `else if' after the long
     comment form one statement under FOR_EACH_SYMBOL.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& n->real_symbol_p ())
      get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
		    true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variant - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node *> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}
1587
1588 /* Return true if N has reference from live virtual table
1589 (and thus can be a destination of polymorphic call).
1590 Be conservatively correct when callgraph is not built or
1591 if the method may be referred externally. */
1592
1593 static bool
1594 referenced_from_vtable_p (struct cgraph_node *node)
1595 {
1596 int i;
1597 struct ipa_ref *ref;
1598 bool found = false;
1599
1600 if (node->externally_visible
1601 || DECL_EXTERNAL (node->decl)
1602 || node->used_from_other_partition)
1603 return true;
1604
1605 /* Keep this test constant time.
1606 It is unlikely this can happen except for the case where speculative
1607 devirtualization introduced many speculative edges to this node.
1608 In this case the target is very likely alive anyway. */
1609 if (node->ref_list.referring.length () > 100)
1610 return true;
1611
1612 /* We need references built. */
1613 if (symtab->state <= CONSTRUCTION)
1614 return true;
1615
1616 for (i = 0; node->iterate_referring (i, ref); i++)
1617
1618 if ((ref->use == IPA_REF_ALIAS
1619 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
1620 || (ref->use == IPA_REF_ADDR
1621 && TREE_CODE (ref->referring->decl) == VAR_DECL
1622 && DECL_VIRTUAL_P (ref->referring->decl)))
1623 {
1624 found = true;
1625 break;
1626 }
1627 return found;
1628 }
1629
/* If TARGET has associated node, record it in the NODES array.
   CAN_REFER specify if program can refer to the target directly.
   if TARGET is unknown (NULL) or it can not be inserted (for example because
   its body was already removed and there is no way to refer to it), clear
   COMPLETEP.  INSERTED tracks already-recorded targets so each one is
   pushed to NODES at most once.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, hash_set<tree> *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;

  /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Preffer alias target over aliases, so we do not get confused by
     fake duplicates.
     NOTE(review): `avail >= AVAIL_AVAILABLE' already gates this; the
     extra truthiness test of get_availability () looks redundant —
     confirm the intended threshold.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      if (target_node != alias_target
	  && avail >= AVAIL_AVAILABLE
	  && target_node->get_availability ())
	target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables refering to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     eslewhere.  */
  if (!flag_ltrans
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
          || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      /* Record each target only once.  */
      if (!inserted->add (target))
	{
	  cached_polymorphic_call_targets->add (target_node);
	  nodes.safe_push (target_node);
	}
    }
  /* Target exists but is not usable; the target list is incomplete unless
     the method lives in an anonymous namespace of this unit.  */
  else if (completep
	   && (!type_in_anonymous_namespace_p
		 (DECL_CONTEXT (target))
	       || flag_ltrans))
    *completep = false;
}
1716
1717 /* See if BINFO's type match OUTER_TYPE. If so, lookup
1718 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
1719 method in vtable and insert method to NODES array
1720 or BASES_TO_CONSIDER if this array is non-NULL.
1721 Otherwise recurse to base BINFOs.
1722 This match what get_binfo_at_offset does, but with offset
1723 being unknown.
1724
1725 TYPE_BINFOS is a stack of BINFOS of types with defined
1726 virtual table seen on way from class type to BINFO.
1727
1728 MATCHED_VTABLES tracks virtual tables we already did lookup
1729 for virtual function in. INSERTED tracks nodes we already
1730 inserted.
1731
1732 ANONYMOUS is true if BINFO is part of anonymous namespace.
1733
1734 Clear COMPLETEP when we hit unreferable target.
1735 */
1736
static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  hash_set<tree> *inserted,
			  hash_set<tree> *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;

  /* Maintain the stack of BINFOs with vtables: pushed here, popped on
     every exit path below.  */
  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;  /* Shadows the outer I; used only for the stack scan.  */
      tree type_binfo = NULL;

      /* Lookup BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      if (!inner_binfo)
	{
	  /* A missing binfo at OFFSET only happens when an ODR violation
	     was already diagnosed.  */
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_node::get (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      /* When collecting bases (construction-vtable walk) only test
	 membership; otherwise record the vtable as visited.  */
      if (bases_to_consider
	  ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
	  : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction virtual tables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
1827
/* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them to NODES, recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES are used to avoid duplicate walking vtables.
   Clear COMPLETEP if unreferable target is found.

   If CONSIDER_CONSTURCTION is true, record to BASES_TO_CONSDIER
   all cases where BASE_SKIPPED is true (because the base is abstract
   class).  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
				     hash_set<tree> *inserted,
				     hash_set<tree> *matched_vtables,
				     tree otr_type,
				     odr_type type,
				     HOST_WIDE_INT otr_token,
				     tree outer_type,
				     HOST_WIDE_INT offset,
				     bool *completep,
				     vec <tree> &bases_to_consider,
				     bool consider_construction)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  /* Per-call stack of binfos with vtables used by record_target_from_binfo;
     sized 8 to avoid heap allocation for shallow hierarchies.  */
  auto_vec <tree, 8> type_binfos;
  bool possibly_instantiated = type_possibly_instantiated_p (type->type);

  /* We may need to consider types w/o instances because of possible derived
     types using their methods either directly or via construction vtables.
     We are safe to skip them when all derivations are known, since we will
     handle them later.
     This is done by recording them to BASES_TO_CONSIDER array.  */
  if (possibly_instantiated || consider_construction)
    {
      record_target_from_binfo (nodes,
				(!possibly_instantiated
				 && type_all_derivations_known_p (type->type))
				? &bases_to_consider : NULL,
				binfo, otr_type, type_binfos, otr_token,
				outer_type, offset,
				inserted, matched_vtables,
				type->anonymous_namespace, completep);
    }
  /* Recurse into every type derived from TYPE.  */
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
					 matched_vtables,
					 otr_type,
					 type->derived_types[i],
					 otr_token, outer_type, offset, completep,
					 bases_to_consider, consider_construction);
}
1880
/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

struct polymorphic_call_target_d
{
  /* Token (vtable index) of the virtual call; see OTR_TOKEN uses above.  */
  HOST_WIDE_INT otr_token;
  /* Outer type/offset context the query was made for.  */
  ipa_polymorphic_call_context context;
  /* ODR type of the call's OBJ_TYPE_REF.  */
  odr_type type;
  /* The enumerated possible targets.  */
  vec <cgraph_node *> targets;
  /* Cached warning state — presumably decl/type to warn about on reuse;
     not used in this part of the file, verify against the callers.  */
  tree decl_warning;
  int type_warning;
  /* True when TARGETS is known to be the complete list.  */
  bool complete;
  /* True for speculative-devirtualization queries (part of hash key).  */
  bool speculative;
};
1898
/* Polymorphic call target cache helpers: hash-table traits for
   polymorphic_call_target_d entries (see hash/equal/remove below).  */

struct polymorphic_call_target_hasher
{
  typedef polymorphic_call_target_d value_type;
  typedef polymorphic_call_target_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
1909
/* Return the computed hashcode for ODR_QUERY.  The set of fields mixed
   in here must stay in sync with ::equal below.  */

inline hashval_t
polymorphic_call_target_hasher::hash (const value_type *odr_query)
{
  inchash::hash hstate (odr_query->otr_token);

  hstate.add_wide_int (odr_query->type->id);
  hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
  hstate.add_wide_int (odr_query->context.offset);

  /* Speculative part of the context only participates when present.  */
  if (odr_query->context.speculative_outer_type)
    {
      hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
      hstate.add_wide_int (odr_query->context.speculative_offset);
    }
  hstate.add_flag (odr_query->speculative);
  hstate.add_flag (odr_query->context.maybe_in_construction);
  hstate.add_flag (odr_query->context.maybe_derived_type);
  hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
  hstate.commit_flag ();
  return hstate.end ();
}
1933
1934 /* Compare cache entries T1 and T2. */
1935
1936 inline bool
1937 polymorphic_call_target_hasher::equal (const value_type *t1,
1938 const compare_type *t2)
1939 {
1940 return (t1->type == t2->type && t1->otr_token == t2->otr_token
1941 && t1->speculative == t2->speculative
1942 && t1->context.offset == t2->context.offset
1943 && t1->context.speculative_offset == t2->context.speculative_offset
1944 && t1->context.outer_type == t2->context.outer_type
1945 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
1946 && t1->context.maybe_in_construction
1947 == t2->context.maybe_in_construction
1948 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
1949 && (t1->context.speculative_maybe_derived_type
1950 == t2->context.speculative_maybe_derived_type));
1951 }
1952
/* Remove entry in polymorphic call target cache hash.  Releases the
   target vector and frees the entry itself (entries are heap-allocated
   with malloc — presumably by the lookup path; verify against caller).  */

inline void
polymorphic_call_target_hasher::remove (value_type *v)
{
  v->targets.release ();
  free (v);
}
1961
/* Polymorphic call target query cache, keyed by
   {type, otr_token, context, speculative}.  Allocated lazily by
   possible_polymorphic_call_targets and flushed by
   free_polymorphic_call_targets_hash when relevant symbols are
   removed.  */

typedef hash_table<polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
1967
1968 /* Destroy polymorphic call target query cache. */
1969
1970 static void
1971 free_polymorphic_call_targets_hash ()
1972 {
1973 if (cached_polymorphic_call_targets)
1974 {
1975 delete polymorphic_call_target_hash;
1976 polymorphic_call_target_hash = NULL;
1977 delete cached_polymorphic_call_targets;
1978 cached_polymorphic_call_targets = NULL;
1979 }
1980 }
1981
/* When virtual function N is removed from the callgraph, we may need to
   flush the cache: if N is one of the cached targets, cached answers
   referring to it would become stale.  D is unused hook data.  */

static void
devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
{
  if (cached_polymorphic_call_targets
      && cached_polymorphic_call_targets->contains (n))
    free_polymorphic_call_targets_hash ();
}
1991
/* Lookup base of BINFO that has virtual table VTABLE with OFFSET;
   return the corresponding (sub)binfo, or NULL when none is found.  */

tree
subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
				tree vtable)
{
  tree v = BINFO_VTABLE (binfo);
  int i;
  tree base_binfo;
  unsigned HOST_WIDE_INT this_offset;

  if (v)
    {
      /* BINFO_VTABLE is an expression pointing into the vtable; decode
	 it into the underlying vtable decl and offset.  */
      if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
	gcc_unreachable ();

      /* Compare by assembler name so that equivalent vtables coming
	 from different translation units still match.  */
      if (offset == this_offset
	  && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
	return binfo;
    }

  /* Recurse into polymorphic bases only; others carry no vtable.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (polymorphic_type_binfo_p (base_binfo))
      {
	base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
	if (base_binfo)
	  return base_binfo;
      }
  return NULL;
}
2022
/* T is known constant value of virtual table pointer.
   Store virtual table to V and its offset to OFFSET.
   Return false if T does not look like virtual table reference.  */

bool
vtable_pointer_value_to_vtable (const_tree t, tree *v,
				unsigned HOST_WIDE_INT *offset)
{
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain object's BINFO from the context of the virtual table.
     This one contains pointer to virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer matches what
     we propagated through.

     In the case of virtual inheritance, the virtual tables may
     be nested, i.e. the offset may be different from 16 and we may
     need to dive into the type representation.  */
  if (TREE_CODE (t) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
	  == VAR_DECL)
      && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
					 (TREE_OPERAND (t, 0), 0), 0)))
    {
      /* The vtable decl is the VAR_DECL under the inner ADDR_EXPR;
	 the offset is the constant MEM_REF operand.  */
      *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
      *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
      return true;
    }

  /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
     We need to handle it when T comes from static variable initializer or
     BINFO.  */
  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
    {
      *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
      t = TREE_OPERAND (t, 0);
    }
  else
    *offset = 0;

  /* Whatever remains should be a plain address of the vtable.  */
  if (TREE_CODE (t) != ADDR_EXPR)
    return false;
  *v = TREE_OPERAND (t, 0);
  return true;
}
2070
/* T is known constant value of virtual table pointer.  Return BINFO of the
   instance type, or NULL_TREE when T cannot be decoded or no matching
   sub-binfo exists.  */

tree
vtable_pointer_value_to_binfo (const_tree t)
{
  tree vtable;
  unsigned HOST_WIDE_INT offset;

  if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
    return NULL_TREE;

  /* FIXME: for stores of construction vtables we return NULL,
     because we do not have BINFO for those.  Eventually we should fix
     our representation to allow this case to be handled, too.
     In the case we see store of BINFO we however may assume
     that standard folding will be able to cope with it.  */
  return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
					 offset, vtable);
}
2091
2092 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2093 Lookup their respecitve virtual methods for OTR_TOKEN and OTR_TYPE
2094 and insert them to NODES.
2095
2096 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
2097
2098 static void
2099 record_targets_from_bases (tree otr_type,
2100 HOST_WIDE_INT otr_token,
2101 tree outer_type,
2102 HOST_WIDE_INT offset,
2103 vec <cgraph_node *> &nodes,
2104 hash_set<tree> *inserted,
2105 hash_set<tree> *matched_vtables,
2106 bool *completep)
2107 {
2108 while (true)
2109 {
2110 HOST_WIDE_INT pos, size;
2111 tree base_binfo;
2112 tree fld;
2113
2114 if (types_same_for_odr (outer_type, otr_type))
2115 return;
2116
2117 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2118 {
2119 if (TREE_CODE (fld) != FIELD_DECL)
2120 continue;
2121
2122 pos = int_bit_position (fld);
2123 size = tree_to_shwi (DECL_SIZE (fld));
2124 if (pos <= offset && (pos + size) > offset
2125 /* Do not get confused by zero sized bases. */
2126 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2127 break;
2128 }
2129 /* Within a class type we should always find correcponding fields. */
2130 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2131
2132 /* Nonbasetypes should have been stripped by outer_class_type. */
2133 gcc_assert (DECL_ARTIFICIAL (fld));
2134
2135 outer_type = TREE_TYPE (fld);
2136 offset -= pos;
2137
2138 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2139 offset, otr_type);
2140 if (!base_binfo)
2141 {
2142 gcc_assert (odr_violation_reported);
2143 return;
2144 }
2145 gcc_assert (base_binfo);
2146 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2147 {
2148 bool can_refer;
2149 tree target = gimple_get_virt_method_for_binfo (otr_token,
2150 base_binfo,
2151 &can_refer);
2152 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2153 maybe_record_node (nodes, target, inserted, can_refer, completep);
2154 matched_vtables->add (BINFO_VTABLE (base_binfo));
2155 }
2156 }
2157 }
2158
/* When virtual table (varpool node N) is removed, we may need to flush
   the cache: vtables of types in anonymous namespaces can disappear
   entirely, invalidating cached target lists.  D is unused hook data.  */

static void
devirt_variable_node_removal_hook (varpool_node *n,
				   void *d ATTRIBUTE_UNUSED)
{
  if (cached_polymorphic_call_targets
      && DECL_VIRTUAL_P (n->decl)
      && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
    free_polymorphic_call_targets_hash ();
}
2170
/* Record about how many calls would benefit from given type to be final
   (used by -Wsuggest-final-types).  */

struct odr_type_warn_count
{
  /* The ODR type in question.  */
  tree type;
  /* Number of call sites that would be devirtualizable.  */
  int count;
  /* Accumulated profile count of those call sites.  */
  gcov_type dyn_count;
};
2179
/* Record about how many calls would benefit from given method to be final
   (used by -Wsuggest-final-methods).  */

struct decl_warn_count
{
  /* The method declaration in question.  */
  tree decl;
  /* Number of call sites that would be devirtualizable.  */
  int count;
  /* Accumulated profile count of those call sites.  */
  gcov_type dyn_count;
};
2188
/* Information about type and decl warnings.  */

struct final_warning_record
{
  /* Profile count of the call site currently being analyzed; folded
     into the per-type/per-decl records below.  */
  gcov_type dyn_count;
  /* Indexed by odr_type id; drives -Wsuggest-final-types.  */
  vec<odr_type_warn_count> type_warnings;
  /* Keyed by method decl; drives -Wsuggest-final-methods.  */
  hash_map<tree, decl_warn_count> decl_warnings;
};
/* Non-NULL only while the final-type/final-method warnings are being
   collected (see ipa_devirt).  */
struct final_warning_record *final_warning_records;
2198
/* Return vector containing possible targets of polymorphic call of type
   OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
   If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
   OTR_TYPE and include their virtual method.  This is useful for types
   possibly in construction or destruction where the virtual table may
   temporarily change to one of base types.  INCLUDE_DERIVED_TYPES makes
   us walk the inheritance graph for all derivations.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
   in the target cache.  If user needs to visit every target list
   just once, it can memoize them.

   If SPECULATIVE is set, the list will not contain targets that
   are not speculatively taken.

   Returned vector is placed into cache.  It is NOT caller's responsibility
   to free it.  The vector can be freed on cgraph_remove_node call if
   the particular node is a virtual function present in the cache.  */

vec <cgraph_node *>
possible_polymorphic_call_targets (tree otr_type,
				   HOST_WIDE_INT otr_token,
				   ipa_polymorphic_call_context context,
				   bool *completep,
				   void **cache_token,
				   bool speculative)
{
  static struct cgraph_node_hook_list *node_removal_hook_holder;
  vec <cgraph_node *> nodes = vNULL;
  auto_vec <tree, 8> bases_to_consider;
  odr_type type, outer_type;
  polymorphic_call_target_d key;
  polymorphic_call_target_d **slot;
  unsigned int i;
  tree binfo, target;
  bool complete;
  bool can_refer = false;
  bool skipped = false;

  otr_type = TYPE_MAIN_VARIANT (otr_type);

  /* If ODR is not initialized or the context is invalid, return empty
     incomplete list.  */
  if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
    {
      if (completep)
	*completep = context.invalid;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }

  /* Do not bother to compute speculative info when user does not ask
     for it.  */
  if (!speculative || !context.speculative_outer_type)
    context.clear_speculation ();

  type = get_odr_type (otr_type, true);

  /* Recording type variants would waste results cache.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* Lookup the outer class type we want to walk.
     If we fail to do so, the context is invalid.  */
  if ((context.outer_type || context.speculative_outer_type)
      && !context.restrict_to_inner_class (otr_type))
    {
      if (completep)
	*completep = true;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }
  gcc_assert (!context.invalid);

  /* Check that restrict_to_inner_class kept the main variant.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* We canonicalize our query, so we do not need extra hashtable entries.  */

  /* Without outer type, we have no use for offset.  Just do the
     basic search from inner type.  */
  if (!context.outer_type)
    context.clear_outer_type (otr_type);
  /* We need to update our hierarchy if the type does not exist.  */
  outer_type = get_odr_type (context.outer_type, true);
  /* If the type is complete, there are no derivations.  */
  if (TYPE_FINAL_P (outer_type->type))
    context.maybe_derived_type = false;

  /* Initialize query cache.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
      polymorphic_call_target_hash
	= new polymorphic_call_target_hash_type (23);
      /* Register hooks that flush the cache when a cached symbol goes
	 away.  */
      if (!node_removal_hook_holder)
	{
	  node_removal_hook_holder =
	    symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
	  symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
					    NULL);
	}
    }

  /* At LTO time, canonicalize context types into the ODR type table so
     cache keys from different units agree.  */
  if (in_lto_p)
    {
      if (context.outer_type != otr_type)
	context.outer_type
	  = get_odr_type (context.outer_type, true)->type;
      if (context.speculative_outer_type)
	context.speculative_outer_type
	  = get_odr_type (context.speculative_outer_type, true)->type;
    }

  /* Lookup cached answer.  */
  key.type = type;
  key.otr_token = otr_token;
  key.speculative = speculative;
  key.context = context;
  slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
  if (cache_token)
    *cache_token = (void *)*slot;
  if (*slot)
    {
      /* Cache hit: update warning statistics attached to the entry and
	 return the memoized target list.  */
      if (completep)
	*completep = (*slot)->complete;
      if ((*slot)->type_warning && final_warning_records)
	{
	  final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
	  final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
	    += final_warning_records->dyn_count;
	}
      if (!speculative && (*slot)->decl_warning && final_warning_records)
	{
	  struct decl_warn_count *c =
	    final_warning_records->decl_warnings.get ((*slot)->decl_warning);
	  c->count++;
	  c->dyn_count += final_warning_records->dyn_count;
	}
      return (*slot)->targets;
    }

  complete = true;

  /* Do actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *)*slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;
  (*slot)->speculative = speculative;

  hash_set<tree> inserted;
  hash_set<tree> matched_vtables;

  /* First insert targets we speculatively identified as likely.  */
  if (context.speculative_outer_type)
    {
      odr_type speculative_outer_type;
      bool speculation_complete = true;

      /* First insert target from type itself and check if it may have
	 derived types.  */
      speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
      if (TYPE_FINAL_P (speculative_outer_type->type))
	context.speculative_maybe_derived_type = false;
      binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
				   context.speculative_offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	target = NULL;

      /* In the case we get complete method, we don't need
	 to walk derivations.  */
      if (target && DECL_FINAL_P (target))
	context.speculative_maybe_derived_type = false;
      if (type_possibly_instantiated_p (speculative_outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));


      /* Next walk recursively all derived types.  */
      if (context.speculative_maybe_derived_type)
	for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
	  possible_polymorphic_call_targets_1 (nodes, &inserted,
					       &matched_vtables,
					       otr_type,
					       speculative_outer_type->derived_types[i],
					       otr_token, speculative_outer_type->type,
					       context.speculative_offset,
					       &speculation_complete,
					       bases_to_consider,
					       false);
    }

  /* The non-speculative part is skipped only when a speculative answer
     was requested and speculation already yielded targets.  */
  if (!speculative || !nodes.length ())
    {
      /* First see virtual method of type itself.  */
      binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
				   context.offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	{
	  /* A missing binfo can only come from an already-diagnosed
	     ODR violation.  */
	  gcc_assert (odr_violation_reported);
	  target = NULL;
	}

      /* Destructors are never called through construction virtual tables,
	 because the type is always known.  */
      if (target && DECL_CXX_DESTRUCTOR_P (target))
	context.maybe_in_construction = false;

      if (target)
	{
	  /* In the case we get complete method, we don't need
	     to walk derivations.  */
	  if (DECL_FINAL_P (target))
	    context.maybe_derived_type = false;
	}

      /* If OUTER_TYPE is abstract, we know we are not seeing its instance.  */
      if (type_possibly_instantiated_p (outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &complete);
      else
	skipped = true;

      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));

      /* Next walk recursively all derived types.  */
      if (context.maybe_derived_type)
	{
	  for (i = 0; i < outer_type->derived_types.length(); i++)
	    possible_polymorphic_call_targets_1 (nodes, &inserted,
						 &matched_vtables,
						 otr_type,
						 outer_type->derived_types[i],
						 otr_token, outer_type->type,
						 context.offset, &complete,
						 bases_to_consider,
						 context.maybe_in_construction);

	  /* With unknown derivations the list cannot be complete; also
	     collect statistics for -Wsuggest-final-types and
	     -Wsuggest-final-methods.  */
	  if (!outer_type->all_derivations_known)
	    {
	      if (!speculative && final_warning_records)
		{
		  if (complete
		      && nodes.length () == 1
		      && warn_suggest_final_types
		      && !outer_type->derived_types.length ())
		    {
		      if (outer_type->id >= (int)final_warning_records->type_warnings.length ())
			final_warning_records->type_warnings.safe_grow_cleared
			  (odr_types.length ());
		      final_warning_records->type_warnings[outer_type->id].count++;
		      final_warning_records->type_warnings[outer_type->id].dyn_count
			+= final_warning_records->dyn_count;
		      final_warning_records->type_warnings[outer_type->id].type
			= outer_type->type;
		      /* Stored with a +1 bias so 0 can mean "none".  */
		      (*slot)->type_warning = outer_type->id + 1;
		    }
		  if (complete
		      && warn_suggest_final_methods
		      && nodes.length () == 1
		      && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
					     outer_type->type))
		    {
		      bool existed;
		      struct decl_warn_count &c =
			final_warning_records->decl_warnings.get_or_insert
			  (nodes[0]->decl, &existed);

		      if (existed)
			{
			  c.count++;
			  c.dyn_count += final_warning_records->dyn_count;
			}
		      else
			{
			  c.count = 1;
			  c.dyn_count = final_warning_records->dyn_count;
			  c.decl = nodes[0]->decl;
			}
		      (*slot)->decl_warning = nodes[0]->decl;
		    }
		}
	      complete = false;
	    }
	}

      if (!speculative)
	{
	  /* Destructors are never called through construction virtual tables,
	     because the type is always known.  One of entries may be
	     cxa_pure_virtual so look to at least two of them.  */
	  if (context.maybe_in_construction)
	    for (i =0 ; i < MIN (nodes.length (), 2); i++)
	      if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
		context.maybe_in_construction = false;
	  if (context.maybe_in_construction)
	    {
	      if (type != outer_type
		  && (!skipped
		      || (context.maybe_derived_type
			  && !type_all_derivations_known_p (outer_type->type))))
		record_targets_from_bases (otr_type, otr_token, outer_type->type,
					   context.offset, nodes, &inserted,
					   &matched_vtables, &complete);
	      if (skipped)
		maybe_record_node (nodes, target, &inserted, can_refer, &complete);
	      for (i = 0; i < bases_to_consider.length(); i++)
		maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
	    }
	}
    }

  /* Store the answer into the cache entry and return it.  */
  (*slot)->targets = nodes;
  (*slot)->complete = complete;
  if (completep)
    *completep = complete;

  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}
2532
/* Traversal callback for final_warning_records->decl_warnings: push a
   pointer to record VALUE into VEC so the records can be sorted and
   reported.  KEY is unused.  Returns true to continue traversal.  */
bool
add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
		  vec<const decl_warn_count*> *vec)
{
  vec->safe_push (&value);
  return true;
}
2540
2541 /* Dump target list TARGETS into FILE. */
2542
2543 static void
2544 dump_targets (FILE *f, vec <cgraph_node *> targets)
2545 {
2546 unsigned int i;
2547
2548 for (i = 0; i < targets.length (); i++)
2549 {
2550 char *name = NULL;
2551 if (in_lto_p)
2552 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
2553 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
2554 if (in_lto_p)
2555 free (name);
2556 if (!targets[i]->definition)
2557 fprintf (f, " (no definition%s)",
2558 DECL_DECLARED_INLINE_P (targets[i]->decl)
2559 ? " inline" : "");
2560 }
2561 fprintf (f, "\n");
2562 }
2563
/* Dump all possible targets of a polymorphic call described by OTR_TYPE,
   OTR_TOKEN and context CTX into F.  Dumps the full target list and,
   when it differs, the speculative one as well.  */

void
dump_possible_polymorphic_call_targets (FILE *f,
					tree otr_type,
					HOST_WIDE_INT otr_token,
					const ipa_polymorphic_call_context &ctx)
{
  vec <cgraph_node *> targets;
  bool final;
  odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
  unsigned int len;

  if (!type)
    return;
  /* Non-speculative (full) target list first.  */
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, false);
  fprintf (f, " Targets of polymorphic call of type %i:", type->id);
  print_generic_expr (f, type->type, TDF_SLIM);
  fprintf (f, " token %i\n", (int)otr_token);

  ctx.dump (f);

  fprintf (f, " %s%s%s%s\n ",
	   final ? "This is a complete list." :
	   "This is partial list; extra targets may be defined in other units.",
	   ctx.maybe_in_construction ? " (base types included)" : "",
	   ctx.maybe_derived_type ? " (derived types included)" : "",
	   ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
  len = targets.length ();
  dump_targets (f, targets);

  /* The speculative list is always a subset of the full one; dump it
     only when it actually differs.  */
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, true);
  gcc_assert (targets.length () <= len);
  if (targets.length () != len)
    {
      fprintf (f, " Speculative targets:");
      dump_targets (f, targets);
    }
  fprintf (f, "\n");
}
2608
2609
/* Return true if N can be possibly target of a polymorphic call of
   OTR_TYPE/OTR_TOKEN with context CTX.  */

bool
possible_polymorphic_call_target_p (tree otr_type,
				    HOST_WIDE_INT otr_token,
				    const ipa_polymorphic_call_context &ctx,
				    struct cgraph_node *n)
{
  vec <cgraph_node *> targets;
  unsigned int i;
  enum built_in_function fcode;
  bool final;

  /* __builtin_unreachable and __builtin_trap are always accepted as
     possible targets.  */
  if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
      && ((fcode = DECL_FUNCTION_CODE (n->decl))
	  == BUILT_IN_UNREACHABLE
	  || fcode == BUILT_IN_TRAP))
    return true;

  /* Without ODR info we cannot disprove anything.  */
  if (!odr_hash)
    return true;
  targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
  for (i = 0; i < targets.length (); i++)
    if (n->semantically_equivalent_p (targets[i]))
      return true;

  /* At the moment we allow middle end to dig out new external declarations
     as a targets of polymorphic calls.  */
  if (!final && !n->definition)
    return true;
  return false;
}
2643
2644
2645
/* Return true if N can be possibly target of a polymorphic call of
   OBJ_TYPE_REF expression REF in STMT.  */

bool
possible_polymorphic_call_target_p (tree ref,
				    gimple stmt,
				    struct cgraph_node *n)
{
  /* Build the polymorphic call context from the call statement and
     defer to the context-based overload.  */
  ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
  tree call_fn = gimple_call_fn (stmt);

  return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
					     tree_to_uhwi
					       (OBJ_TYPE_REF_TOKEN (call_fn)),
					     context,
					     n);
}
2663
2664
/* After callgraph construction new external nodes may appear.
   Add them into the graph; also flush the now-stale target cache.  */

void
update_type_inheritance_graph (void)
{
  struct cgraph_node *n;

  if (!odr_hash)
    return;
  free_polymorphic_call_targets_hash ();
  timevar_push (TV_IPA_INHERITANCE);
  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  Only virtual methods without a definition are of
     interest here -- defined ones were processed earlier.  */
  FOR_EACH_FUNCTION (n)
    if (DECL_VIRTUAL_P (n->decl)
	&& !n->definition
	&& n->real_symbol_p ())
      get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
		    true);
  timevar_pop (TV_IPA_INHERITANCE);
}
2687
2688
2689 /* Return true if N looks like likely target of a polymorphic call.
2690 Rule out cxa_pure_virtual, noreturns, function declared cold and
2691 other obvious cases. */
2692
2693 bool
2694 likely_target_p (struct cgraph_node *n)
2695 {
2696 int flags;
2697 /* cxa_pure_virtual and similar things are not likely. */
2698 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
2699 return false;
2700 flags = flags_from_decl_or_type (n->decl);
2701 if (flags & ECF_NORETURN)
2702 return false;
2703 if (lookup_attribute ("cold",
2704 DECL_ATTRIBUTES (n->decl)))
2705 return false;
2706 if (n->frequency < NODE_FREQUENCY_NORMAL)
2707 return false;
2708 /* If there are no virtual tables refering the target alive,
2709 the only way the target can be called is an instance comming from other
2710 compilation unit; speculative devirtualization is build around an
2711 assumption that won't happen. */
2712 if (!referenced_from_vtable_p (n))
2713 return false;
2714 return true;
2715 }
2716
2717 /* Compare type warning records P1 and P2 and chose one with larger count;
2718 helper for qsort. */
2719
2720 int
2721 type_warning_cmp (const void *p1, const void *p2)
2722 {
2723 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
2724 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
2725
2726 if (t1->dyn_count < t2->dyn_count)
2727 return 1;
2728 if (t1->dyn_count > t2->dyn_count)
2729 return -1;
2730 return t2->count - t1->count;
2731 }
2732
2733 /* Compare decl warning records P1 and P2 and chose one with larger count;
2734 helper for qsort. */
2735
2736 int
2737 decl_warning_cmp (const void *p1, const void *p2)
2738 {
2739 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
2740 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
2741
2742 if (t1->dyn_count < t2->dyn_count)
2743 return 1;
2744 if (t1->dyn_count > t2->dyn_count)
2745 return -1;
2746 return t2->count - t1->count;
2747 }
2748
2749
/* Try speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
   context CTX.  Return the single likely target, or NULL when there is
   none or it is not a suitable speculation candidate.  */

struct cgraph_node *
try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
				  ipa_polymorphic_call_context ctx)
{
  vec <cgraph_node *>targets
     = possible_polymorphic_call_targets
	  (otr_type, otr_token, ctx, NULL, NULL, true);
  unsigned int i;
  struct cgraph_node *likely_target = NULL;

  /* There must be exactly one likely target; give up on two or more.  */
  for (i = 0; i < targets.length (); i++)
    if (likely_target_p (targets[i]))
      {
	if (likely_target)
	  return NULL;
	likely_target = targets[i];
      }
  /* Reject undefined and external targets.  */
  if (!likely_target
      ||!likely_target->definition
      || DECL_EXTERNAL (likely_target->decl))
    return NULL;

  /* Don't use an implicitly-declared destructor (c++/58678).  */
  struct cgraph_node *non_thunk_target
    = likely_target->function_symbol ();
  if (DECL_ARTIFICIAL (non_thunk_target->decl))
    return NULL;
  /* Interposable targets that can also be discarded at link time are
     not good speculation candidates.  */
  if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
      && likely_target->can_be_discarded_p ())
    return NULL;
  return likely_target;
}
2785
2786 /* The ipa-devirt pass.
2787 When polymorphic call has only one likely target in the unit,
2788 turn it into speculative call. */
2789
2790 static unsigned int
2791 ipa_devirt (void)
2792 {
2793 struct cgraph_node *n;
2794 hash_set<void *> bad_call_targets;
2795 struct cgraph_edge *e;
2796
2797 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
2798 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
2799 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
2800
2801 if (!odr_types_ptr)
2802 return 0;
2803
2804 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
2805 This is implemented by setting up final_warning_records that are updated
2806 by get_polymorphic_call_targets.
2807 We need to clear cache in this case to trigger recomputation of all
2808 entries. */
2809 if (warn_suggest_final_methods || warn_suggest_final_types)
2810 {
2811 final_warning_records = new (final_warning_record);
2812 final_warning_records->type_warnings = vNULL;
2813 final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
2814 free_polymorphic_call_targets_hash ();
2815 }
2816
2817 FOR_EACH_DEFINED_FUNCTION (n)
2818 {
2819 bool update = false;
2820 if (!opt_for_fn (n->decl, flag_devirtualize))
2821 continue;
2822 if (dump_file && n->indirect_calls)
2823 fprintf (dump_file, "\n\nProcesing function %s/%i\n",
2824 n->name (), n->order);
2825 for (e = n->indirect_calls; e; e = e->next_callee)
2826 if (e->indirect_info->polymorphic)
2827 {
2828 struct cgraph_node *likely_target = NULL;
2829 void *cache_token;
2830 bool final;
2831
2832 if (final_warning_records)
2833 final_warning_records->dyn_count = e->count;
2834
2835 vec <cgraph_node *>targets
2836 = possible_polymorphic_call_targets
2837 (e, &final, &cache_token, true);
2838 unsigned int i;
2839
2840 /* Trigger warnings by calculating non-speculative targets. */
2841 if (warn_suggest_final_methods || warn_suggest_final_types)
2842 possible_polymorphic_call_targets (e);
2843
2844 if (dump_file)
2845 dump_possible_polymorphic_call_targets
2846 (dump_file, e);
2847
2848 npolymorphic++;
2849
2850 if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
2851 continue;
2852
2853 if (!e->maybe_hot_p ())
2854 {
2855 if (dump_file)
2856 fprintf (dump_file, "Call is cold\n\n");
2857 ncold++;
2858 continue;
2859 }
2860 if (e->speculative)
2861 {
2862 if (dump_file)
2863 fprintf (dump_file, "Call is aready speculated\n\n");
2864 nspeculated++;
2865
2866 /* When dumping see if we agree with speculation. */
2867 if (!dump_file)
2868 continue;
2869 }
2870 if (bad_call_targets.contains (cache_token))
2871 {
2872 if (dump_file)
2873 fprintf (dump_file, "Target list is known to be useless\n\n");
2874 nmultiple++;
2875 continue;
2876 }
2877 for (i = 0; i < targets.length (); i++)
2878 if (likely_target_p (targets[i]))
2879 {
2880 if (likely_target)
2881 {
2882 likely_target = NULL;
2883 if (dump_file)
2884 fprintf (dump_file, "More than one likely target\n\n");
2885 nmultiple++;
2886 break;
2887 }
2888 likely_target = targets[i];
2889 }
2890 if (!likely_target)
2891 {
2892 bad_call_targets.add (cache_token);
2893 continue;
2894 }
2895 /* This is reached only when dumping; check if we agree or disagree
2896 with the speculation. */
2897 if (e->speculative)
2898 {
2899 struct cgraph_edge *e2;
2900 struct ipa_ref *ref;
2901 e->speculative_call_info (e2, e, ref);
2902 if (e2->callee->ultimate_alias_target ()
2903 == likely_target->ultimate_alias_target ())
2904 {
2905 fprintf (dump_file, "We agree with speculation\n\n");
2906 nok++;
2907 }
2908 else
2909 {
2910 fprintf (dump_file, "We disagree with speculation\n\n");
2911 nwrong++;
2912 }
2913 continue;
2914 }
2915 if (!likely_target->definition)
2916 {
2917 if (dump_file)
2918 fprintf (dump_file, "Target is not an definition\n\n");
2919 nnotdefined++;
2920 continue;
2921 }
2922 /* Do not introduce new references to external symbols. While we
2923 can handle these just well, it is common for programs to
2924 incorrectly with headers defining methods they are linked
2925 with. */
2926 if (DECL_EXTERNAL (likely_target->decl))
2927 {
2928 if (dump_file)
2929 fprintf (dump_file, "Target is external\n\n");
2930 nexternal++;
2931 continue;
2932 }
2933 /* Don't use an implicitly-declared destructor (c++/58678). */
2934 struct cgraph_node *non_thunk_target
2935 = likely_target->function_symbol ();
2936 if (DECL_ARTIFICIAL (non_thunk_target->decl))
2937 {
2938 if (dump_file)
2939 fprintf (dump_file, "Target is artificial\n\n");
2940 nartificial++;
2941 continue;
2942 }
2943 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
2944 && likely_target->can_be_discarded_p ())
2945 {
2946 if (dump_file)
2947 fprintf (dump_file, "Target is overwritable\n\n");
2948 noverwritable++;
2949 continue;
2950 }
2951 else if (dbg_cnt (devirt))
2952 {
2953 if (dump_enabled_p ())
2954 {
2955 location_t locus = gimple_location_safe (e->call_stmt);
2956 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
2957 "speculatively devirtualizing call in %s/%i to %s/%i\n",
2958 n->name (), n->order,
2959 likely_target->name (),
2960 likely_target->order);
2961 }
2962 if (!likely_target->can_be_discarded_p ())
2963 {
2964 cgraph_node *alias;
2965 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
2966 if (alias)
2967 likely_target = alias;
2968 }
2969 nconverted++;
2970 update = true;
2971 e->make_speculative
2972 (likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
2973 }
2974 }
2975 if (update)
2976 inline_update_overall_summary (n);
2977 }
2978 if (warn_suggest_final_methods || warn_suggest_final_types)
2979 {
2980 if (warn_suggest_final_types)
2981 {
2982 final_warning_records->type_warnings.qsort (type_warning_cmp);
2983 for (unsigned int i = 0;
2984 i < final_warning_records->type_warnings.length (); i++)
2985 if (final_warning_records->type_warnings[i].count)
2986 {
2987 tree type = final_warning_records->type_warnings[i].type;
2988 int count = final_warning_records->type_warnings[i].count;
2989 long long dyn_count
2990 = final_warning_records->type_warnings[i].dyn_count;
2991
2992 if (!dyn_count)
2993 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
2994 OPT_Wsuggest_final_types, count,
2995 "Declaring type %qD final "
2996 "would enable devirtualization of %i call",
2997 "Declaring type %qD final "
2998 "would enable devirtualization of %i calls",
2999 type,
3000 count);
3001 else
3002 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3003 OPT_Wsuggest_final_types, count,
3004 "Declaring type %qD final "
3005 "would enable devirtualization of %i call "
3006 "executed %lli times",
3007 "Declaring type %qD final "
3008 "would enable devirtualization of %i calls "
3009 "executed %lli times",
3010 type,
3011 count,
3012 dyn_count);
3013 }
3014 }
3015
3016 if (warn_suggest_final_methods)
3017 {
3018 vec<const decl_warn_count*> decl_warnings_vec = vNULL;
3019
3020 final_warning_records->decl_warnings.traverse
3021 <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
3022 decl_warnings_vec.qsort (decl_warning_cmp);
3023 for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
3024 {
3025 tree decl = decl_warnings_vec[i]->decl;
3026 int count = decl_warnings_vec[i]->count;
3027 long long dyn_count = decl_warnings_vec[i]->dyn_count;
3028
3029 if (!dyn_count)
3030 if (DECL_CXX_DESTRUCTOR_P (decl))
3031 warning_n (DECL_SOURCE_LOCATION (decl),
3032 OPT_Wsuggest_final_methods, count,
3033 "Declaring virtual destructor of %qD final "
3034 "would enable devirtualization of %i call",
3035 "Declaring virtual destructor of %qD final "
3036 "would enable devirtualization of %i calls",
3037 DECL_CONTEXT (decl), count);
3038 else
3039 warning_n (DECL_SOURCE_LOCATION (decl),
3040 OPT_Wsuggest_final_methods, count,
3041 "Declaring method %qD final "
3042 "would enable devirtualization of %i call",
3043 "Declaring method %qD final "
3044 "would enable devirtualization of %i calls",
3045 decl, count);
3046 else if (DECL_CXX_DESTRUCTOR_P (decl))
3047 warning_n (DECL_SOURCE_LOCATION (decl),
3048 OPT_Wsuggest_final_methods, count,
3049 "Declaring virtual destructor of %qD final "
3050 "would enable devirtualization of %i call "
3051 "executed %lli times",
3052 "Declaring virtual destructor of %qD final "
3053 "would enable devirtualization of %i calls "
3054 "executed %lli times",
3055 DECL_CONTEXT (decl), count, dyn_count);
3056 else
3057 warning_n (DECL_SOURCE_LOCATION (decl),
3058 OPT_Wsuggest_final_methods, count,
3059 "Declaring method %qD final "
3060 "would enable devirtualization of %i call "
3061 "executed %lli times",
3062 "Declaring method %qD final "
3063 "would enable devirtualization of %i calls "
3064 "executed %lli times",
3065 decl, count, dyn_count);
3066 }
3067 }
3068
3069 delete (final_warning_records);
3070 final_warning_records = 0;
3071 }
3072
3073 if (dump_file)
3074 fprintf (dump_file,
3075 "%i polymorphic calls, %i devirtualized,"
3076 " %i speculatively devirtualized, %i cold\n"
3077 "%i have multiple targets, %i overwritable,"
3078 " %i already speculated (%i agree, %i disagree),"
3079 " %i external, %i not defined, %i artificial\n",
3080 npolymorphic, ndevirtualized, nconverted, ncold,
3081 nmultiple, noverwritable, nspeculated, nok, nwrong,
3082 nexternal, nnotdefined, nartificial);
3083 return ndevirtualized ? TODO_remove_functions : 0;
3084 }
3085
3086 namespace {
3087
3088 const pass_data pass_data_ipa_devirt =
3089 {
3090 IPA_PASS, /* type */
3091 "devirt", /* name */
3092 OPTGROUP_NONE, /* optinfo_flags */
3093 TV_IPA_DEVIRT, /* tv_id */
3094 0, /* properties_required */
3095 0, /* properties_provided */
3096 0, /* properties_destroyed */
3097 0, /* todo_flags_start */
3098 ( TODO_dump_symtab ), /* todo_flags_finish */
3099 };
3100
3101 class pass_ipa_devirt : public ipa_opt_pass_d
3102 {
3103 public:
3104 pass_ipa_devirt (gcc::context *ctxt)
3105 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3106 NULL, /* generate_summary */
3107 NULL, /* write_summary */
3108 NULL, /* read_summary */
3109 NULL, /* write_optimization_summary */
3110 NULL, /* read_optimization_summary */
3111 NULL, /* stmt_fixup */
3112 0, /* function_transform_todo_flags_start */
3113 NULL, /* function_transform */
3114 NULL) /* variable_transform */
3115 {}
3116
3117 /* opt_pass methods: */
3118 virtual bool gate (function *)
3119 {
3120 /* In LTO, always run the IPA passes and decide on function basis if the
3121 pass is enabled. */
3122 if (in_lto_p)
3123 return true;
3124 return (flag_devirtualize
3125 && (flag_devirtualize_speculatively
3126 || (warn_suggest_final_methods
3127 || warn_suggest_final_types))
3128 && optimize);
3129 }
3130
3131 virtual unsigned int execute (function *) { return ipa_devirt (); }
3132
3133 }; // class pass_ipa_devirt
3134
3135 } // anon namespace
3136
3137 ipa_opt_pass_d *
3138 make_pass_ipa_devirt (gcc::context *ctxt)
3139 {
3140 return new pass_ipa_devirt (ctxt);
3141 }
3142
3143 #include "gt-ipa-devirt.h"