]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-devirt.c
decl.c (start_decl): Look through member variable template.
[thirdparty/gcc.git] / gcc / ipa-devirt.c
CommitLineData
eefe9a99
JH
1/* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
23a5b65a 3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
eefe9a99
JH
4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22/* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
39 OTR = OBJ_TYPE_REF
0e1474e5 40 This is the Gimple representation of type information of a polymorphic call.
eefe9a99
JH
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
0e1474e5 43 otr_token is the index into virtual table where address is taken.
eefe9a99
JH
44
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
0e1474e5 58 virtual table of the base type. Also BINFO_OFFSET specifies
eefe9a99
JH
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of base binfos (that differs of BINFO_VTABLE of
65 binfo associated to the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
0e1474e5 75 or from DECL_VINDEX of a given virtual table.
eefe9a99
JH
76
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
0e1474e5 89 inserted into the graph. Also types without virtual methods are not
eefe9a99
JH
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
97
0e1474e5 98 Edges are represented by odr_type->base and odr_type->derived_types.
eefe9a99
JH
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given an parameters found in
103 indirect polymorphic edge all possible polymorphic call targets of the call.
bbc9396b
JH
104
105 pass_ipa_devirt performs simple speculative devirtualization.
eefe9a99
JH
106*/
107
108#include "config.h"
109#include "system.h"
110#include "coretypes.h"
111#include "tm.h"
4d648807 112#include "tree.h"
d8a2d370
DN
113#include "print-tree.h"
114#include "calls.h"
eefe9a99 115#include "cgraph.h"
d8a2d370 116#include "expr.h"
eefe9a99 117#include "tree-pass.h"
6e2830c3 118#include "hash-set.h"
eefe9a99
JH
119#include "target.h"
120#include "hash-table.h"
6d8eb96b 121#include "inchash.h"
eefe9a99
JH
122#include "tree-pretty-print.h"
123#include "ipa-utils.h"
2fb9a547
AM
124#include "tree-ssa-alias.h"
125#include "internal-fn.h"
126#include "gimple-fold.h"
127#include "gimple-expr.h"
eefe9a99 128#include "gimple.h"
bbc9396b 129#include "ipa-inline.h"
61a74079 130#include "diagnostic.h"
68377e53 131#include "tree-dfa.h"
ec77d61f 132#include "demangle.h"
2b5f0895 133#include "dbgcnt.h"
7d0aa05b 134#include "gimple-pretty-print.h"
c59f7203
JH
135#include "stor-layout.h"
136#include "intl.h"
91bc34a9 137#include "hash-map.h"
c59f7203 138
6e2830c3
TS
139static bool odr_types_equivalent_p (tree, tree, bool, bool *,
140 hash_set<tree> *);
ec77d61f
JH
141
142static bool odr_violation_reported = false;
68377e53 143
eefe9a99 144
0e1474e5 145/* Pointer set of all call targets appearing in the cache. */
6e2830c3 146static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
0e1474e5 147
eefe9a99
JH
/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type; representative of the ODR equivalence class.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  Used for fast membership
     tests alongside the TYPES vector above.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace?  */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type?  */
  bool all_derivations_known;
  /* Did we report ODR violation here?  */
  bool odr_violated;
};
176
3339f0bc
JH
177static bool contains_type_p (tree, HOST_WIDE_INT, tree);
178
eefe9a99 179
0e1474e5
JH
/* Return true if BINFO corresponds to a type with virtual methods.

   Every type has several BINFOs.  One is the BINFO associated by the type
   while other represents bases of derived types.  The BINFOs representing
   bases do not have BINFO_VTABLE pointer set when this is the single
   inheritance (because vtables are shared).  Look up the BINFO of type
   and check presence of its vtable.  */

static inline bool
polymorphic_type_binfo_p (tree binfo)
{
  /* See if BINFO's type has a virtual table associated with it.  */
  return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
}
194
2d1644bf
JH
/* Return TRUE if all derived types of T are known and thus
   we may consider the walk of derived type complete.

   This is typically true only for final anonymous namespace types and types
   defined within functions (that may be COMDAT and thus shared across units,
   but with the same set of derived types).  */

static bool
type_all_derivations_known_p (tree t)
{
  if (TYPE_FINAL_P (t))
    return true;
  /* In ltrans mode we see only a fragment of the program, so other
     units may contain derivations we do not know about.  */
  if (flag_ltrans)
    return false;
  if (type_in_anonymous_namespace_p (t))
    return true;
  /* Function-local types can not be derived from in other units.  */
  return (decl_function_context (TYPE_NAME (t)) != NULL);
}
213
214/* Return TURE if type's constructors are all visible. */
215
216static bool
217type_all_ctors_visible_p (tree t)
218{
219 return !flag_ltrans
220 && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
221 /* We can not always use type_all_derivations_known_p.
222 For function local types we must assume case where
223 the function is COMDAT and shared in between units.
224
225 TODO: These cases are quite easy to get, but we need
226 to keep track of C++ privatizing via -Wno-weak
227 as well as the IPA privatizing. */
228 && type_in_anonymous_namespace_p (t);
229}
230
/* Return TRUE if type may have instance.  */

static bool
type_possibly_instantiated_p (tree t)
{
  tree vtable;
  varpool_node *vnode;

  /* TODO: Add abstract types here.  */
  if (!type_all_ctors_visible_p (t))
    return true;

  vtable = BINFO_VTABLE (TYPE_BINFO (t));
  /* The vtable reference may be wrapped in an offset expression;
     strip down to the vtable declaration itself.  */
  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
  vnode = varpool_node::get (vtable);
  /* With all ctors visible, the type is instantiated only if its
     vtable is actually defined in this unit.  */
  return vnode && vnode->definition;
}
249
eefe9a99
JH
/* One Definition Rule hashtable helpers.  */

struct odr_hasher
{
  typedef odr_type_d value_type;
  typedef union tree_node compare_type;
  /* Hash an odr_type node by its type's name.  */
  static inline hashval_t hash (const value_type *);
  /* Compare a stored odr_type with a lookup tree node.  */
  static inline bool equal (const value_type *, const compare_type *);
  /* Release memory held by a removed node.  */
  static inline void remove (value_type *);
};
260
549bcbd1
JH
/* Return type that was declared with T's name so that T is a
   qualified variant of it.  */

static inline tree
main_odr_variant (const_tree t)
{
  if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
    return TREE_TYPE (TYPE_NAME (t));
  /* Unnamed types and non-C++ produced types can be compared by variants.  */
  else
    return TYPE_MAIN_VARIANT (t);
}
273
eefe9a99
JH
/* Produce hash based on type name.  */

static hashval_t
hash_type_name (tree t)
{
  gcc_checking_assert (main_odr_variant (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* For polymorphic types, we can simply hash the virtual table.  */
  if (TREE_CODE (t) == RECORD_TYPE
      && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
    {
      tree v = BINFO_VTABLE (TYPE_BINFO (t));
      hashval_t hash = 0;

      /* The vtable reference may be offset by a constant
	 (POINTER_PLUS_EXPR); fold the offset into the hash and strip
	 down to the vtable declaration itself.  */
      if (TREE_CODE (v) == POINTER_PLUS_EXPR)
	{
	  hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
	  v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
	}

      /* Assembler names are unified across units, so their pointer
	 identity gives a stable hash in LTO.  */
      v = DECL_ASSEMBLER_NAME (v);
      hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
      return hash;
    }

  /* Rest is not implemented yet.  */
  gcc_unreachable ();
}
311
/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_hasher::hash (const value_type *odr_type)
{
  return hash_type_name (odr_type->type);
}
319
549bcbd1
JH
320/* For languages with One Definition Rule, work out if
321 types are the same based on their name.
322
323 This is non-trivial for LTO where minnor differences in
324 the type representation may have prevented type merging
325 to merge two copies of otherwise equivalent type.
326
327 Until we start streaming mangled type names, this function works
328 only for polymorphic types. */
329
330bool
331types_same_for_odr (const_tree type1, const_tree type2)
332{
333 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
334
335 type1 = main_odr_variant (type1);
336 type2 = main_odr_variant (type2);
337
338 if (type1 == type2)
339 return true;
340
341 if (!in_lto_p)
342 return false;
343
344 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
345 on the corresponding TYPE_STUB_DECL. */
346 if (type_in_anonymous_namespace_p (type1)
347 || type_in_anonymous_namespace_p (type2))
348 return false;
349
01a92e70
JH
350 /* See if types are obvoiusly different (i.e. different codes
351 or polymorphis wrt non-polymorphic). This is not strictly correct
352 for ODR violating programs, but we can't do better without streaming
353 ODR names. */
354 if (TREE_CODE (type1) != TREE_CODE (type2))
355 return false;
356 if (TREE_CODE (type1) == RECORD_TYPE
357 && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type1) == NULL_TREE))
358 return false;
359 if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
360 && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
361 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
362 return false;
363
549bcbd1
JH
364 /* At the moment we have no way to establish ODR equivlaence at LTO
365 other than comparing virtual table pointrs of polymorphic types.
366 Eventually we should start saving mangled names in TYPE_NAME.
367 Then this condition will become non-trivial. */
368
369 if (TREE_CODE (type1) == RECORD_TYPE
370 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
371 && BINFO_VTABLE (TYPE_BINFO (type1))
372 && BINFO_VTABLE (TYPE_BINFO (type2)))
373 {
374 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
375 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
376 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
377 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
378 return (operand_equal_p (TREE_OPERAND (v1, 1),
379 TREE_OPERAND (v2, 1), 0)
380 && DECL_ASSEMBLER_NAME
381 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
382 == DECL_ASSEMBLER_NAME
383 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
384 }
385 gcc_unreachable ();
386}
387
388
/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_hasher::equal (const value_type *t1, const compare_type *ct2)
{
  tree t2 = const_cast <tree> (ct2);

  gcc_checking_assert (main_odr_variant (t2) == t2);
  if (t1->type == t2)
    return true;
  /* Outside of LTO main variants are unique, so the pointer test above
     is decisive.  */
  if (!in_lto_p)
    return false;
  return types_same_for_odr (t1->type, t2);
}
404
/* Free ODR type V.  */

inline void
odr_hasher::remove (value_type *v)
{
  v->bases.release ();
  v->derived_types.release ();
  if (v->types_set)
    delete v->types_set;
  /* The node itself is GC allocated (see GTY(()) on odr_type_d).  */
  ggc_free (v);
}
416
417/* ODR type hash used to lookup ODR type based on tree type node. */
418
c203e8a7
TS
419typedef hash_table<odr_hasher> odr_hash_type;
420static odr_hash_type *odr_hash;
eefe9a99
JH
421
422/* ODR types are also stored into ODR_TYPE vector to allow consistent
423 walking. Bases appear before derived types. Vector is garbage collected
424 so we won't end up visiting empty types. */
425
426static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
427#define odr_types (*odr_types_ptr)
428
c7e1befa
JH
429/* Set TYPE_BINFO of TYPE and its variants to BINFO. */
430void
431set_type_binfo (tree type, tree binfo)
432{
433 for (; type; type = TYPE_NEXT_VARIANT (type))
434 if (COMPLETE_TYPE_P (type))
435 TYPE_BINFO (type) = binfo;
436 else
437 gcc_assert (!TYPE_BINFO (type));
438}
439
c59f7203
JH
/* Compare T1 and T2 based on name or structure.  */

static bool
odr_subtypes_equivalent_p (tree t1, tree t2, hash_set<tree> *visited)
{
  bool an1, an2;

  /* This can happen in incomplete types that should be handled earlier.  */
  gcc_assert (t1 && t2);

  t1 = main_odr_variant (t1);
  t2 = main_odr_variant (t2);
  if (t1 == t2)
    return true;
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;
  /* Named vs unnamed, or differently named, types can not match.  */
  if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
    return false;
  if (TYPE_NAME (t1) && DECL_NAME (TYPE_NAME (t1)) != DECL_NAME (TYPE_NAME (t2)))
    return false;

  /* Anonymous namespace types must match exactly.  */
  an1 = type_in_anonymous_namespace_p (t1);
  an2 = type_in_anonymous_namespace_p (t2);
  if (an1 != an2 || an1)
    return false;

  /* For types where we can not establish ODR equivalency, recurse and deeply
     compare.  */
  if (TREE_CODE (t1) != RECORD_TYPE
      || !TYPE_BINFO (t1) || !TYPE_BINFO (t2)
      || !polymorphic_type_binfo_p (TYPE_BINFO (t1))
      || !polymorphic_type_binfo_p (TYPE_BINFO (t2)))
    {
      /* This should really be a pair hash, but for the moment we do not need
	 100% reliability and it would be better to compare all ODR types so
	 recursion here is needed only for component types.  */
      if (visited->add (t1))
	return true;
      return odr_types_equivalent_p (t1, t2, false, NULL, visited);
    }
  /* Polymorphic RECORD_TYPEs can be compared by name/vtable.  */
  return types_same_for_odr (t1, t2);
}
483
56b1f114
JH
484/* Compare two virtual tables, PREVAILING and VTABLE and output ODR
485 violation warings. */
486
487void
488compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
489{
490 int n1, n2;
491 if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
492 {
493 odr_violation_reported = true;
494 if (DECL_VIRTUAL_P (prevailing->decl))
495 {
496 varpool_node *tmp = prevailing;
497 prevailing = vtable;
498 vtable = tmp;
499 }
500 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
501 OPT_Wodr,
502 "virtual table of type %qD violates one definition rule",
503 DECL_CONTEXT (vtable->decl)))
504 inform (DECL_SOURCE_LOCATION (prevailing->decl),
505 "variable of same assembler name as the virtual table is "
506 "defined in another translation unit");
507 return;
508 }
509 if (!prevailing->definition || !vtable->definition)
510 return;
511 for (n1 = 0, n2 = 0; true; n1++, n2++)
512 {
513 struct ipa_ref *ref1, *ref2;
514 bool end1, end2;
515 end1 = !prevailing->iterate_reference (n1, ref1);
516 end2 = !vtable->iterate_reference (n2, ref2);
517 if (end1 && end2)
518 return;
519 if (!end1 && !end2
520 && DECL_ASSEMBLER_NAME (ref1->referred->decl)
521 != DECL_ASSEMBLER_NAME (ref2->referred->decl)
522 && !n2
523 && !DECL_VIRTUAL_P (ref2->referred->decl)
524 && DECL_VIRTUAL_P (ref1->referred->decl))
525 {
526 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
527 "virtual table of type %qD contains RTTI information",
528 DECL_CONTEXT (vtable->decl)))
529 {
530 inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
531 "but is prevailed by one without from other translation unit");
532 inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
533 "RTTI will not work on this type");
534 }
535 n2++;
536 end2 = !vtable->iterate_reference (n2, ref2);
537 }
538 if (!end1 && !end2
539 && DECL_ASSEMBLER_NAME (ref1->referred->decl)
540 != DECL_ASSEMBLER_NAME (ref2->referred->decl)
541 && !n1
542 && !DECL_VIRTUAL_P (ref1->referred->decl)
543 && DECL_VIRTUAL_P (ref2->referred->decl))
544 {
545 n1++;
546 end1 = !vtable->iterate_reference (n1, ref1);
547 }
548 if (end1 || end2)
549 {
550 if (end1)
551 {
552 varpool_node *tmp = prevailing;
553 prevailing = vtable;
554 vtable = tmp;
555 ref1 = ref2;
556 }
557 if (warning_at (DECL_SOURCE_LOCATION
558 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
559 "virtual table of type %qD violates "
560 "one definition rule",
561 DECL_CONTEXT (vtable->decl)))
562 {
563 inform (DECL_SOURCE_LOCATION
564 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
565 "the conflicting type defined in another translation "
566 "unit");
567 inform (DECL_SOURCE_LOCATION
568 (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
569 "contains additional virtual method %qD",
570 ref1->referred->decl);
571 }
572 return;
573 }
574 if (DECL_ASSEMBLER_NAME (ref1->referred->decl)
575 != DECL_ASSEMBLER_NAME (ref2->referred->decl))
576 {
577 if (warning_at (DECL_SOURCE_LOCATION
578 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
579 "virtual table of type %qD violates "
580 "one definition rule ",
581 DECL_CONTEXT (vtable->decl)))
582 {
583 inform (DECL_SOURCE_LOCATION
584 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
585 "the conflicting type defined in another translation "
586 "unit");
587 inform (DECL_SOURCE_LOCATION (ref1->referred->decl),
588 "virtual method %qD", ref1->referred->decl);
589 inform (DECL_SOURCE_LOCATION (ref2->referred->decl),
590 "ought to match virtual method %qD but does not",
591 ref2->referred->decl);
592 return;
593 }
594 }
595 }
596}
597
c59f7203
JH
/* Output ODR violation warning about T1 and T2 with REASON.
   Display location of ST1 and ST2 if REASON speaks about field or
   method of the type.
   If WARN is false, do nothing.  Set WARNED if warning was indeed
   output.  */

void
warn_odr (tree t1, tree t2, tree st1, tree st2,
	  bool warn, bool *warned, const char *reason)
{
  tree decl2 = TYPE_NAME (t2);

  if (!warn)
    return;
  if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
		   "type %qT violates one definition rule",
		   t1))
    return;
  if (!st1)
    /* No sub-declaration to point at; fall through to the REASON note
       on the type itself.  */
    ;
  else if (TREE_CODE (st1) == FIELD_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is field %qD",
	      st1);
      /* Anchor the final REASON note at the mismatching field.  */
      decl2 = st2;
    }
  else if (TREE_CODE (st1) == FUNCTION_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is method %qD",
	      st1);
      decl2 = st2;
    }
  else
    /* NOTE(review): other ST1 kinds return without setting *WARNED even
       though a warning was already emitted above — confirm intended.  */
    return;
  inform (DECL_SOURCE_LOCATION (decl2), reason);

  if (warned)
    *warned = true;
}
643
/* We already warned about ODR mismatch.  T1 and T2 ought to be equivalent
   because they are used on same place in ODR matching types.
   They are not; inform the user.  */

void
warn_types_mismatch (tree t1, tree t2)
{
  /* Without names there is nothing useful to print.  */
  if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
    return;
  /* In Firefox it is a common bug to have same types but in
     different namespaces.  Be a bit more informative on
     this.  */
  if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
      && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
	    != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
	  || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
	      && (DECL_NAME (TYPE_CONTEXT (t1)) !=
		  DECL_NAME (TYPE_CONTEXT (t2))))))
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
	    "type %qT should match type %qT but is defined "
	    "in different namespace ",
	    t1, t2);
  else
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
	    "type %qT should match type %qT",
	    t1, t2);
  inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
	  "the incompatible type is defined here");
}
673
674/* Compare T1 and T2, report ODR violations if WARN is true and set
675 WARNED to true if anything is reported. Return true if types match.
676 If true is returned, the types are also compatible in the sense of
677 gimple_canonical_types_compatible_p. */
678
679static bool
6e2830c3 680odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned, hash_set<tree> *visited)
c59f7203
JH
681{
682 /* Check first for the obvious case of pointer identity. */
683 if (t1 == t2)
684 return true;
685 gcc_assert (!type_in_anonymous_namespace_p (t1));
686 gcc_assert (!type_in_anonymous_namespace_p (t2));
687
688 /* Can't be the same type if the types don't have the same code. */
689 if (TREE_CODE (t1) != TREE_CODE (t2))
690 {
691 warn_odr (t1, t2, NULL, NULL, warn, warned,
692 G_("a different type is defined in another translation unit"));
693 return false;
694 }
695
696 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
697 {
698 warn_odr (t1, t2, NULL, NULL, warn, warned,
699 G_("a type with different qualifiers is defined in another "
700 "translation unit"));
701 return false;
702 }
703
704 if (comp_type_attributes (t1, t2) != 1)
705 {
706 warn_odr (t1, t2, NULL, NULL, warn, warned,
707 G_("a type with attributes "
708 "is defined in another translation unit"));
709 return false;
710 }
711
712 if (TREE_CODE (t1) == ENUMERAL_TYPE)
713 {
714 tree v1, v2;
715 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
716 v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
717 {
718 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
719 {
720 warn_odr (t1, t2, NULL, NULL, warn, warned,
721 G_("an enum with different value name"
722 " is defined in another translation unit"));
723 return false;
724 }
725 if (TREE_VALUE (v1) != TREE_VALUE (v2)
726 && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
727 DECL_INITIAL (TREE_VALUE (v2)), 0))
728 {
729 warn_odr (t1, t2, NULL, NULL, warn, warned,
730 G_("an enum with different values is defined"
731 " in another translation unit"));
732 return false;
733 }
734 }
735 if (v1 || v2)
736 {
737 warn_odr (t1, t2, NULL, NULL, warn, warned,
738 G_("an enum with mismatching number of values "
739 "is defined in another translation unit"));
740 return false;
741 }
742 }
743
744 /* Non-aggregate types can be handled cheaply. */
745 if (INTEGRAL_TYPE_P (t1)
746 || SCALAR_FLOAT_TYPE_P (t1)
747 || FIXED_POINT_TYPE_P (t1)
748 || TREE_CODE (t1) == VECTOR_TYPE
749 || TREE_CODE (t1) == COMPLEX_TYPE
750 || TREE_CODE (t1) == OFFSET_TYPE
751 || POINTER_TYPE_P (t1))
752 {
753 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
754 {
755 warn_odr (t1, t2, NULL, NULL, warn, warned,
756 G_("a type with different precision is defined "
757 "in another translation unit"));
758 return false;
759 }
760 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
761 {
762 warn_odr (t1, t2, NULL, NULL, warn, warned,
763 G_("a type with different signedness is defined "
764 "in another translation unit"));
765 return false;
766 }
767
768 if (TREE_CODE (t1) == INTEGER_TYPE
769 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
770 {
771 /* char WRT uint_8? */
772 warn_odr (t1, t2, NULL, NULL, warn, warned,
773 G_("a different type is defined in another "
774 "translation unit"));
775 return false;
776 }
777
778 /* For canonical type comparisons we do not want to build SCCs
779 so we cannot compare pointed-to types. But we can, for now,
780 require the same pointed-to type kind and match what
781 useless_type_conversion_p would do. */
782 if (POINTER_TYPE_P (t1))
783 {
784 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
785 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
786 {
787 warn_odr (t1, t2, NULL, NULL, warn, warned,
788 G_("it is defined as a pointer in different address "
789 "space in another translation unit"));
790 return false;
791 }
792
793 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
794 {
795 warn_odr (t1, t2, NULL, NULL, warn, warned,
796 G_("it is defined as a pointer to different type "
797 "in another translation unit"));
798 if (warn && warned)
799 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
800 return false;
801 }
802 }
803
804 /* Tail-recurse to components. */
805 if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
806 && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
807 {
808 /* Probably specific enough. */
809 warn_odr (t1, t2, NULL, NULL, warn, warned,
810 G_("a different type is defined "
811 "in another translation unit"));
812 if (warn && warned)
813 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
814 return false;
815 }
816
817 gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
818 gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
819 TYPE_SIZE_UNIT (t2), 0));
820 gcc_assert (TYPE_MODE (t1) == TYPE_MODE (t2));
821
822 return true;
823 }
824
825 /* Do type-specific comparisons. */
826 switch (TREE_CODE (t1))
827 {
828 case ARRAY_TYPE:
829 {
830 /* Array types are the same if the element types are the same and
831 the number of elements are the same. */
832 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
833 {
834 warn_odr (t1, t2, NULL, NULL, warn, warned,
835 G_("a different type is defined in another "
836 "translation unit"));
837 if (warn && warned)
838 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
839 }
840 gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
841 gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
842 == TYPE_NONALIASED_COMPONENT (t2));
843
844 tree i1 = TYPE_DOMAIN (t1);
845 tree i2 = TYPE_DOMAIN (t2);
846
847 /* For an incomplete external array, the type domain can be
848 NULL_TREE. Check this condition also. */
849 if (i1 == NULL_TREE || i2 == NULL_TREE)
850 return true;
851
852 tree min1 = TYPE_MIN_VALUE (i1);
853 tree min2 = TYPE_MIN_VALUE (i2);
854 tree max1 = TYPE_MAX_VALUE (i1);
855 tree max2 = TYPE_MAX_VALUE (i2);
856
857 /* In C++, minimums should be always 0. */
858 gcc_assert (min1 == min2);
859 if (!operand_equal_p (max1, max2, 0))
860 {
861 warn_odr (t1, t2, NULL, NULL, warn, warned,
862 G_("an array of different size is defined "
863 "in another translation unit"));
864 return false;
865 }
866 gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
867 gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
868 TYPE_SIZE_UNIT (t2), 0));
869 }
870 return true;
871
872 case METHOD_TYPE:
873 case FUNCTION_TYPE:
874 /* Function types are the same if the return type and arguments types
875 are the same. */
876 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
877 {
878 warn_odr (t1, t2, NULL, NULL, warn, warned,
879 G_("has different return value "
880 "in another translation unit"));
881 if (warn && warned)
882 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
883 return false;
884 }
885
886 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
887 return true;
888 else
889 {
890 tree parms1, parms2;
891
892 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
893 parms1 && parms2;
894 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
895 {
896 if (!odr_subtypes_equivalent_p
897 (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
898 {
899 warn_odr (t1, t2, NULL, NULL, warn, warned,
900 G_("has different parameters in another "
901 "translation unit"));
902 if (warn && warned)
903 warn_types_mismatch (TREE_VALUE (parms1),
904 TREE_VALUE (parms2));
905 return false;
906 }
907 }
908
909 if (parms1 || parms2)
910 {
911 warn_odr (t1, t2, NULL, NULL, warn, warned,
912 G_("has different parameters "
913 "in another translation unit"));
914 return false;
915 }
916
917 return true;
918 }
919
920 case RECORD_TYPE:
921 case UNION_TYPE:
922 case QUAL_UNION_TYPE:
923 {
924 tree f1, f2;
925
926 /* For aggregate types, all the fields must be the same. */
927 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
928 {
929 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
930 f1 || f2;
931 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
932 {
933 /* Skip non-fields. */
934 while (f1 && TREE_CODE (f1) != FIELD_DECL)
935 f1 = TREE_CHAIN (f1);
936 while (f2 && TREE_CODE (f2) != FIELD_DECL)
937 f2 = TREE_CHAIN (f2);
938 if (!f1 || !f2)
939 break;
940 if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
941 break;
942 if (DECL_NAME (f1) != DECL_NAME (f2)
943 && !DECL_ARTIFICIAL (f1))
944 {
945 warn_odr (t1, t2, f1, f2, warn, warned,
946 G_("a field with different name is defined "
947 "in another translation unit"));
948 return false;
949 }
950 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
951 {
952 /* Do not warn about artificial fields and just go into generic
953 field mismatch warning. */
954 if (DECL_ARTIFICIAL (f1))
955 break;
956
957 warn_odr (t1, t2, f1, f2, warn, warned,
958 G_("a field of same name but different type "
959 "is defined in another translation unit"));
960 if (warn && warned)
961 warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
962 return false;
963 }
964 if (!gimple_compare_field_offset (f1, f2))
965 {
966 /* Do not warn about artificial fields and just go into generic
967 field mismatch warning. */
968 if (DECL_ARTIFICIAL (f1))
969 break;
970 warn_odr (t1, t2, t1, t2, warn, warned,
971 G_("fields has different layout "
972 "in another translation unit"));
973 return false;
974 }
975 gcc_assert (DECL_NONADDRESSABLE_P (f1)
976 == DECL_NONADDRESSABLE_P (f2));
977 }
978
979 /* If one aggregate has more fields than the other, they
980 are not the same. */
981 if (f1 || f2)
982 {
983 warn_odr (t1, t2, NULL, NULL, warn, warned,
984 G_("a type with different number of fields "
985 "is defined in another translation unit"));
986 return false;
987 }
988 if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
989 && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
990 != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
991 {
992 for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
993 f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
994 f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
995 {
996 if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
997 {
998 warn_odr (t1, t2, f1, f2, warn, warned,
999 G_("a different method of same type "
1000 "is defined in another translation unit"));
1001 return false;
1002 }
1003 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1004 {
1005 warn_odr (t1, t2, f1, f2, warn, warned,
1006 G_("s definition that differs by virtual "
1007 "keyword in another translation unit"));
1008 return false;
1009 }
1010 if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
1011 {
1012 warn_odr (t1, t2, f1, f2, warn, warned,
1013 G_("virtual table layout differs in another "
1014 "translation unit"));
1015 return false;
1016 }
1017 if (odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
1018 {
1019 warn_odr (t1, t2, f1, f2, warn, warned,
1020 G_("method with incompatible type is defined "
1021 "in another translation unit"));
1022 return false;
1023 }
1024 }
1025 if (f1 || f2)
1026 {
1027 warn_odr (t1, t2, NULL, NULL, warn, warned,
1028 G_("a type with different number of methods "
1029 "is defined in another translation unit"));
1030 return false;
1031 }
1032 }
1033 gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
1034 gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
1035 TYPE_SIZE_UNIT (t2), 0));
1036 }
1037
1038 return true;
1039 }
1040
1041 default:
1042 gcc_unreachable ();
1043 }
1044}
1045
61a74079
JH
1046/* TYPE is equivalent to VAL by ODR, but its tree representation differs
1047 from VAL->type. This may happen in LTO where tree merging did not merge
1048 all variants of the same type. It may or may not mean the ODR violation.
1049 Add it to the list of duplicates and warn on some violations. */
1050
549bcbd1 1051static bool
61a74079
JH
1052add_type_duplicate (odr_type val, tree type)
1053{
549bcbd1 1054 bool build_bases = false;
61a74079 1055 if (!val->types_set)
6e2830c3 1056 val->types_set = new hash_set<tree>;
61a74079 1057
549bcbd1
JH
1058 /* Always prefer complete type to be the leader. */
1059 if (!COMPLETE_TYPE_P (val->type)
1060 && COMPLETE_TYPE_P (type))
1061 {
1062 tree tmp = type;
1063
1064 build_bases = true;
1065 type = val->type;
1066 val->type = tmp;
1067 }
1068
61a74079 1069 /* See if this duplicate is new. */
6e2830c3 1070 if (!val->types_set->add (type))
61a74079
JH
1071 {
1072 bool merge = true;
1073 bool base_mismatch = false;
549bcbd1 1074 unsigned int i,j;
c59f7203 1075 bool warned = false;
6e2830c3 1076 hash_set<tree> visited;
549bcbd1 1077
61a74079
JH
1078 gcc_assert (in_lto_p);
1079 vec_safe_push (val->types, type);
61a74079
JH
1080
1081 /* First we compare memory layout. */
c59f7203 1082 if (!odr_types_equivalent_p (val->type, type, !flag_ltrans && !val->odr_violated,
6e2830c3 1083 &warned, &visited))
61a74079
JH
1084 {
1085 merge = false;
ec77d61f 1086 odr_violation_reported = true;
549bcbd1 1087 val->odr_violated = true;
61a74079
JH
1088 if (cgraph_dump_file)
1089 {
c59f7203 1090 fprintf (cgraph_dump_file, "ODR violation\n");
61a74079
JH
1091
1092 print_node (cgraph_dump_file, "", val->type, 0);
1093 putc ('\n',cgraph_dump_file);
1094 print_node (cgraph_dump_file, "", type, 0);
1095 putc ('\n',cgraph_dump_file);
1096 }
1097 }
1098
1099 /* Next sanity check that bases are the same. If not, we will end
1100 up producing wrong answers. */
549bcbd1
JH
1101 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1102 && TREE_CODE (val->type) == RECORD_TYPE
1103 && TREE_CODE (type) == RECORD_TYPE
1104 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
61a74079 1105 {
549bcbd1
JH
1106 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1107 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
1108 {
1109 odr_type base = get_odr_type
1110 (BINFO_TYPE
1111 (BINFO_BASE_BINFO (TYPE_BINFO (type),
1112 i)),
1113 true);
1114 if (val->bases.length () <= j || val->bases[j] != base)
1115 base_mismatch = true;
1116 j++;
1117 }
1118 if (base_mismatch)
61a74079 1119 {
549bcbd1
JH
1120 merge = false;
1121 odr_violation_reported = true;
1122
c59f7203
JH
1123 if (!warned && !val->odr_violated)
1124 warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1125 "a type with the same name but different bases is "
1126 "defined in another translation unit");
549bcbd1
JH
1127 val->odr_violated = true;
1128 if (cgraph_dump_file)
1129 {
1130 fprintf (cgraph_dump_file, "ODR bse violation or merging bug?\n");
1131
1132 print_node (cgraph_dump_file, "", val->type, 0);
1133 putc ('\n',cgraph_dump_file);
1134 print_node (cgraph_dump_file, "", type, 0);
1135 putc ('\n',cgraph_dump_file);
1136 }
61a74079
JH
1137 }
1138 }
1139
1140 /* Regularize things a little. During LTO same types may come with
1141 different BINFOs. Either because their virtual table was
1142 not merged by tree merging and only later at decl merging or
1143 because one type comes with external vtable, while other
1144 with internal. We want to merge equivalent binfos to conserve
1145 memory and streaming overhead.
1146
1147 The external vtables are more harmful: they contain references
1148 to external declarations of methods that may be defined in the
1149 merged LTO unit. For this reason we absolutely need to remove
1150 them and replace by internal variants. Not doing so will lead
1151 to incomplete answers from possible_polymorphic_call_targets. */
549bcbd1
JH
1152 if (!flag_ltrans && merge
1153 && TREE_CODE (val->type) == RECORD_TYPE
1154 && TREE_CODE (type) == RECORD_TYPE
1155 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1156 && TYPE_MAIN_VARIANT (type) == type
1157 && TYPE_MAIN_VARIANT (val->type) == val->type
1158 && BINFO_VTABLE (TYPE_BINFO (val->type))
1159 && BINFO_VTABLE (TYPE_BINFO (type)))
61a74079
JH
1160 {
1161 tree master_binfo = TYPE_BINFO (val->type);
1162 tree v1 = BINFO_VTABLE (master_binfo);
1163 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
1164
1165 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
1166 {
1167 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
1168 && operand_equal_p (TREE_OPERAND (v1, 1),
1169 TREE_OPERAND (v2, 1), 0));
1170 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
1171 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
1172 }
1173 gcc_assert (DECL_ASSEMBLER_NAME (v1)
1174 == DECL_ASSEMBLER_NAME (v2));
1175
1176 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
1177 {
1178 unsigned int i;
1179
c7e1befa 1180 set_type_binfo (val->type, TYPE_BINFO (type));
c3284718 1181 for (i = 0; i < val->types->length (); i++)
61a74079
JH
1182 {
1183 if (TYPE_BINFO ((*val->types)[i])
1184 == master_binfo)
c7e1befa 1185 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
61a74079 1186 }
c7e1befa 1187 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
61a74079
JH
1188 }
1189 else
c7e1befa 1190 set_type_binfo (type, master_binfo);
61a74079
JH
1191 }
1192 }
549bcbd1 1193 return build_bases;
61a74079
JH
1194}
1195
eefe9a99
JH
1196/* Get ODR type hash entry for TYPE. If INSERT is true, create
1197 possibly new entry. */
1198
1199odr_type
1200get_odr_type (tree type, bool insert)
1201{
1202 odr_type_d **slot;
1203 odr_type val;
1204 hashval_t hash;
549bcbd1
JH
1205 bool build_bases = false;
1206 bool insert_to_odr_array = false;
1207 int base_id = -1;
1208
1209 type = main_odr_variant (type);
eefe9a99 1210
eefe9a99 1211 hash = hash_type_name (type);
c203e8a7
TS
1212 slot
1213 = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
eefe9a99
JH
1214 if (!slot)
1215 return NULL;
1216
1217 /* See if we already have entry for type. */
1218 if (*slot)
1219 {
1220 val = *slot;
1221
61a74079
JH
1222 /* With LTO we need to support multiple tree representation of
1223 the same ODR type. */
1224 if (val->type != type)
549bcbd1 1225 build_bases = add_type_duplicate (val, type);
eefe9a99
JH
1226 }
1227 else
1228 {
766090c2 1229 val = ggc_cleared_alloc<odr_type_d> ();
eefe9a99
JH
1230 val->type = type;
1231 val->bases = vNULL;
1232 val->derived_types = vNULL;
0e1474e5 1233 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
549bcbd1
JH
1234 build_bases = COMPLETE_TYPE_P (val->type);
1235 insert_to_odr_array = true;
1236 }
1237
1238 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1239 && type == TYPE_MAIN_VARIANT (type))
1240 {
1241 tree binfo = TYPE_BINFO (type);
1242 unsigned int i;
1243
1244 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) = type);
1245
2d1644bf 1246 val->all_derivations_known = type_all_derivations_known_p (type);
eefe9a99
JH
1247 *slot = val;
1248 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
1249 /* For now record only polymorphic types. other are
1250 pointless for devirtualization and we can not precisely
1251 determine ODR equivalency of these during LTO. */
1252 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
1253 {
1254 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
1255 i)),
1256 true);
549bcbd1 1257 gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
eefe9a99
JH
1258 base->derived_types.safe_push (val);
1259 val->bases.safe_push (base);
549bcbd1
JH
1260 if (base->id > base_id)
1261 base_id = base->id;
eefe9a99 1262 }
549bcbd1
JH
1263 }
1264 /* Ensure that type always appears after bases. */
1265 if (insert_to_odr_array)
1266 {
eefe9a99 1267 if (odr_types_ptr)
c3284718 1268 val->id = odr_types.length ();
eefe9a99
JH
1269 vec_safe_push (odr_types_ptr, val);
1270 }
549bcbd1
JH
1271 else if (base_id > val->id)
1272 {
1273 odr_types[val->id] = 0;
1274 /* Be sure we did not recorded any derived types; these may need
1275 renumbering too. */
1276 gcc_assert (val->derived_types.length() == 0);
1277 if (odr_types_ptr)
1278 val->id = odr_types.length ();
1279 vec_safe_push (odr_types_ptr, val);
1280 }
eefe9a99
JH
1281 return val;
1282}
1283
1284/* Dump ODR type T and all its derrived type. INDENT specify indentation for
1285 recusive printing. */
1286
1287static void
1288dump_odr_type (FILE *f, odr_type t, int indent=0)
1289{
1290 unsigned int i;
1291 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
1292 print_generic_expr (f, t->type, TDF_SLIM);
2d1644bf
JH
1293 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
1294 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
eefe9a99
JH
1295 if (TYPE_NAME (t->type))
1296 {
1297 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
1298 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
1299 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
1300 }
c3284718 1301 if (t->bases.length ())
eefe9a99
JH
1302 {
1303 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
c3284718 1304 for (i = 0; i < t->bases.length (); i++)
eefe9a99
JH
1305 fprintf (f, " %i", t->bases[i]->id);
1306 fprintf (f, "\n");
1307 }
c3284718 1308 if (t->derived_types.length ())
eefe9a99
JH
1309 {
1310 fprintf (f, "%*s derived types:\n", indent * 2, "");
c3284718 1311 for (i = 0; i < t->derived_types.length (); i++)
eefe9a99
JH
1312 dump_odr_type (f, t->derived_types[i], indent + 1);
1313 }
1314 fprintf (f, "\n");
1315}
1316
1317/* Dump the type inheritance graph. */
1318
1319static void
1320dump_type_inheritance_graph (FILE *f)
1321{
1322 unsigned int i;
0e1474e5
JH
1323 if (!odr_types_ptr)
1324 return;
eefe9a99 1325 fprintf (f, "\n\nType inheritance graph:\n");
c3284718 1326 for (i = 0; i < odr_types.length (); i++)
eefe9a99 1327 {
549bcbd1 1328 if (odr_types[i] && odr_types[i]->bases.length () == 0)
eefe9a99
JH
1329 dump_odr_type (f, odr_types[i]);
1330 }
c3284718 1331 for (i = 0; i < odr_types.length (); i++)
61a74079 1332 {
549bcbd1 1333 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
61a74079
JH
1334 {
1335 unsigned int j;
1336 fprintf (f, "Duplicate tree types for odr type %i\n", i);
1337 print_node (f, "", odr_types[i]->type, 0);
c3284718 1338 for (j = 0; j < odr_types[i]->types->length (); j++)
61a74079
JH
1339 {
1340 tree t;
1341 fprintf (f, "duplicate #%i\n", j);
1342 print_node (f, "", (*odr_types[i]->types)[j], 0);
1343 t = (*odr_types[i]->types)[j];
1344 while (TYPE_P (t) && TYPE_CONTEXT (t))
1345 {
1346 t = TYPE_CONTEXT (t);
1347 print_node (f, "", t, 0);
1348 }
1349 putc ('\n',f);
1350 }
1351 }
1352 }
eefe9a99
JH
1353}
1354
1355/* Given method type T, return type of class it belongs to.
1356 Lookup this pointer and get its type. */
1357
64cbf23d 1358tree
d570d364 1359method_class_type (const_tree t)
eefe9a99
JH
1360{
1361 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
68377e53 1362 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
eefe9a99
JH
1363
1364 return TREE_TYPE (first_parm_type);
1365}
1366
1367/* Initialize IPA devirt and build inheritance tree graph. */
1368
1369void
1370build_type_inheritance_graph (void)
1371{
b270b096 1372 struct symtab_node *n;
eefe9a99
JH
1373 FILE *inheritance_dump_file;
1374 int flags;
1375
c203e8a7 1376 if (odr_hash)
eefe9a99
JH
1377 return;
1378 timevar_push (TV_IPA_INHERITANCE);
1379 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
c203e8a7 1380 odr_hash = new odr_hash_type (23);
eefe9a99
JH
1381
1382 /* We reconstruct the graph starting of types of all methods seen in the
1383 the unit. */
b270b096 1384 FOR_EACH_SYMBOL (n)
7de90a6c 1385 if (is_a <cgraph_node *> (n)
b270b096 1386 && DECL_VIRTUAL_P (n->decl)
d52f5295 1387 && n->real_symbol_p ())
549bcbd1
JH
1388 get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
1389 true);
b270b096
JH
1390
1391 /* Look also for virtual tables of types that do not define any methods.
1392
1393 We need it in a case where class B has virtual base of class A
1394 re-defining its virtual method and there is class C with no virtual
1395 methods with B as virtual base.
1396
1397 Here we output B's virtual method in two variant - for non-virtual
1398 and virtual inheritance. B's virtual table has non-virtual version,
1399 while C's has virtual.
1400
1401 For this reason we need to know about C in order to include both
1402 variants of B. More correctly, record_target_from_binfo should
1403 add both variants of the method when walking B, but we have no
1404 link in between them.
1405
1406 We rely on fact that either the method is exported and thus we
1407 assume it is called externally or C is in anonymous namespace and
1408 thus we will see the vtable. */
1409
7de90a6c 1410 else if (is_a <varpool_node *> (n)
b270b096
JH
1411 && DECL_VIRTUAL_P (n->decl)
1412 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
1413 && TYPE_BINFO (DECL_CONTEXT (n->decl))
1414 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
549bcbd1 1415 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
eefe9a99
JH
1416 if (inheritance_dump_file)
1417 {
1418 dump_type_inheritance_graph (inheritance_dump_file);
1419 dump_end (TDI_inheritance, inheritance_dump_file);
1420 }
1421 timevar_pop (TV_IPA_INHERITANCE);
1422}
1423
ccb05ef2
JH
1424/* Return true if N has reference from live virtual table
1425 (and thus can be a destination of polymorphic call).
1426 Be conservatively correct when callgraph is not built or
1427 if the method may be referred externally. */
1428
1429static bool
1430referenced_from_vtable_p (struct cgraph_node *node)
1431{
1432 int i;
1433 struct ipa_ref *ref;
1434 bool found = false;
1435
1436 if (node->externally_visible
7d0aa05b 1437 || DECL_EXTERNAL (node->decl)
ccb05ef2
JH
1438 || node->used_from_other_partition)
1439 return true;
1440
1441 /* Keep this test constant time.
1442 It is unlikely this can happen except for the case where speculative
1443 devirtualization introduced many speculative edges to this node.
1444 In this case the target is very likely alive anyway. */
1445 if (node->ref_list.referring.length () > 100)
1446 return true;
1447
1448 /* We need references built. */
1449 if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
1450 return true;
1451
d122681a 1452 for (i = 0; node->iterate_referring (i, ref); i++)
ccb05ef2
JH
1453
1454 if ((ref->use == IPA_REF_ALIAS
d52f5295 1455 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
ccb05ef2
JH
1456 || (ref->use == IPA_REF_ADDR
1457 && TREE_CODE (ref->referring->decl) == VAR_DECL
1458 && DECL_VIRTUAL_P (ref->referring->decl)))
1459 {
1460 found = true;
1461 break;
1462 }
1463 return found;
1464}
1465
68377e53 1466/* If TARGET has associated node, record it in the NODES array.
ec77d61f
JH
1467 CAN_REFER specify if program can refer to the target directly.
1468 if TARGET is unknown (NULL) or it can not be inserted (for example because
1469 its body was already removed and there is no way to refer to it), clear
1470 COMPLETEP. */
eefe9a99
JH
1471
1472static void
1473maybe_record_node (vec <cgraph_node *> &nodes,
6e2830c3 1474 tree target, hash_set<tree> *inserted,
ec77d61f 1475 bool can_refer,
68377e53 1476 bool *completep)
eefe9a99 1477{
958c1d61
JH
1478 struct cgraph_node *target_node, *alias_target;
1479 enum availability avail;
88f592e3
JH
1480
1481 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
1482 list of targets; the runtime effect of calling them is undefined.
1483 Only "real" virtual methods should be accounted. */
1484 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
1485 return;
eefe9a99 1486
ec77d61f
JH
1487 if (!can_refer)
1488 {
1489 /* The only case when method of anonymous namespace becomes unreferable
1490 is when we completely optimized it out. */
1491 if (flag_ltrans
1492 || !target
88f592e3 1493 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
ec77d61f
JH
1494 *completep = false;
1495 return;
1496 }
1497
88f592e3 1498 if (!target)
68377e53
JH
1499 return;
1500
d52f5295 1501 target_node = cgraph_node::get (target);
68377e53 1502
958c1d61
JH
1503 /* Preffer alias target over aliases, so we do not get confused by
1504 fake duplicates. */
1505 if (target_node)
1506 {
d52f5295 1507 alias_target = target_node->ultimate_alias_target (&avail);
958c1d61
JH
1508 if (target_node != alias_target
1509 && avail >= AVAIL_AVAILABLE
d52f5295 1510 && target_node->get_availability ())
958c1d61
JH
1511 target_node = alias_target;
1512 }
1513
ccb05ef2
JH
1514 /* Method can only be called by polymorphic call if any
1515 of vtables refering to it are alive.
1516
1517 While this holds for non-anonymous functions, too, there are
1518 cases where we want to keep them in the list; for example
1519 inline functions with -fno-weak are static, but we still
1520 may devirtualize them when instance comes from other unit.
1521 The same holds for LTO.
1522
1523 Currently we ignore these functions in speculative devirtualization.
1524 ??? Maybe it would make sense to be more aggressive for LTO even
1525 eslewhere. */
1526 if (!flag_ltrans
1527 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
1528 && (!target_node
1529 || !referenced_from_vtable_p (target_node)))
1530 ;
1531 /* See if TARGET is useful function we can deal with. */
1532 else if (target_node != NULL
1533 && (TREE_PUBLIC (target)
1534 || DECL_EXTERNAL (target)
1535 || target_node->definition)
d52f5295 1536 && target_node->real_symbol_p ())
0e1474e5 1537 {
68377e53 1538 gcc_assert (!target_node->global.inlined_to);
d52f5295 1539 gcc_assert (target_node->real_symbol_p ());
6e2830c3 1540 if (!inserted->add (target))
68377e53 1541 {
6e2830c3 1542 cached_polymorphic_call_targets->add (target_node);
68377e53
JH
1543 nodes.safe_push (target_node);
1544 }
0e1474e5 1545 }
68377e53 1546 else if (completep
2d1644bf
JH
1547 && (!type_in_anonymous_namespace_p
1548 (DECL_CONTEXT (target))
1549 || flag_ltrans))
0439a947 1550 *completep = false;
eefe9a99
JH
1551}
1552
68377e53
JH
1553/* See if BINFO's type match OUTER_TYPE. If so, lookup
1554 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
2d1644bf
JH
1555 method in vtable and insert method to NODES array
1556 or BASES_TO_CONSIDER if this array is non-NULL.
eefe9a99
JH
1557 Otherwise recurse to base BINFOs.
1558 This match what get_binfo_at_offset does, but with offset
1559 being unknown.
1560
a3788dde
JH
1561 TYPE_BINFOS is a stack of BINFOS of types with defined
1562 virtual table seen on way from class type to BINFO.
eefe9a99
JH
1563
1564 MATCHED_VTABLES tracks virtual tables we already did lookup
68377e53
JH
1565 for virtual function in. INSERTED tracks nodes we already
1566 inserted.
3462aa02
JH
1567
1568 ANONYMOUS is true if BINFO is part of anonymous namespace.
ec77d61f
JH
1569
1570 Clear COMPLETEP when we hit unreferable target.
eefe9a99
JH
1571 */
1572
1573static void
68377e53 1574record_target_from_binfo (vec <cgraph_node *> &nodes,
2d1644bf 1575 vec <tree> *bases_to_consider,
68377e53
JH
1576 tree binfo,
1577 tree otr_type,
a3788dde 1578 vec <tree> &type_binfos,
68377e53
JH
1579 HOST_WIDE_INT otr_token,
1580 tree outer_type,
1581 HOST_WIDE_INT offset,
6e2830c3
TS
1582 hash_set<tree> *inserted,
1583 hash_set<tree> *matched_vtables,
ec77d61f
JH
1584 bool anonymous,
1585 bool *completep)
eefe9a99
JH
1586{
1587 tree type = BINFO_TYPE (binfo);
1588 int i;
1589 tree base_binfo;
1590
eefe9a99 1591
a3788dde
JH
1592 if (BINFO_VTABLE (binfo))
1593 type_binfos.safe_push (binfo);
68377e53 1594 if (types_same_for_odr (type, outer_type))
eefe9a99 1595 {
a3788dde
JH
1596 int i;
1597 tree type_binfo = NULL;
1598
1599 /* Lookup BINFO with virtual table. For normal types it is always last
1600 binfo on stack. */
1601 for (i = type_binfos.length () - 1; i >= 0; i--)
1602 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
1603 {
1604 type_binfo = type_binfos[i];
1605 break;
1606 }
1607 if (BINFO_VTABLE (binfo))
1608 type_binfos.pop ();
1609 /* If this is duplicated BINFO for base shared by virtual inheritance,
1610 we may not have its associated vtable. This is not a problem, since
1611 we will walk it on the other path. */
1612 if (!type_binfo)
6d6af792 1613 return;
68377e53
JH
1614 tree inner_binfo = get_binfo_at_offset (type_binfo,
1615 offset, otr_type);
ec77d61f
JH
1616 if (!inner_binfo)
1617 {
1618 gcc_assert (odr_violation_reported);
1619 return;
1620 }
3462aa02
JH
1621 /* For types in anonymous namespace first check if the respective vtable
1622 is alive. If not, we know the type can't be called. */
1623 if (!flag_ltrans && anonymous)
1624 {
68377e53 1625 tree vtable = BINFO_VTABLE (inner_binfo);
2c8326a5 1626 varpool_node *vnode;
3462aa02
JH
1627
1628 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
1629 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
9041d2e6 1630 vnode = varpool_node::get (vtable);
67348ccc 1631 if (!vnode || !vnode->definition)
3462aa02
JH
1632 return;
1633 }
68377e53 1634 gcc_assert (inner_binfo);
2d1644bf 1635 if (bases_to_consider
6e2830c3
TS
1636 ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
1637 : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
68377e53 1638 {
ec77d61f
JH
1639 bool can_refer;
1640 tree target = gimple_get_virt_method_for_binfo (otr_token,
1641 inner_binfo,
1642 &can_refer);
2d1644bf
JH
1643 if (!bases_to_consider)
1644 maybe_record_node (nodes, target, inserted, can_refer, completep);
1645 /* Destructors are never called via construction vtables. */
1646 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
1647 bases_to_consider->safe_push (target);
68377e53 1648 }
eefe9a99
JH
1649 return;
1650 }
1651
1652 /* Walk bases. */
1653 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1654 /* Walking bases that have no virtual method is pointless excercise. */
1655 if (polymorphic_type_binfo_p (base_binfo))
2d1644bf 1656 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
a3788dde 1657 type_binfos,
68377e53 1658 otr_token, outer_type, offset, inserted,
ec77d61f 1659 matched_vtables, anonymous, completep);
a3788dde
JH
1660 if (BINFO_VTABLE (binfo))
1661 type_binfos.pop ();
eefe9a99
JH
1662}
1663
1664/* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
1665 of TYPE, insert them to NODES, recurse into derived nodes.
1666 INSERTED is used to avoid duplicate insertions of methods into NODES.
ec77d61f 1667 MATCHED_VTABLES are used to avoid duplicate walking vtables.
2d1644bf
JH
1668 Clear COMPLETEP if unreferable target is found.
1669
1670 If CONSIDER_CONSTURCTION is true, record to BASES_TO_CONSDIER
1671 all cases where BASE_SKIPPED is true (because the base is abstract
1672 class). */
eefe9a99
JH
1673
1674static void
1675possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
6e2830c3
TS
1676 hash_set<tree> *inserted,
1677 hash_set<tree> *matched_vtables,
eefe9a99
JH
1678 tree otr_type,
1679 odr_type type,
68377e53
JH
1680 HOST_WIDE_INT otr_token,
1681 tree outer_type,
ec77d61f 1682 HOST_WIDE_INT offset,
2d1644bf
JH
1683 bool *completep,
1684 vec <tree> &bases_to_consider,
1685 bool consider_construction)
eefe9a99
JH
1686{
1687 tree binfo = TYPE_BINFO (type->type);
1688 unsigned int i;
a3788dde 1689 vec <tree> type_binfos = vNULL;
2d1644bf
JH
1690 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
1691
1692 /* We may need to consider types w/o instances because of possible derived
1693 types using their methods either directly or via construction vtables.
1694 We are safe to skip them when all derivations are known, since we will
1695 handle them later.
1696 This is done by recording them to BASES_TO_CONSIDER array. */
1697 if (possibly_instantiated || consider_construction)
1698 {
1699 record_target_from_binfo (nodes,
1700 (!possibly_instantiated
1701 && type_all_derivations_known_p (type->type))
1702 ? &bases_to_consider : NULL,
1703 binfo, otr_type, type_binfos, otr_token,
1704 outer_type, offset,
1705 inserted, matched_vtables,
1706 type->anonymous_namespace, completep);
1707 }
a3788dde 1708 type_binfos.release ();
c3284718 1709 for (i = 0; i < type->derived_types.length (); i++)
eefe9a99
JH
1710 possible_polymorphic_call_targets_1 (nodes, inserted,
1711 matched_vtables,
1712 otr_type,
1713 type->derived_types[i],
2d1644bf
JH
1714 otr_token, outer_type, offset, completep,
1715 bases_to_consider, consider_construction);
eefe9a99
JH
1716}
1717
1718/* Cache of queries for polymorphic call targets.
1719
1720 Enumerating all call targets may get expensive when there are many
1721 polymorphic calls in the program, so we memoize all the previous
1722 queries and avoid duplicated work. */
1723
1724struct polymorphic_call_target_d
1725{
eefe9a99 1726 HOST_WIDE_INT otr_token;
68377e53
JH
1727 ipa_polymorphic_call_context context;
1728 odr_type type;
eefe9a99 1729 vec <cgraph_node *> targets;
a0fd3373 1730 int speculative_targets;
ec77d61f 1731 bool complete;
91bc34a9
JH
1732 int type_warning;
1733 tree decl_warning;
eefe9a99
JH
1734};
1735
1736/* Polymorphic call target cache helpers. */
1737
1738struct polymorphic_call_target_hasher
1739{
1740 typedef polymorphic_call_target_d value_type;
1741 typedef polymorphic_call_target_d compare_type;
1742 static inline hashval_t hash (const value_type *);
1743 static inline bool equal (const value_type *, const compare_type *);
1744 static inline void remove (value_type *);
1745};
1746
1747/* Return the computed hashcode for ODR_QUERY. */
1748
1749inline hashval_t
1750polymorphic_call_target_hasher::hash (const value_type *odr_query)
1751{
d313d45f
AK
1752 inchash::hash hstate (odr_query->otr_token);
1753
1754 hstate.add_wide_int (odr_query->type->id);
1755 hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
1756 hstate.add_wide_int (odr_query->context.offset);
68377e53 1757
3339f0bc
JH
1758 if (odr_query->context.speculative_outer_type)
1759 {
d313d45f
AK
1760 hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
1761 hstate.add_wide_int (odr_query->context.speculative_offset);
3339f0bc 1762 }
d313d45f
AK
1763 hstate.add_flag (odr_query->context.maybe_in_construction);
1764 hstate.add_flag (odr_query->context.maybe_derived_type);
1765 hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
1766 hstate.commit_flag ();
1767 return hstate.end ();
eefe9a99
JH
1768}
1769
1770/* Compare cache entries T1 and T2. */
1771
1772inline bool
1773polymorphic_call_target_hasher::equal (const value_type *t1,
1774 const compare_type *t2)
1775{
68377e53
JH
1776 return (t1->type == t2->type && t1->otr_token == t2->otr_token
1777 && t1->context.offset == t2->context.offset
3339f0bc 1778 && t1->context.speculative_offset == t2->context.speculative_offset
68377e53 1779 && t1->context.outer_type == t2->context.outer_type
3339f0bc 1780 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
68377e53
JH
1781 && t1->context.maybe_in_construction
1782 == t2->context.maybe_in_construction
3339f0bc
JH
1783 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
1784 && (t1->context.speculative_maybe_derived_type
1785 == t2->context.speculative_maybe_derived_type));
eefe9a99
JH
1786}
1787
1788/* Remove entry in polymorphic call target cache hash. */
1789
1790inline void
1791polymorphic_call_target_hasher::remove (value_type *v)
1792{
1793 v->targets.release ();
1794 free (v);
1795}
1796
1797/* Polymorphic call target query cache. */
1798
c203e8a7 1799typedef hash_table<polymorphic_call_target_hasher>
eefe9a99 1800 polymorphic_call_target_hash_type;
c203e8a7 1801static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
eefe9a99
JH
1802
1803/* Destroy polymorphic call target query cache. */
1804
1805static void
1806free_polymorphic_call_targets_hash ()
1807{
0e1474e5
JH
1808 if (cached_polymorphic_call_targets)
1809 {
c203e8a7
TS
1810 delete polymorphic_call_target_hash;
1811 polymorphic_call_target_hash = NULL;
6e2830c3 1812 delete cached_polymorphic_call_targets;
0e1474e5
JH
1813 cached_polymorphic_call_targets = NULL;
1814 }
eefe9a99
JH
1815}
1816
1817/* When virtual function is removed, we may need to flush the cache. */
1818
1819static void
1820devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
1821{
0e1474e5 1822 if (cached_polymorphic_call_targets
6e2830c3 1823 && cached_polymorphic_call_targets->contains (n))
eefe9a99
JH
1824 free_polymorphic_call_targets_hash ();
1825}
1826
d570d364
JH
1827/* Return true when TYPE contains an polymorphic type and thus is interesting
1828 for devirtualization machinery. */
1829
1830bool
1831contains_polymorphic_type_p (const_tree type)
1832{
1833 type = TYPE_MAIN_VARIANT (type);
1834
1835 if (RECORD_OR_UNION_TYPE_P (type))
1836 {
1837 if (TYPE_BINFO (type)
1838 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1839 return true;
1840 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1841 if (TREE_CODE (fld) == FIELD_DECL
1842 && !DECL_ARTIFICIAL (fld)
1843 && contains_polymorphic_type_p (TREE_TYPE (fld)))
1844 return true;
1845 return false;
1846 }
1847 if (TREE_CODE (type) == ARRAY_TYPE)
1848 return contains_polymorphic_type_p (TREE_TYPE (type));
1849 return false;
1850}
1851
4d7cf10d
JH
1852/* THIS->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
1853 is contained at THIS->OFFSET. Walk the memory representation of
1854 THIS->OUTER_TYPE and find the outermost class type that match
1855 EXPECTED_TYPE or contain EXPECTED_TYPE as a base. Update THIS
68377e53
JH
1856 to represent it.
1857
4d7cf10d 1858 For example when THIS represents type
68377e53
JH
1859 class A
1860 {
1861 int a;
1862 class B b;
1863 }
1864 and we look for type at offset sizeof(int), we end up with B and offset 0.
1865 If the same is produced by multiple inheritance, we end up with A and offset
1866 sizeof(int).
1867
1868 If we can not find corresponding class, give up by setting
4d7cf10d 1869 THIS->OUTER_TYPE to EXPECTED_TYPE and THIS->OFFSET to NULL.
68377e53
JH
1870 Return true when lookup was sucesful. */
1871
4d7cf10d
JH
1872bool
1873ipa_polymorphic_call_context::restrict_to_inner_class (tree expected_type)
68377e53 1874{
4d7cf10d
JH
1875 tree type = outer_type;
1876 HOST_WIDE_INT cur_offset = offset;
3339f0bc
JH
1877 bool speculative = false;
1878 bool speculation_valid = false;
1879 bool valid = false;
1880
4d7cf10d 1881 if (!outer_type)
3339f0bc 1882 {
4d7cf10d
JH
1883 type = outer_type = expected_type;
1884 offset = cur_offset = 0;
3339f0bc 1885 }
91bc34a9 1886
4d7cf10d
JH
1887 if (speculative_outer_type == outer_type
1888 && (!maybe_derived_type
1889 || speculative_maybe_derived_type))
91bc34a9 1890 {
4d7cf10d
JH
1891 speculative_outer_type = NULL;
1892 speculative_offset = 0;
1893 speculative_maybe_derived_type = false;
91bc34a9
JH
1894 }
1895
3339f0bc
JH
1896 /* See if speculative type seem to be derrived from outer_type.
1897 Then speculation is valid only if it really is a derivate and derived types
1898 are allowed.
1899
1900 The test does not really look for derivate, but also accepts the case where
1901 outer_type is a field of speculative_outer_type. In this case eiter
1902 MAYBE_DERIVED_TYPE is false and we have full non-speculative information or
1903 the loop bellow will correctly update SPECULATIVE_OUTER_TYPE
1904 and SPECULATIVE_MAYBE_DERIVED_TYPE. */
4d7cf10d
JH
1905 if (speculative_outer_type
1906 && speculative_offset >= offset
1907 && contains_type_p (speculative_outer_type,
1908 offset - speculative_offset,
1909 outer_type))
1910 speculation_valid = maybe_derived_type;
3339f0bc 1911 else
4d7cf10d 1912 clear_speculation ();
3339f0bc 1913
68377e53 1914 /* Find the sub-object the constant actually refers to and mark whether it is
3339f0bc
JH
1915 an artificial one (as opposed to a user-defined one).
1916
1917 This loop is performed twice; first time for outer_type and second time
1918 for speculative_outer_type. The second iteration has SPECULATIVE set. */
68377e53
JH
1919 while (true)
1920 {
1921 HOST_WIDE_INT pos, size;
1922 tree fld;
1923
1924 /* On a match, just return what we found. */
1925 if (TREE_CODE (type) == TREE_CODE (expected_type)
a0fd3373
JH
1926 && (!in_lto_p
1927 || (TREE_CODE (type) == RECORD_TYPE
1928 && TYPE_BINFO (type)
1929 && polymorphic_type_binfo_p (TYPE_BINFO (type))))
68377e53
JH
1930 && types_same_for_odr (type, expected_type))
1931 {
3339f0bc
JH
1932 if (speculative)
1933 {
1934 gcc_assert (speculation_valid);
1935 gcc_assert (valid);
1936
1937 /* If we did not match the offset, just give up on speculation. */
4d7cf10d
JH
1938 if (cur_offset != 0
1939 || (types_same_for_odr (speculative_outer_type,
1940 outer_type)
1941 && (maybe_derived_type
1942 == speculative_maybe_derived_type)))
1943 clear_speculation ();
3339f0bc
JH
1944 return true;
1945 }
1946 else
1947 {
1948 /* Type can not contain itself on an non-zero offset. In that case
1949 just give up. */
4d7cf10d 1950 if (cur_offset != 0)
3339f0bc
JH
1951 {
1952 valid = false;
1953 goto give_up;
1954 }
1955 valid = true;
1956 /* If speculation is not valid or we determined type precisely,
1957 we are done. */
1958 if (!speculation_valid
4d7cf10d 1959 || !maybe_derived_type)
3339f0bc 1960 {
4d7cf10d 1961 clear_speculation ();
3339f0bc
JH
1962 return true;
1963 }
1964 /* Otherwise look into speculation now. */
1965 else
1966 {
1967 speculative = true;
4d7cf10d
JH
1968 type = speculative_outer_type;
1969 cur_offset = speculative_offset;
3339f0bc
JH
1970 continue;
1971 }
1972 }
68377e53
JH
1973 }
1974
1975 /* Walk fields and find corresponding on at OFFSET. */
1976 if (TREE_CODE (type) == RECORD_TYPE)
1977 {
1978 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1979 {
1980 if (TREE_CODE (fld) != FIELD_DECL)
1981 continue;
1982
1983 pos = int_bit_position (fld);
1984 size = tree_to_uhwi (DECL_SIZE (fld));
4d7cf10d 1985 if (pos <= cur_offset && (pos + size) > cur_offset)
68377e53
JH
1986 break;
1987 }
1988
1989 if (!fld)
1990 goto give_up;
1991
c7e1befa 1992 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
4d7cf10d 1993 cur_offset -= pos;
68377e53
JH
1994 /* DECL_ARTIFICIAL represents a basetype. */
1995 if (!DECL_ARTIFICIAL (fld))
1996 {
3339f0bc
JH
1997 if (!speculative)
1998 {
4d7cf10d
JH
1999 outer_type = type;
2000 offset = cur_offset;
3339f0bc
JH
2001 /* As soon as we se an field containing the type,
2002 we know we are not looking for derivations. */
4d7cf10d 2003 maybe_derived_type = false;
3339f0bc
JH
2004 }
2005 else
2006 {
4d7cf10d
JH
2007 speculative_outer_type = type;
2008 speculative_offset = cur_offset;
2009 speculative_maybe_derived_type = false;
3339f0bc 2010 }
68377e53
JH
2011 }
2012 }
2013 else if (TREE_CODE (type) == ARRAY_TYPE)
2014 {
c7e1befa 2015 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
68377e53
JH
2016
2017 /* Give up if we don't know array size. */
a336b719
JH
2018 if (!TYPE_SIZE (subtype)
2019 || !tree_fits_shwi_p (TYPE_SIZE (subtype))
2020 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
2021 || !contains_polymorphic_type_p (subtype))
68377e53 2022 goto give_up;
4d7cf10d 2023 cur_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));
68377e53 2024 type = subtype;
3339f0bc
JH
2025 if (!speculative)
2026 {
4d7cf10d
JH
2027 outer_type = type;
2028 offset = cur_offset;
2029 maybe_derived_type = false;
3339f0bc
JH
2030 }
2031 else
2032 {
4d7cf10d
JH
2033 speculative_outer_type = type;
2034 speculative_offset = cur_offset;
2035 speculative_maybe_derived_type = false;
3339f0bc 2036 }
68377e53
JH
2037 }
2038 /* Give up on anything else. */
2039 else
2040 goto give_up;
2041 }
2042
2043 /* If we failed to find subtype we look for, give up and fall back to the
2044 most generic query. */
2045give_up:
4d7cf10d 2046 clear_speculation ();
3339f0bc
JH
2047 if (valid)
2048 return true;
4d7cf10d
JH
2049 outer_type = expected_type;
2050 offset = 0;
2051 maybe_derived_type = true;
2052 maybe_in_construction = true;
e400f081
JH
2053 /* POD can be changed to an instance of a polymorphic type by
2054 placement new. Here we play safe and assume that any
2055 non-polymorphic type is POD. */
2056 if ((TREE_CODE (type) != RECORD_TYPE
2057 || !TYPE_BINFO (type)
2058 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
3339f0bc
JH
2059 && (!TYPE_SIZE (type)
2060 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
4d7cf10d 2061 || (cur_offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
e400f081
JH
2062 tree_to_uhwi (TYPE_SIZE (type)))))
2063 return true;
68377e53
JH
2064 return false;
2065}
2066
2067/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
2068
2069static bool
2070contains_type_p (tree outer_type, HOST_WIDE_INT offset,
2071 tree otr_type)
2072{
4d7cf10d
JH
2073 ipa_polymorphic_call_context context;
2074 context.offset = offset;
2075 context.outer_type = TYPE_MAIN_VARIANT (outer_type);
2076 return context.restrict_to_inner_class (otr_type);
68377e53
JH
2077}
2078
390675c8
JH
2079/* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
2080
2081static tree
85942f45
JH
2082subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2083 tree vtable)
390675c8
JH
2084{
2085 tree v = BINFO_VTABLE (binfo);
2086 int i;
2087 tree base_binfo;
85942f45 2088 unsigned HOST_WIDE_INT this_offset;
390675c8 2089
85942f45
JH
2090 if (v)
2091 {
2092 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2093 gcc_unreachable ();
2094
2095 if (offset == this_offset
2096 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2097 return binfo;
2098 }
390675c8 2099
390675c8
JH
2100 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2101 if (polymorphic_type_binfo_p (base_binfo))
2102 {
2103 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2104 if (base_binfo)
2105 return base_binfo;
2106 }
2107 return NULL;
2108}
2109
85942f45
JH
2110/* T is known constant value of virtual table pointer.
2111 Store virtual table to V and its offset to OFFSET.
2112 Return false if T does not look like virtual table reference. */
390675c8 2113
85942f45 2114bool
d570d364
JH
2115vtable_pointer_value_to_vtable (const_tree t, tree *v,
2116 unsigned HOST_WIDE_INT *offset)
390675c8
JH
2117{
2118 /* We expect &MEM[(void *)&virtual_table + 16B].
2119 We obtain object's BINFO from the context of the virtual table.
2120 This one contains pointer to virtual table represented via
2121 POINTER_PLUS_EXPR. Verify that this pointer match to what
2122 we propagated through.
2123
2124 In the case of virtual inheritance, the virtual tables may
2125 be nested, i.e. the offset may be different from 16 and we may
2126 need to dive into the type representation. */
85942f45 2127 if (TREE_CODE (t) == ADDR_EXPR
390675c8
JH
2128 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2129 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2130 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2131 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2132 == VAR_DECL)
2133 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2134 (TREE_OPERAND (t, 0), 0), 0)))
2135 {
85942f45
JH
2136 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2137 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2138 return true;
390675c8 2139 }
85942f45
JH
2140
2141 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
2142 We need to handle it when T comes from static variable initializer or
2143 BINFO. */
2144 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2145 {
2146 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2147 t = TREE_OPERAND (t, 0);
2148 }
2149 else
2150 *offset = 0;
2151
2152 if (TREE_CODE (t) != ADDR_EXPR)
2153 return false;
2154 *v = TREE_OPERAND (t, 0);
2155 return true;
2156}
2157
2158/* T is known constant value of virtual table pointer. Return BINFO of the
2159 instance type. */
2160
2161tree
d570d364 2162vtable_pointer_value_to_binfo (const_tree t)
85942f45
JH
2163{
2164 tree vtable;
2165 unsigned HOST_WIDE_INT offset;
2166
2167 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2168 return NULL_TREE;
2169
2170 /* FIXME: for stores of construction vtables we return NULL,
2171 because we do not have BINFO for those. Eventually we should fix
2172 our representation to allow this case to be handled, too.
2173 In the case we see store of BINFO we however may assume
2174 that standard folding will be ale to cope with it. */
2175 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2176 offset, vtable);
390675c8
JH
2177}
2178
058d0a90
JH
2179/* We know that the instance is stored in variable or parameter
2180 (not dynamically allocated) and we want to disprove the fact
2181 that it may be in construction at invocation of CALL.
2182
2183 For the variable to be in construction we actually need to
2184 be in constructor of corresponding global variable or
2185 the inline stack of CALL must contain the constructor.
2186 Check this condition. This check works safely only before
2187 IPA passes, because inline stacks may become out of date
2188 later. */
2189
2190bool
2191decl_maybe_in_construction_p (tree base, tree outer_type,
2192 gimple call, tree function)
2193{
2194 outer_type = TYPE_MAIN_VARIANT (outer_type);
2195 gcc_assert (DECL_P (base));
2196
2197 /* After inlining the code unification optimizations may invalidate
2198 inline stacks. Also we need to give up on global variables after
2199 IPA, because addresses of these may have been propagated to their
2200 constructors. */
2201 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
2202 return true;
2203
2204 /* Pure functions can not do any changes on the dynamic type;
2205 that require writting to memory. */
2206 if (!auto_var_in_fn_p (base, function)
2207 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
2208 return false;
2209
2210 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
2211 block = BLOCK_SUPERCONTEXT (block))
2212 if (BLOCK_ABSTRACT_ORIGIN (block)
2213 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
2214 {
2215 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
2216
2217 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2218 || (!DECL_CXX_CONSTRUCTOR_P (fn)
7d0aa05b 2219 && !DECL_CXX_DESTRUCTOR_P (fn)))
058d0a90
JH
2220 {
2221 /* Watch for clones where we constant propagated the first
2222 argument (pointer to the instance). */
2223 fn = DECL_ABSTRACT_ORIGIN (fn);
2224 if (!fn
2225 || !is_global_var (base)
2226 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2227 || (!DECL_CXX_CONSTRUCTOR_P (fn)
7d0aa05b 2228 && !DECL_CXX_DESTRUCTOR_P (fn)))
058d0a90
JH
2229 continue;
2230 }
2231 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
2232 continue;
2233
2234 /* FIXME: this can go away once we have ODR types equivalency on
2235 LTO level. */
2236 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2237 return true;
2238 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
2239 if (types_same_for_odr (type, outer_type))
2240 return true;
2241 }
2242
2243 if (TREE_CODE (base) == VAR_DECL
2244 && is_global_var (base))
2245 {
2246 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2247 || (!DECL_CXX_CONSTRUCTOR_P (function)
7d0aa05b 2248 && !DECL_CXX_DESTRUCTOR_P (function)))
058d0a90
JH
2249 {
2250 if (!DECL_ABSTRACT_ORIGIN (function))
2251 return false;
2252 /* Watch for clones where we constant propagated the first
2253 argument (pointer to the instance). */
2254 function = DECL_ABSTRACT_ORIGIN (function);
2255 if (!function
2256 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2257 || (!DECL_CXX_CONSTRUCTOR_P (function)
7d0aa05b 2258 && !DECL_CXX_DESTRUCTOR_P (function)))
058d0a90
JH
2259 return false;
2260 }
2261 /* FIXME: this can go away once we have ODR types equivalency on
2262 LTO level. */
2263 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2264 return true;
2265 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
2266 if (types_same_for_odr (type, outer_type))
2267 return true;
2268 }
2269 return false;
2270}
2271
5bccb77a
JH
2272/* Proudce polymorphic call context for call method of instance
2273 that is located within BASE (that is assumed to be a decl) at OFFSET. */
2274
2275static void
2276get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
2277 tree base, HOST_WIDE_INT offset)
2278{
2279 gcc_assert (DECL_P (base));
2280
c7e1befa 2281 context->outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
5bccb77a 2282 context->offset = offset;
3339f0bc
JH
2283 context->speculative_outer_type = NULL;
2284 context->speculative_offset = 0;
2285 context->speculative_maybe_derived_type = true;
5bccb77a
JH
2286 /* Make very conservative assumption that all objects
2287 may be in construction.
2288 TODO: ipa-prop already contains code to tell better.
2289 merge it later. */
2290 context->maybe_in_construction = true;
2291 context->maybe_derived_type = false;
2292}
2293
2294/* CST is an invariant (address of decl), try to get meaningful
2295 polymorphic call context for polymorphic call of method
2296 if instance of OTR_TYPE that is located at OFFSET of this invariant.
2297 Return FALSE if nothing meaningful can be found. */
2298
2299bool
2300get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
2301 tree cst,
2302 tree otr_type,
2303 HOST_WIDE_INT offset)
2304{
2305 HOST_WIDE_INT offset2, size, max_size;
2306 tree base;
2307
2308 if (TREE_CODE (cst) != ADDR_EXPR)
79c7de84 2309 return false;
5bccb77a
JH
2310
2311 cst = TREE_OPERAND (cst, 0);
2312 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
79c7de84
EB
2313 if (!DECL_P (base) || max_size == -1 || max_size != size)
2314 return false;
5bccb77a
JH
2315
2316 /* Only type inconsistent programs can have otr_type that is
2317 not part of outer type. */
79c7de84
EB
2318 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
2319 return false;
5bccb77a 2320
79c7de84 2321 get_polymorphic_call_info_for_decl (context, base, offset);
5bccb77a
JH
2322 return true;
2323}
2324
3339f0bc
JH
2325/* See if OP is SSA name initialized as a copy or by single assignment.
2326 If so, walk the SSA graph up. */
2327
2328static tree
2329walk_ssa_copies (tree op)
2330{
2331 STRIP_NOPS (op);
2332 while (TREE_CODE (op) == SSA_NAME
2333 && !SSA_NAME_IS_DEFAULT_DEF (op)
2334 && SSA_NAME_DEF_STMT (op)
2335 && gimple_assign_single_p (SSA_NAME_DEF_STMT (op)))
2336 {
2337 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
2338 return op;
2339 op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
2340 STRIP_NOPS (op);
2341 }
2342 return op;
2343}
2344
68377e53
JH
2345/* Given REF call in FNDECL, determine class of the polymorphic
2346 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
058d0a90
JH
2347 CALL is optional argument giving the actual statement (usually call) where
2348 the context is used.
7d0aa05b
JH
2349 Return pointer to object described by the context or an declaration if
2350 we found the instance to be stored in the static storage. */
68377e53
JH
2351
2352tree
2353get_polymorphic_call_info (tree fndecl,
2354 tree ref,
2355 tree *otr_type,
2356 HOST_WIDE_INT *otr_token,
058d0a90
JH
2357 ipa_polymorphic_call_context *context,
2358 gimple call)
68377e53
JH
2359{
2360 tree base_pointer;
2361 *otr_type = obj_type_ref_class (ref);
2362 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
2363
2364 /* Set up basic info in case we find nothing interesting in the analysis. */
3339f0bc
JH
2365 context->speculative_outer_type = NULL;
2366 context->speculative_offset = 0;
2367 context->speculative_maybe_derived_type = true;
c7e1befa 2368 context->outer_type = TYPE_MAIN_VARIANT (*otr_type);
68377e53
JH
2369 context->offset = 0;
2370 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
2371 context->maybe_derived_type = true;
2d1644bf 2372 context->maybe_in_construction = true;
68377e53
JH
2373
2374 /* Walk SSA for outer object. */
2375 do
2376 {
3339f0bc
JH
2377 base_pointer = walk_ssa_copies (base_pointer);
2378 if (TREE_CODE (base_pointer) == ADDR_EXPR)
68377e53
JH
2379 {
2380 HOST_WIDE_INT size, max_size;
2381 HOST_WIDE_INT offset2;
2382 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
2383 &offset2, &size, &max_size);
2384
2385 /* If this is a varying address, punt. */
2386 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
2387 && max_size != -1
2388 && max_size == size)
2389 {
2390 /* We found dereference of a pointer. Type of the pointer
2391 and MEM_REF is meaningless, but we can look futher. */
2392 if (TREE_CODE (base) == MEM_REF)
2393 {
2394 base_pointer = TREE_OPERAND (base, 0);
2395 context->offset
807e902e 2396 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
68377e53
JH
2397 context->outer_type = NULL;
2398 }
2399 /* We found base object. In this case the outer_type
2400 is known. */
2401 else if (DECL_P (base))
2402 {
7656ee72 2403 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
68377e53
JH
2404
2405 /* Only type inconsistent programs can have otr_type that is
2406 not part of outer type. */
7656ee72
JH
2407 if (!contains_type_p (TREE_TYPE (base),
2408 context->offset + offset2, *otr_type))
3e86c6a8
JH
2409 {
2410 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
2411 code sequences; we arrange the calls to be builtin_unreachable
2412 later. */
2413 *otr_token = INT_MAX;
2414 return base_pointer;
2415 }
5bccb77a
JH
2416 get_polymorphic_call_info_for_decl (context, base,
2417 context->offset + offset2);
058d0a90
JH
2418 if (context->maybe_in_construction && call)
2419 context->maybe_in_construction
2420 = decl_maybe_in_construction_p (base,
2421 context->outer_type,
2422 call,
8e857bbf 2423 fndecl);
7d0aa05b 2424 return base;
68377e53
JH
2425 }
2426 else
2427 break;
2428 }
2429 else
2430 break;
2431 }
2432 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
2433 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
2434 {
2435 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
2436 * BITS_PER_UNIT;
2437 base_pointer = TREE_OPERAND (base_pointer, 0);
2438 }
2439 else
2440 break;
2441 }
2442 while (true);
2443
2444 /* Try to determine type of the outer object. */
2445 if (TREE_CODE (base_pointer) == SSA_NAME
2446 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2447 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
2448 {
2449 /* See if parameter is THIS pointer of a method. */
2450 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
2451 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
2452 {
c7e1befa
JH
2453 context->outer_type
2454 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
68377e53
JH
2455 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
2456
2457 /* Dynamic casting has possibly upcasted the type
2458 in the hiearchy. In this case outer type is less
2459 informative than inner type and we should forget
2460 about it. */
2461 if (!contains_type_p (context->outer_type, context->offset,
2462 *otr_type))
2463 {
2464 context->outer_type = NULL;
2465 return base_pointer;
2466 }
2467
2468 /* If the function is constructor or destructor, then
d74db8ff 2469 the type is possibly in construction, but we know
68377e53
JH
2470 it is not derived type. */
2471 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
2472 || DECL_CXX_DESTRUCTOR_P (fndecl))
2473 {
2474 context->maybe_in_construction = true;
2475 context->maybe_derived_type = false;
2476 }
2477 else
2478 {
2479 context->maybe_derived_type = true;
2480 context->maybe_in_construction = false;
2481 }
2482 return base_pointer;
2483 }
2484 /* Non-PODs passed by value are really passed by invisible
2485 reference. In this case we also know the type of the
2486 object. */
2487 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
2488 {
c7e1befa
JH
2489 context->outer_type
2490 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
68377e53
JH
2491 gcc_assert (!POINTER_TYPE_P (context->outer_type));
2492 /* Only type inconsistent programs can have otr_type that is
2493 not part of outer type. */
2494 if (!contains_type_p (context->outer_type, context->offset,
2495 *otr_type))
2496 {
3e86c6a8
JH
2497 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
2498 code sequences; we arrange the calls to be builtin_unreachable
2499 later. */
2500 *otr_token = INT_MAX;
68377e53
JH
2501 return base_pointer;
2502 }
2503 context->maybe_derived_type = false;
2504 context->maybe_in_construction = false;
2505 return base_pointer;
2506 }
2507 }
3339f0bc
JH
2508
2509 tree base_type = TREE_TYPE (base_pointer);
2510
2511 if (TREE_CODE (base_pointer) == SSA_NAME
2512 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2513 && TREE_CODE (SSA_NAME_VAR (base_pointer)) != PARM_DECL)
2514 {
2515 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
2516 code sequences; we arrange the calls to be builtin_unreachable
2517 later. */
2518 *otr_token = INT_MAX;
2519 return base_pointer;
2520 }
2521 if (TREE_CODE (base_pointer) == SSA_NAME
2522 && SSA_NAME_DEF_STMT (base_pointer)
2523 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
2524 base_type = TREE_TYPE (gimple_assign_rhs1
2525 (SSA_NAME_DEF_STMT (base_pointer)));
2526
2527 if (POINTER_TYPE_P (base_type)
2528 && contains_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
2529 context->offset,
2530 *otr_type))
2531 {
2532 context->speculative_outer_type = TYPE_MAIN_VARIANT
2533 (TREE_TYPE (base_type));
2534 context->speculative_offset = context->offset;
2535 context->speculative_maybe_derived_type = true;
2536 }
68377e53
JH
2537 /* TODO: There are multiple ways to derive a type. For instance
2538 if BASE_POINTER is passed to an constructor call prior our refernece.
2539 We do not make this type of flow sensitive analysis yet. */
2540 return base_pointer;
2541}
2542
7d0aa05b
JH
2543/* Structure to be passed in between detect_type_change and
2544 check_stmt_for_type_change. */
2545
2546struct type_change_info
2547{
2548 /* Offset into the object where there is the virtual method pointer we are
2549 looking for. */
2550 HOST_WIDE_INT offset;
2551 /* The declaration or SSA_NAME pointer of the base that we are checking for
2552 type change. */
2553 tree instance;
2554 /* The reference to virtual table pointer used. */
2555 tree vtbl_ptr_ref;
2556 tree otr_type;
2557 /* If we actually can tell the type that the object has changed to, it is
2558 stored in this field. Otherwise it remains NULL_TREE. */
2559 tree known_current_type;
2560 HOST_WIDE_INT known_current_offset;
2561
2562 /* Set to true if dynamic type change has been detected. */
2563 bool type_maybe_changed;
2564 /* Set to true if multiple types have been encountered. known_current_type
2565 must be disregarded in that case. */
2566 bool multiple_types_encountered;
2567 /* Set to true if we possibly missed some dynamic type changes and we should
2568 consider the set to be speculative. */
2569 bool speculative;
2570 bool seen_unanalyzed_store;
2571};
2572
2573/* Return true if STMT is not call and can modify a virtual method table pointer.
2574 We take advantage of fact that vtable stores must appear within constructor
2575 and destructor functions. */
2576
2577bool
2578noncall_stmt_may_be_vtbl_ptr_store (gimple stmt)
2579{
2580 if (is_gimple_assign (stmt))
2581 {
2582 tree lhs = gimple_assign_lhs (stmt);
2583
2584 if (gimple_clobber_p (stmt))
2585 return false;
2586 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
2587 {
2588 if (flag_strict_aliasing
2589 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
2590 return false;
2591
2592 if (TREE_CODE (lhs) == COMPONENT_REF
2593 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
2594 return false;
2595 /* In the future we might want to use get_base_ref_and_offset to find
2596 if there is a field corresponding to the offset and if so, proceed
2597 almost like if it was a component ref. */
2598 }
2599 }
2600
2601 /* Code unification may mess with inline stacks. */
2602 if (cfun->after_inlining)
2603 return true;
2604
2605 /* Walk the inline stack and watch out for ctors/dtors.
2606 TODO: Maybe we can require the store to appear in toplevel
2607 block of CTOR/DTOR. */
2608 for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
2609 block = BLOCK_SUPERCONTEXT (block))
2610 if (BLOCK_ABSTRACT_ORIGIN (block)
2611 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
2612 {
2613 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
2614
2615 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
2616 return false;
2617 return (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
2618 && (DECL_CXX_CONSTRUCTOR_P (fn)
2619 || DECL_CXX_DESTRUCTOR_P (fn)));
2620 }
2621 return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
2622 && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
2623 || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
2624}
2625
2626/* If STMT can be proved to be an assignment to the virtual method table
2627 pointer of ANALYZED_OBJ and the type associated with the new table
2628 identified, return the type. Otherwise return NULL_TREE. */
2629
2630static tree
2631extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci,
2632 HOST_WIDE_INT *type_offset)
2633{
2634 HOST_WIDE_INT offset, size, max_size;
a336b719 2635 tree lhs, rhs, base;
7d0aa05b
JH
2636
2637 if (!gimple_assign_single_p (stmt))
2638 return NULL_TREE;
2639
2640 lhs = gimple_assign_lhs (stmt);
2641 rhs = gimple_assign_rhs1 (stmt);
2642 if (TREE_CODE (lhs) != COMPONENT_REF
2643 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
a336b719
JH
2644 {
2645 if (dump_file)
2646 fprintf (dump_file, " LHS is not virtual table.\n");
2647 return NULL_TREE;
2648 }
7d0aa05b
JH
2649
2650 if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
2651 ;
2652 else
2653 {
2654 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
2655 if (offset != tci->offset
2656 || size != POINTER_SIZE
2657 || max_size != POINTER_SIZE)
a336b719
JH
2658 {
2659 if (dump_file)
2660 fprintf (dump_file, " wrong offset %i!=%i or size %i\n",
2661 (int)offset, (int)tci->offset, (int)size);
2662 return NULL_TREE;
2663 }
7d0aa05b
JH
2664 if (DECL_P (tci->instance))
2665 {
2666 if (base != tci->instance)
a336b719
JH
2667 {
2668 if (dump_file)
2669 {
2670 fprintf (dump_file, " base:");
2671 print_generic_expr (dump_file, base, TDF_SLIM);
2672 fprintf (dump_file, " does not match instance:");
2673 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
2674 fprintf (dump_file, "\n");
2675 }
2676 return NULL_TREE;
2677 }
7d0aa05b
JH
2678 }
2679 else if (TREE_CODE (base) == MEM_REF)
2680 {
2681 if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0)
2682 || !integer_zerop (TREE_OPERAND (base, 1)))
a336b719
JH
2683 {
2684 if (dump_file)
2685 {
2686 fprintf (dump_file, " base mem ref:");
2687 print_generic_expr (dump_file, base, TDF_SLIM);
2688 fprintf (dump_file, " has nonzero offset or does not match instance:");
2689 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
2690 fprintf (dump_file, "\n");
2691 }
2692 return NULL_TREE;
2693 }
7d0aa05b
JH
2694 }
2695 else if (!operand_equal_p (tci->instance, base, 0)
2696 || tci->offset)
a336b719
JH
2697 {
2698 if (dump_file)
2699 {
2700 fprintf (dump_file, " base:");
2701 print_generic_expr (dump_file, base, TDF_SLIM);
2702 fprintf (dump_file, " does not match instance:");
2703 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
2704 fprintf (dump_file, " with offset %i\n", (int)tci->offset);
2705 }
2706 return NULL_TREE;
2707 }
7d0aa05b
JH
2708 }
2709
a336b719
JH
2710 tree vtable;
2711 unsigned HOST_WIDE_INT offset2;
2712
2713 if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
2714 {
2715 if (dump_file)
2716 fprintf (dump_file, " Failed to lookup binfo\n");
2717 return NULL;
2718 }
7d0aa05b 2719
a336b719
JH
2720 tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2721 offset2, vtable);
7d0aa05b 2722 if (!binfo)
a336b719
JH
2723 {
2724 if (dump_file)
2725 fprintf (dump_file, " Construction vtable used\n");
2726 /* FIXME: We should suport construction contextes. */
2727 return NULL;
2728 }
2729
7d0aa05b 2730 *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
a336b719 2731 return DECL_CONTEXT (vtable);
7d0aa05b
JH
2732}
2733
2734/* Record dynamic type change of TCI to TYPE. */
2735
2736void
2737record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
2738{
2739 if (dump_file)
2740 {
2741 if (type)
2742 {
2743 fprintf (dump_file, " Recording type: ");
2744 print_generic_expr (dump_file, type, TDF_SLIM);
2745 fprintf (dump_file, " at offset %i\n", (int)offset);
2746 }
2747 else
2748 fprintf (dump_file, " Recording unknown type\n");
2749 }
a336b719
JH
2750
2751 /* If we found a constructor of type that is not polymorphic or
2752 that may contain the type in question as a field (not as base),
2753 restrict to the inner class first to make type matching bellow
2754 happier. */
2755 if (type
2756 && (offset
2757 || (TREE_CODE (type) != RECORD_TYPE
2758 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
2759 {
2760 ipa_polymorphic_call_context context;
2761
2762 context.offset = offset;
2763 context.outer_type = type;
2764 context.maybe_in_construction = false;
2765 context.maybe_derived_type = false;
2766 /* If we failed to find the inner type, we know that the call
2767 would be undefined for type produced here. */
2768 if (!context.restrict_to_inner_class (tci->otr_type))
2769 {
2770 if (dump_file)
2771 fprintf (dump_file, " Ignoring; does not contain otr_type\n");
2772 return;
2773 }
2774 /* Watch for case we reached an POD type and anticipate placement
2775 new. */
2776 if (!context.maybe_derived_type)
2777 {
2778 type = context.outer_type;
2779 offset = context.offset;
2780 }
2781 }
7d0aa05b 2782 if (tci->type_maybe_changed
a336b719 2783 && (!types_same_for_odr (type, tci->known_current_type)
7d0aa05b
JH
2784 || offset != tci->known_current_offset))
2785 tci->multiple_types_encountered = true;
a336b719 2786 tci->known_current_type = TYPE_MAIN_VARIANT (type);
7d0aa05b
JH
2787 tci->known_current_offset = offset;
2788 tci->type_maybe_changed = true;
2789}
2790
2791/* Callback of walk_aliased_vdefs and a helper function for
2792 detect_type_change to check whether a particular statement may modify
2793 the virtual table pointer, and if possible also determine the new type of
2794 the (sub-)object. It stores its result into DATA, which points to a
2795 type_change_info structure. */
2796
2797static bool
2798check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
2799{
2800 gimple stmt = SSA_NAME_DEF_STMT (vdef);
2801 struct type_change_info *tci = (struct type_change_info *) data;
2802 tree fn;
2803
2804 /* If we already gave up, just terminate the rest of walk. */
2805 if (tci->multiple_types_encountered)
2806 return true;
2807
2808 if (is_gimple_call (stmt))
2809 {
2810 if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
2811 return false;
2812
2813 /* Check for a constructor call. */
2814 if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
2815 && DECL_CXX_CONSTRUCTOR_P (fn)
2816 && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
2817 && gimple_call_num_args (stmt))
2818 {
2819 tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
2820 tree type = method_class_type (TREE_TYPE (fn));
2821 HOST_WIDE_INT offset = 0, size, max_size;
2822
2823 if (dump_file)
2824 {
2825 fprintf (dump_file, " Checking constructor call: ");
2826 print_gimple_stmt (dump_file, stmt, 0, 0);
2827 }
2828
2829 /* See if THIS parameter seems like instance pointer. */
2830 if (TREE_CODE (op) == ADDR_EXPR)
2831 {
2832 op = get_ref_base_and_extent (TREE_OPERAND (op, 0),
2833 &offset, &size, &max_size);
2834 if (size != max_size || max_size == -1)
2835 {
2836 tci->speculative = true;
2837 return false;
2838 }
2839 if (op && TREE_CODE (op) == MEM_REF)
2840 {
2841 if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
2842 {
2843 tci->speculative = true;
2844 return false;
2845 }
2846 offset += tree_to_shwi (TREE_OPERAND (op, 1))
2847 * BITS_PER_UNIT;
2848 op = TREE_OPERAND (op, 0);
2849 }
80b6ba28
JH
2850 else if (DECL_P (op))
2851 ;
7d0aa05b
JH
2852 else
2853 {
2854 tci->speculative = true;
2855 return false;
2856 }
2857 op = walk_ssa_copies (op);
2858 }
2859 if (operand_equal_p (op, tci->instance, 0)
2860 && TYPE_SIZE (type)
2861 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2862 && tree_fits_shwi_p (TYPE_SIZE (type))
2863 && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset)
2864 {
2865 record_known_type (tci, type, tci->offset - offset);
2866 return true;
2867 }
2868 }
2869 /* Calls may possibly change dynamic type by placement new. Assume
2870 it will not happen, but make result speculative only. */
2871 if (dump_file)
2872 {
2873 fprintf (dump_file, " Function call may change dynamic type:");
2874 print_gimple_stmt (dump_file, stmt, 0, 0);
2875 }
2876 tci->speculative = true;
2877 return false;
2878 }
2879 /* Check for inlined virtual table store. */
2880 else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
2881 {
2882 tree type;
2883 HOST_WIDE_INT offset = 0;
2884 if (dump_file)
2885 {
2886 fprintf (dump_file, " Checking vtbl store: ");
2887 print_gimple_stmt (dump_file, stmt, 0, 0);
2888 }
2889
2890 type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
2891 gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
2892 if (!type)
2893 {
2894 if (dump_file)
2895 fprintf (dump_file, " Unanalyzed store may change type.\n");
2896 tci->seen_unanalyzed_store = true;
2897 tci->speculative = true;
2898 }
2899 else
2900 record_known_type (tci, type, offset);
2901 return true;
2902 }
2903 else
2904 return false;
2905}
2906
4d7cf10d 2907/* THIS is polymorphic call context obtained from get_polymorphic_context.
7d0aa05b
JH
2908 OTR_OBJECT is pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
2909 INSTANCE is pointer to the outer instance as returned by
2910 get_polymorphic_context. To avoid creation of temporary expressions,
2911 INSTANCE may also be an declaration of get_polymorphic_context found the
2912 value to be in static storage.
2913
2914 If the type of instance is not fully determined
2915 (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
2916 is set), try to walk memory writes and find the actual construction of the
2917 instance.
2918
2919 We do not include this analysis in the context analysis itself, because
2920 it needs memory SSA to be fully built and the walk may be expensive.
2921 So it is not suitable for use withing fold_stmt and similar uses. */
2922
2923bool
4d7cf10d
JH
2924ipa_polymorphic_call_context::get_dynamic_type (tree instance,
2925 tree otr_object,
2926 tree otr_type,
2927 gimple call)
7d0aa05b
JH
2928{
2929 struct type_change_info tci;
2930 ao_ref ao;
2931 bool function_entry_reached = false;
2932 tree instance_ref = NULL;
2933 gimple stmt = call;
a336b719
JH
2934 /* Remember OFFSET before it is modified by restrict_to_inner_class.
2935 This is because we do not update INSTANCE when walking inwards. */
2936 HOST_WIDE_INT instance_offset = offset;
2937
2938 otr_type = TYPE_MAIN_VARIANT (otr_type);
2939
2940 /* Walk into inner type. This may clear maybe_derived_type and save us
2941 from useless work. It also makes later comparsions with static type
2942 easier. */
2943 if (outer_type)
2944 {
2945 if (!restrict_to_inner_class (otr_type))
2946 return false;
2947 }
7d0aa05b 2948
4d7cf10d 2949 if (!maybe_in_construction && !maybe_derived_type)
7d0aa05b
JH
2950 return false;
2951
2952 /* We need to obtain refernce to virtual table pointer. It is better
2953 to look it up in the code rather than build our own. This require bit
2954 of pattern matching, but we end up verifying that what we found is
2955 correct.
2956
2957 What we pattern match is:
2958
2959 tmp = instance->_vptr.A; // vtbl ptr load
2960 tmp2 = tmp[otr_token]; // vtable lookup
2961 OBJ_TYPE_REF(tmp2;instance->0) (instance);
2962
2963 We want to start alias oracle walk from vtbl pointer load,
2964 but we may not be able to identify it, for example, when PRE moved the
2965 load around. */
2966
2967 if (gimple_code (call) == GIMPLE_CALL)
2968 {
2969 tree ref = gimple_call_fn (call);
2970 HOST_WIDE_INT offset2, size, max_size;
2971
2972 if (TREE_CODE (ref) == OBJ_TYPE_REF)
2973 {
2974 ref = OBJ_TYPE_REF_EXPR (ref);
2975 ref = walk_ssa_copies (ref);
2976
2977 /* Check if definition looks like vtable lookup. */
2978 if (TREE_CODE (ref) == SSA_NAME
2979 && !SSA_NAME_IS_DEFAULT_DEF (ref)
2980 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
2981 && TREE_CODE (gimple_assign_rhs1
2982 (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
2983 {
2984 ref = get_base_address
2985 (TREE_OPERAND (gimple_assign_rhs1
2986 (SSA_NAME_DEF_STMT (ref)), 0));
2987 ref = walk_ssa_copies (ref);
2988 /* Find base address of the lookup and see if it looks like
2989 vptr load. */
2990 if (TREE_CODE (ref) == SSA_NAME
2991 && !SSA_NAME_IS_DEFAULT_DEF (ref)
2992 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
2993 {
2994 tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
2995 tree base_ref = get_ref_base_and_extent
2996 (ref_exp, &offset2, &size, &max_size);
2997
2998 /* Finally verify that what we found looks like read from OTR_OBJECT
2999 or from INSTANCE with offset OFFSET. */
3000 if (base_ref
726540aa 3001 && ((TREE_CODE (base_ref) == MEM_REF
a336b719 3002 && ((offset2 == instance_offset
726540aa
JH
3003 && TREE_OPERAND (base_ref, 0) == instance)
3004 || (!offset2 && TREE_OPERAND (base_ref, 0) == otr_object)))
3005 || (DECL_P (instance) && base_ref == instance
a336b719 3006 && offset2 == instance_offset)))
7d0aa05b
JH
3007 {
3008 stmt = SSA_NAME_DEF_STMT (ref);
3009 instance_ref = ref_exp;
3010 }
3011 }
3012 }
3013 }
3014 }
3015
3016 /* If we failed to look up the refernece in code, build our own. */
3017 if (!instance_ref)
3018 {
3019 /* If the statement in question does not use memory, we can't tell
3020 anything. */
3021 if (!gimple_vuse (stmt))
3022 return false;
3023 ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
3024 }
3025 else
3026 /* Otherwise use the real reference. */
3027 ao_ref_init (&ao, instance_ref);
3028
3029 /* We look for vtbl pointer read. */
3030 ao.size = POINTER_SIZE;
3031 ao.max_size = ao.size;
3032 ao.ref_alias_set
3033 = get_deref_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
3034
3035 if (dump_file)
3036 {
3037 fprintf (dump_file, "Determining dynamic type for call: ");
3038 print_gimple_stmt (dump_file, call, 0, 0);
3039 fprintf (dump_file, " Starting walk at: ");
3040 print_gimple_stmt (dump_file, stmt, 0, 0);
3041 fprintf (dump_file, " instance pointer: ");
3042 print_generic_expr (dump_file, otr_object, TDF_SLIM);
3043 fprintf (dump_file, " Outer instance pointer: ");
3044 print_generic_expr (dump_file, instance, TDF_SLIM);
4d7cf10d 3045 fprintf (dump_file, " offset: %i (bits)", (int)offset);
7d0aa05b
JH
3046 fprintf (dump_file, " vtbl reference: ");
3047 print_generic_expr (dump_file, instance_ref, TDF_SLIM);
3048 fprintf (dump_file, "\n");
3049 }
3050
4d7cf10d 3051 tci.offset = offset;
7d0aa05b
JH
3052 tci.instance = instance;
3053 tci.vtbl_ptr_ref = instance_ref;
3054 gcc_assert (TREE_CODE (instance) != MEM_REF);
3055 tci.known_current_type = NULL_TREE;
3056 tci.known_current_offset = 0;
3057 tci.otr_type = otr_type;
3058 tci.type_maybe_changed = false;
3059 tci.multiple_types_encountered = false;
3060 tci.speculative = false;
3061 tci.seen_unanalyzed_store = false;
3062
3063 walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
3064 &tci, NULL, &function_entry_reached);
3065
3066 /* If we did not find any type changing statements, we may still drop
3067 maybe_in_construction flag if the context already have outer type.
3068
3069 Here we make special assumptions about both constructors and
3070 destructors which are all the functions that are allowed to alter the
3071 VMT pointers. It assumes that destructors begin with assignment into
3072 all VMT pointers and that constructors essentially look in the
3073 following way:
3074
3075 1) The very first thing they do is that they call constructors of
3076 ancestor sub-objects that have them.
3077
3078 2) Then VMT pointers of this and all its ancestors is set to new
3079 values corresponding to the type corresponding to the constructor.
3080
3081 3) Only afterwards, other stuff such as constructor of member
3082 sub-objects and the code written by the user is run. Only this may
3083 include calling virtual functions, directly or indirectly.
3084
3085 4) placement new can not be used to change type of non-POD statically
3086 allocated variables.
3087
3088 There is no way to call a constructor of an ancestor sub-object in any
3089 other way.
3090
3091 This means that we do not have to care whether constructors get the
3092 correct type information because they will always change it (in fact,
3093 if we define the type to be given by the VMT pointer, it is undefined).
3094
3095 The most important fact to derive from the above is that if, for some
3096 statement in the section 3, we try to detect whether the dynamic type
3097 has changed, we can safely ignore all calls as we examine the function
3098 body backwards until we reach statements in section 2 because these
3099 calls cannot be ancestor constructors or destructors (if the input is
3100 not bogus) and so do not change the dynamic type (this holds true only
3101 for automatically allocated objects but at the moment we devirtualize
3102 only these). We then must detect that statements in section 2 change
3103 the dynamic type and can try to derive the new type. That is enough
3104 and we can stop, we will never see the calls into constructors of
3105 sub-objects in this code.
3106
4d7cf10d 3107 Therefore if the static outer type was found (outer_type)
7d0aa05b
JH
3108 we can safely ignore tci.speculative that is set on calls and give up
3109 only if there was dyanmic type store that may affect given variable
3110 (seen_unanalyzed_store) */
3111
a336b719
JH
3112 if (!tci.type_maybe_changed
3113 || (outer_type
3114 && !tci.seen_unanalyzed_store
3115 && !tci.multiple_types_encountered
3116 && offset == tci.offset
3117 && types_same_for_odr (tci.known_current_type,
3118 outer_type)))
7d0aa05b 3119 {
4d7cf10d 3120 if (!outer_type || tci.seen_unanalyzed_store)
7d0aa05b 3121 return false;
4d7cf10d
JH
3122 if (maybe_in_construction)
3123 maybe_in_construction = false;
7d0aa05b
JH
3124 if (dump_file)
3125 fprintf (dump_file, " No dynamic type change found.\n");
3126 return true;
3127 }
3128
3129 if (tci.known_current_type
3130 && !function_entry_reached
3131 && !tci.multiple_types_encountered)
3132 {
a336b719 3133 if (!tci.speculative)
7d0aa05b 3134 {
a336b719 3135 outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
4d7cf10d
JH
3136 offset = tci.known_current_offset;
3137 maybe_in_construction = false;
3138 maybe_derived_type = false;
7d0aa05b
JH
3139 if (dump_file)
3140 fprintf (dump_file, " Determined dynamic type.\n");
3141 }
4d7cf10d
JH
3142 else if (!speculative_outer_type
3143 || speculative_maybe_derived_type)
7d0aa05b 3144 {
a336b719 3145 speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
4d7cf10d
JH
3146 speculative_offset = tci.known_current_offset;
3147 speculative_maybe_derived_type = false;
7d0aa05b
JH
3148 if (dump_file)
3149 fprintf (dump_file, " Determined speculative dynamic type.\n");
3150 }
3151 }
3152 else if (dump_file)
a336b719
JH
3153 {
3154 fprintf (dump_file, " Found multiple types%s%s\n",
3155 function_entry_reached ? " (function entry reached)" : "",
3156 function_entry_reached ? " (multiple types encountered)" : "");
3157 }
7d0aa05b
JH
3158
3159 return true;
3160}
3161
68377e53
JH
3162/* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
3163 Lookup their respecitve virtual methods for OTR_TOKEN and OTR_TYPE
3164 and insert them to NODES.
3165
3166 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
3167
3168static void
3169record_targets_from_bases (tree otr_type,
3170 HOST_WIDE_INT otr_token,
3171 tree outer_type,
3172 HOST_WIDE_INT offset,
ec77d61f 3173 vec <cgraph_node *> &nodes,
6e2830c3
TS
3174 hash_set<tree> *inserted,
3175 hash_set<tree> *matched_vtables,
68377e53
JH
3176 bool *completep)
3177{
3178 while (true)
3179 {
3180 HOST_WIDE_INT pos, size;
3181 tree base_binfo;
3182 tree fld;
3183
3184 if (types_same_for_odr (outer_type, otr_type))
3185 return;
3186
3187 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
3188 {
3189 if (TREE_CODE (fld) != FIELD_DECL)
3190 continue;
3191
3192 pos = int_bit_position (fld);
3193 size = tree_to_shwi (DECL_SIZE (fld));
ec77d61f
JH
3194 if (pos <= offset && (pos + size) > offset
3195 /* Do not get confused by zero sized bases. */
3196 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
68377e53
JH
3197 break;
3198 }
3199 /* Within a class type we should always find correcponding fields. */
3200 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
3201
3202 /* Nonbasetypes should have been stripped by outer_class_type. */
3203 gcc_assert (DECL_ARTIFICIAL (fld));
3204
3205 outer_type = TREE_TYPE (fld);
3206 offset -= pos;
3207
3208 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
3209 offset, otr_type);
ec77d61f
JH
3210 if (!base_binfo)
3211 {
3212 gcc_assert (odr_violation_reported);
3213 return;
3214 }
68377e53 3215 gcc_assert (base_binfo);
6e2830c3 3216 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
68377e53 3217 {
ec77d61f
JH
3218 bool can_refer;
3219 tree target = gimple_get_virt_method_for_binfo (otr_token,
3220 base_binfo,
3221 &can_refer);
2d1644bf
JH
3222 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
3223 maybe_record_node (nodes, target, inserted, can_refer, completep);
6e2830c3 3224 matched_vtables->add (BINFO_VTABLE (base_binfo));
68377e53
JH
3225 }
3226 }
3227}
3228
3462aa02
JH
3229/* When virtual table is removed, we may need to flush the cache. */
3230
3231static void
2c8326a5 3232devirt_variable_node_removal_hook (varpool_node *n,
3462aa02
JH
3233 void *d ATTRIBUTE_UNUSED)
3234{
3235 if (cached_polymorphic_call_targets
67348ccc
DM
3236 && DECL_VIRTUAL_P (n->decl)
3237 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
3462aa02
JH
3238 free_polymorphic_call_targets_hash ();
3239}
3240
91bc34a9 3241/* Record about how many calls would benefit from given type to be final. */
7d0aa05b 3242
91bc34a9
JH
3243struct odr_type_warn_count
3244{
9716cc3e 3245 tree type;
91bc34a9
JH
3246 int count;
3247 gcov_type dyn_count;
3248};
3249
3250/* Record about how many calls would benefit from given method to be final. */
7d0aa05b 3251
91bc34a9
JH
3252struct decl_warn_count
3253{
3254 tree decl;
3255 int count;
3256 gcov_type dyn_count;
3257};
3258
3259/* Information about type and decl warnings. */
7d0aa05b 3260
91bc34a9
JH
3261struct final_warning_record
3262{
3263 gcov_type dyn_count;
3264 vec<odr_type_warn_count> type_warnings;
3265 hash_map<tree, decl_warn_count> decl_warnings;
3266};
3267struct final_warning_record *final_warning_records;
3268
eefe9a99 3269/* Return vector containing possible targets of polymorphic call of type
68377e53
JH
3270 OTR_TYPE caling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
3271 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containig
3272 OTR_TYPE and include their virtual method. This is useful for types
3273 possibly in construction or destruction where the virtual table may
3274 temporarily change to one of base types. INCLUDE_DERIVER_TYPES make
3275 us to walk the inheritance graph for all derivations.
3276
3e86c6a8
JH
3277 OTR_TOKEN == INT_MAX is used to mark calls that are provably
3278 undefined and should be redirected to unreachable.
3279
add5c763 3280 If COMPLETEP is non-NULL, store true if the list is complete.
eefe9a99
JH
3281 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
3282 in the target cache. If user needs to visit every target list
3283 just once, it can memoize them.
3284
a0fd3373
JH
3285 SPECULATION_TARGETS specify number of targets that are speculatively
3286 likely. These include targets specified by the speculative part
3287 of polymoprhic call context and also exclude all targets for classes
3288 in construction.
ec77d61f 3289
eefe9a99
JH
3290 Returned vector is placed into cache. It is NOT caller's responsibility
3291 to free it. The vector can be freed on cgraph_remove_node call if
3292 the particular node is a virtual function present in the cache. */
3293
3294vec <cgraph_node *>
3295possible_polymorphic_call_targets (tree otr_type,
3296 HOST_WIDE_INT otr_token,
68377e53
JH
3297 ipa_polymorphic_call_context context,
3298 bool *completep,
ec77d61f 3299 void **cache_token,
a0fd3373 3300 int *speculative_targetsp)
eefe9a99
JH
3301{
3302 static struct cgraph_node_hook_list *node_removal_hook_holder;
add5c763 3303 vec <cgraph_node *> nodes = vNULL;
2d1644bf 3304 vec <tree> bases_to_consider = vNULL;
68377e53 3305 odr_type type, outer_type;
eefe9a99
JH
3306 polymorphic_call_target_d key;
3307 polymorphic_call_target_d **slot;
3308 unsigned int i;
3309 tree binfo, target;
ec77d61f
JH
3310 bool complete;
3311 bool can_refer;
2d1644bf 3312 bool skipped = false;
eefe9a99 3313
c7e1befa
JH
3314 otr_type = TYPE_MAIN_VARIANT (otr_type);
3315
3e86c6a8 3316 /* If ODR is not initialized, return empty incomplete list. */
c203e8a7 3317 if (!odr_hash)
79c7de84
EB
3318 {
3319 if (completep)
3320 *completep = false;
beb683ab
MJ
3321 if (cache_token)
3322 *cache_token = NULL;
a0fd3373
JH
3323 if (speculative_targetsp)
3324 *speculative_targetsp = 0;
79c7de84
EB
3325 return nodes;
3326 }
add5c763 3327
3e86c6a8
JH
3328 /* If we hit type inconsistency, just return empty list of targets. */
3329 if (otr_token == INT_MAX)
3330 {
3331 if (completep)
3332 *completep = true;
beb683ab
MJ
3333 if (cache_token)
3334 *cache_token = NULL;
a0fd3373
JH
3335 if (speculative_targetsp)
3336 *speculative_targetsp = 0;
3e86c6a8
JH
3337 return nodes;
3338 }
3339
91bc34a9
JH
3340 /* Do not bother to compute speculative info when user do not asks for it. */
3341 if (!speculative_targetsp || !context.speculative_outer_type)
4d7cf10d 3342 context.clear_speculation ();
91bc34a9 3343
68377e53 3344 type = get_odr_type (otr_type, true);
eefe9a99 3345
c7e1befa
JH
3346 /* Recording type variants would wast results cache. */
3347 gcc_assert (!context.outer_type
3348 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3349
68377e53 3350 /* Lookup the outer class type we want to walk. */
a0fd3373 3351 if ((context.outer_type || context.speculative_outer_type)
4d7cf10d 3352 && !context.restrict_to_inner_class (otr_type))
3e86c6a8
JH
3353 {
3354 if (completep)
3355 *completep = false;
beb683ab
MJ
3356 if (cache_token)
3357 *cache_token = NULL;
a0fd3373
JH
3358 if (speculative_targetsp)
3359 *speculative_targetsp = 0;
3e86c6a8
JH
3360 return nodes;
3361 }
eefe9a99 3362
4d7cf10d 3363 /* Check that restrict_to_inner_class kept the main variant. */
c7e1befa
JH
3364 gcc_assert (!context.outer_type
3365 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3366
79c7de84 3367 /* We canonicalize our query, so we do not need extra hashtable entries. */
68377e53
JH
3368
3369 /* Without outer type, we have no use for offset. Just do the
3370 basic search from innter type */
3371 if (!context.outer_type)
3372 {
3373 context.outer_type = otr_type;
3374 context.offset = 0;
3375 }
3376 /* We need to update our hiearchy if the type does not exist. */
3377 outer_type = get_odr_type (context.outer_type, true);
ec77d61f 3378 /* If the type is complete, there are no derivations. */
68377e53
JH
3379 if (TYPE_FINAL_P (outer_type->type))
3380 context.maybe_derived_type = false;
eefe9a99
JH
3381
3382 /* Initialize query cache. */
3383 if (!cached_polymorphic_call_targets)
3384 {
6e2830c3 3385 cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
c203e8a7
TS
3386 polymorphic_call_target_hash
3387 = new polymorphic_call_target_hash_type (23);
eefe9a99 3388 if (!node_removal_hook_holder)
3462aa02
JH
3389 {
3390 node_removal_hook_holder =
3391 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
3392 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
3393 NULL);
3394 }
eefe9a99
JH
3395 }
3396
3397 /* Lookup cached answer. */
3398 key.type = type;
3399 key.otr_token = otr_token;
68377e53 3400 key.context = context;
c203e8a7 3401 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
eefe9a99
JH
3402 if (cache_token)
3403 *cache_token = (void *)*slot;
3404 if (*slot)
68377e53
JH
3405 {
3406 if (completep)
ec77d61f 3407 *completep = (*slot)->complete;
a0fd3373
JH
3408 if (speculative_targetsp)
3409 *speculative_targetsp = (*slot)->speculative_targets;
91bc34a9
JH
3410 if ((*slot)->type_warning && final_warning_records)
3411 {
3412 final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
3413 final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
3414 += final_warning_records->dyn_count;
3415 }
3416 if ((*slot)->decl_warning && final_warning_records)
3417 {
3418 struct decl_warn_count *c =
3419 final_warning_records->decl_warnings.get ((*slot)->decl_warning);
3420 c->count++;
3421 c->dyn_count += final_warning_records->dyn_count;
3422 }
68377e53
JH
3423 return (*slot)->targets;
3424 }
3425
ec77d61f 3426 complete = true;
eefe9a99
JH
3427
3428 /* Do actual search. */
3429 timevar_push (TV_IPA_VIRTUAL_CALL);
3430 *slot = XCNEW (polymorphic_call_target_d);
3431 if (cache_token)
68377e53 3432 *cache_token = (void *)*slot;
eefe9a99
JH
3433 (*slot)->type = type;
3434 (*slot)->otr_token = otr_token;
68377e53 3435 (*slot)->context = context;
a0fd3373 3436 (*slot)->speculative_targets = 0;
eefe9a99 3437
6e2830c3
TS
3438 hash_set<tree> inserted;
3439 hash_set<tree> matched_vtables;
eefe9a99 3440
91bc34a9 3441 /* First insert targets we speculatively identified as likely. */
a0fd3373
JH
3442 if (context.speculative_outer_type)
3443 {
3444 odr_type speculative_outer_type;
91bc34a9
JH
3445 bool speculation_complete = true;
3446
3447 /* First insert target from type itself and check if it may have derived types. */
a0fd3373
JH
3448 speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
3449 if (TYPE_FINAL_P (speculative_outer_type->type))
3450 context.speculative_maybe_derived_type = false;
3451 binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
3452 context.speculative_offset, otr_type);
3453 if (binfo)
3454 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3455 &can_refer);
3456 else
3457 target = NULL;
3458
91bc34a9
JH
3459 /* In the case we get complete method, we don't need
3460 to walk derivations. */
3461 if (target && DECL_FINAL_P (target))
3462 context.speculative_maybe_derived_type = false;
a0fd3373 3463 if (type_possibly_instantiated_p (speculative_outer_type->type))
91bc34a9 3464 maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
a0fd3373 3465 if (binfo)
6e2830c3 3466 matched_vtables.add (BINFO_VTABLE (binfo));
91bc34a9 3467
9716cc3e 3468
a0fd3373
JH
3469 /* Next walk recursively all derived types. */
3470 if (context.speculative_maybe_derived_type)
91bc34a9
JH
3471 for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
3472 possible_polymorphic_call_targets_1 (nodes, &inserted,
3473 &matched_vtables,
3474 otr_type,
3475 speculative_outer_type->derived_types[i],
3476 otr_token, speculative_outer_type->type,
3477 context.speculative_offset,
3478 &speculation_complete,
3479 bases_to_consider,
3480 false);
a0fd3373
JH
3481 (*slot)->speculative_targets = nodes.length();
3482 }
3483
eefe9a99 3484 /* First see virtual method of type itself. */
68377e53
JH
3485 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
3486 context.offset, otr_type);
ec77d61f
JH
3487 if (binfo)
3488 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3489 &can_refer);
3490 else
68377e53 3491 {
ec77d61f
JH
3492 gcc_assert (odr_violation_reported);
3493 target = NULL;
3494 }
68377e53 3495
2d1644bf
JH
3496 /* Destructors are never called through construction virtual tables,
3497 because the type is always known. */
3498 if (target && DECL_CXX_DESTRUCTOR_P (target))
3499 context.maybe_in_construction = false;
ec77d61f
JH
3500
3501 if (target)
3502 {
3503 /* In the case we get complete method, we don't need
68377e53
JH
3504 to walk derivations. */
3505 if (DECL_FINAL_P (target))
3506 context.maybe_derived_type = false;
3507 }
2d1644bf
JH
3508
3509 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
3510 if (type_possibly_instantiated_p (outer_type->type))
6e2830c3 3511 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
ec77d61f 3512 else
2d1644bf
JH
3513 {
3514 skipped = true;
3515 gcc_assert (in_lto_p || context.maybe_derived_type);
3516 }
79c7de84 3517
549bcbd1 3518 if (binfo)
6e2830c3 3519 matched_vtables.add (BINFO_VTABLE (binfo));
eefe9a99 3520
ec77d61f 3521 /* Next walk recursively all derived types. */
68377e53
JH
3522 if (context.maybe_derived_type)
3523 {
68377e53 3524 for (i = 0; i < outer_type->derived_types.length(); i++)
6e2830c3
TS
3525 possible_polymorphic_call_targets_1 (nodes, &inserted,
3526 &matched_vtables,
79c7de84
EB
3527 otr_type,
3528 outer_type->derived_types[i],
68377e53 3529 otr_token, outer_type->type,
2d1644bf
JH
3530 context.offset, &complete,
3531 bases_to_consider,
3532 context.maybe_in_construction);
91bc34a9
JH
3533
3534 if (!outer_type->all_derivations_known)
3535 {
3536 if (final_warning_records)
3537 {
3538 if (complete
3539 && nodes.length () == 1
3540 && warn_suggest_final_types
3541 && !outer_type->derived_types.length ())
3542 {
3543 if (outer_type->id >= (int)final_warning_records->type_warnings.length ())
3544 final_warning_records->type_warnings.safe_grow_cleared
3545 (odr_types.length ());
3546 final_warning_records->type_warnings[outer_type->id].count++;
3547 final_warning_records->type_warnings[outer_type->id].dyn_count
3548 += final_warning_records->dyn_count;
9716cc3e
JH
3549 final_warning_records->type_warnings[outer_type->id].type
3550 = outer_type->type;
91bc34a9
JH
3551 (*slot)->type_warning = outer_type->id + 1;
3552 }
3553 if (complete
3554 && warn_suggest_final_methods
3555 && nodes.length () == 1
3556 && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
3557 outer_type->type))
3558 {
3559 bool existed;
3560 struct decl_warn_count &c =
3561 final_warning_records->decl_warnings.get_or_insert
3562 (nodes[0]->decl, &existed);
3563
3564 if (existed)
3565 {
3566 c.count++;
3567 c.dyn_count += final_warning_records->dyn_count;
3568 }
3569 else
3570 {
3571 c.count = 1;
3572 c.dyn_count = final_warning_records->dyn_count;
3573 c.decl = nodes[0]->decl;
3574 }
3575 (*slot)->decl_warning = nodes[0]->decl;
3576 }
3577 }
3578 complete = false;
3579 }
68377e53 3580 }
79c7de84 3581
ec77d61f 3582 /* Finally walk bases, if asked to. */
a0fd3373
JH
3583 if (!(*slot)->speculative_targets)
3584 (*slot)->speculative_targets = nodes.length();
2d1644bf
JH
3585
3586 /* Destructors are never called through construction virtual tables,
3587 because the type is always known. One of entries may be cxa_pure_virtual
3588 so look to at least two of them. */
3589 if (context.maybe_in_construction)
3590 for (i =0 ; i < MIN (nodes.length (), 2); i++)
3591 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
3592 context.maybe_in_construction = false;
ec77d61f 3593 if (context.maybe_in_construction)
2d1644bf
JH
3594 {
3595 if (type != outer_type
3596 && (!skipped
3597 || (context.maybe_derived_type
3598 && !type_all_derivations_known_p (outer_type->type))))
3599 record_targets_from_bases (otr_type, otr_token, outer_type->type,
6e2830c3
TS
3600 context.offset, nodes, &inserted,
3601 &matched_vtables, &complete);
2d1644bf 3602 if (skipped)
6e2830c3 3603 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
2d1644bf 3604 for (i = 0; i < bases_to_consider.length(); i++)
6e2830c3 3605 maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
2d1644bf
JH
3606 }
3607 bases_to_consider.release();
ec77d61f 3608
eefe9a99 3609 (*slot)->targets = nodes;
ec77d61f 3610 (*slot)->complete = complete;
68377e53 3611 if (completep)
ec77d61f 3612 *completep = complete;
a0fd3373
JH
3613 if (speculative_targetsp)
3614 *speculative_targetsp = (*slot)->speculative_targets;
eefe9a99 3615
eefe9a99
JH
3616 timevar_pop (TV_IPA_VIRTUAL_CALL);
3617 return nodes;
3618}
3619
91bc34a9
JH
3620bool
3621add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
3622 vec<const decl_warn_count*> *vec)
3623{
3624 vec->safe_push (&value);
3625 return true;
3626}
3627
eefe9a99
JH
/* Dump all possible targets of a polymorphic call described by
   OTR_TYPE, OTR_TOKEN and context CTX to stream F.
   Output includes the containing (and speculative) outer types, whether
   the target list is complete, and one entry per candidate target.  */

void
dump_possible_polymorphic_call_targets (FILE *f,
					tree otr_type,
					HOST_WIDE_INT otr_token,
					const ipa_polymorphic_call_context &ctx)
{
  vec <cgraph_node *> targets;
  bool final;
  /* Look the type up without inserting; if it was never registered there
     is nothing to dump.  */
  odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
  unsigned int i;
  int speculative;

  if (!type)
    return;
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final, NULL, &speculative);
  fprintf (f, "  Targets of polymorphic call of type %i:", type->id);
  print_generic_expr (f, type->type, TDF_SLIM);
  fprintf (f, " token %i\n", (int)otr_token);
  if (ctx.outer_type || ctx.offset)
    {
      fprintf (f, "    Contained in type:");
      print_generic_expr (f, ctx.outer_type, TDF_SLIM);
      fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
	       ctx.offset);
    }
  if (ctx.speculative_outer_type)
    {
      fprintf (f, "    Speculatively contained in type:");
      print_generic_expr (f, ctx.speculative_outer_type, TDF_SLIM);
      fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
	       ctx.speculative_offset);
    }
  fprintf (f, "    %s%s%s%s\n      ",
	   final ? "This is a complete list." :
	   "This is partial list; extra targets may be defined in other units.",
	   ctx.maybe_in_construction ? " (base types included)" : "",
	   ctx.maybe_derived_type ? " (derived types included)" : "",
	   ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
  for (i = 0; i < targets.length (); i++)
    {
      char *name = NULL;
      /* Entries at index >= SPECULATIVE are considered unlikely; mark the
	 boundary in the dump.  */
      if (i == (unsigned)speculative)
	fprintf (f, "\n     Targets that are not likely:\n"
		 "      ");
      /* In LTO mode symbol names are mangled; demangle for readability.
	 cplus_demangle_v3 allocates, so the result is freed below.  */
      if (in_lto_p)
	name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
      fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
      if (in_lto_p)
	free (name);
      if (!targets[i]->definition)
	fprintf (f, " (no definition%s)",
		 DECL_DECLARED_INLINE_P (targets[i]->decl)
		 ? " inline" : "");
    }
  fprintf (f, "\n\n");
}
3689
0e1474e5
JH
3690
3691/* Return true if N can be possibly target of a polymorphic call of
3692 OTR_TYPE/OTR_TOKEN. */
3693
3694bool
3695possible_polymorphic_call_target_p (tree otr_type,
3696 HOST_WIDE_INT otr_token,
68377e53 3697 const ipa_polymorphic_call_context &ctx,
0e1474e5
JH
3698 struct cgraph_node *n)
3699{
3700 vec <cgraph_node *> targets;
3701 unsigned int i;
68377e53 3702 enum built_in_function fcode;
450ad0cd 3703 bool final;
0e1474e5 3704
68377e53
JH
3705 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
3706 && ((fcode = DECL_FUNCTION_CODE (n->decl))
3707 == BUILT_IN_UNREACHABLE
3708 || fcode == BUILT_IN_TRAP))
3709 return true;
3710
c203e8a7 3711 if (!odr_hash)
0e1474e5 3712 return true;
68377e53 3713 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
0e1474e5 3714 for (i = 0; i < targets.length (); i++)
d52f5295 3715 if (n->semantically_equivalent_p (targets[i]))
0e1474e5 3716 return true;
450ad0cd
JH
3717
3718 /* At a moment we allow middle end to dig out new external declarations
3719 as a targets of polymorphic calls. */
67348ccc 3720 if (!final && !n->definition)
450ad0cd 3721 return true;
0e1474e5
JH
3722 return false;
3723}
3724
3725
/* After callgraph construction new external nodes may appear.
   Add them into the graph.  */

void
update_type_inheritance_graph (void)
{
  struct cgraph_node *n;

  /* Nothing to do unless the ODR type hash has been built.  */
  if (!odr_hash)
    return;
  /* Cached target lists may become stale once new nodes are added;
     drop them so they are recomputed on demand.  */
  free_polymorphic_call_targets_hash ();
  timevar_push (TV_IPA_INHERITANCE);
  /* We reconstruct the graph starting from types of all methods seen in
     the unit; only virtual declarations without a definition are
     registered here.  */
  FOR_EACH_FUNCTION (n)
    if (DECL_VIRTUAL_P (n->decl)
	&& !n->definition
	&& n->real_symbol_p ())
      get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
		    true);
  timevar_pop (TV_IPA_INHERITANCE);
}
bbc9396b
JH
3748
3749
3750/* Return true if N looks like likely target of a polymorphic call.
3751 Rule out cxa_pure_virtual, noreturns, function declared cold and
3752 other obvious cases. */
3753
3754bool
3755likely_target_p (struct cgraph_node *n)
3756{
3757 int flags;
3758 /* cxa_pure_virtual and similar things are not likely. */
67348ccc 3759 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
bbc9396b 3760 return false;
67348ccc 3761 flags = flags_from_decl_or_type (n->decl);
bbc9396b
JH
3762 if (flags & ECF_NORETURN)
3763 return false;
3764 if (lookup_attribute ("cold",
67348ccc 3765 DECL_ATTRIBUTES (n->decl)))
bbc9396b
JH
3766 return false;
3767 if (n->frequency < NODE_FREQUENCY_NORMAL)
3768 return false;
ccb05ef2
JH
3769 /* If there are no virtual tables refering the target alive,
3770 the only way the target can be called is an instance comming from other
3771 compilation unit; speculative devirtualization is build around an
3772 assumption that won't happen. */
3773 if (!referenced_from_vtable_p (n))
3774 return false;
bbc9396b
JH
3775 return true;
3776}
3777
91bc34a9
JH
3778/* Compare type warning records P1 and P2 and chose one with larger count;
3779 helper for qsort. */
3780
3781int
3782type_warning_cmp (const void *p1, const void *p2)
3783{
3784 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3785 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3786
3787 if (t1->dyn_count < t2->dyn_count)
3788 return 1;
3789 if (t1->dyn_count > t2->dyn_count)
3790 return -1;
3791 return t2->count - t1->count;
3792}
3793
3794/* Compare decl warning records P1 and P2 and chose one with larger count;
3795 helper for qsort. */
3796
3797int
3798decl_warning_cmp (const void *p1, const void *p2)
3799{
3800 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3801 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3802
3803 if (t1->dyn_count < t2->dyn_count)
3804 return 1;
3805 if (t1->dyn_count > t2->dyn_count)
3806 return -1;
3807 return t2->count - t1->count;
3808}
3809
/* The ipa-devirt pass.
   When a polymorphic call has only one likely target in the unit,
   turn it into a speculative call.  Also emits the
   -Wsuggest-final-methods / -Wsuggest-final-types diagnostics.  */

static unsigned int
ipa_devirt (void)
{
  struct cgraph_node *n;
  /* Cache tokens of target lists already found useless, so each list is
     analyzed at most once.  */
  hash_set<void *> bad_call_targets;
  struct cgraph_edge *e;

  /* Statistics reported at the end of the dump.  */
  int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
  int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
  int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;

  /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
     This is implemented by setting up final_warning_records that are updated
     by get_polymorphic_call_targets.
     We need to clear cache in this case to trigger recomputation of all
     entries.  */
  if (warn_suggest_final_methods || warn_suggest_final_types)
    {
      final_warning_records = new (final_warning_record);
      final_warning_records->type_warnings = vNULL;
      final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
      free_polymorphic_call_targets_hash ();
    }

  FOR_EACH_DEFINED_FUNCTION (n)
    {
      bool update = false;
      if (dump_file && n->indirect_calls)
	fprintf (dump_file, "\n\nProcesing function %s/%i\n",
		 n->name (), n->order);
      for (e = n->indirect_calls; e; e = e->next_callee)
	if (e->indirect_info->polymorphic)
	  {
	    struct cgraph_node *likely_target = NULL;
	    void *cache_token;
	    bool final;
	    int speculative_targets;

	    /* Warning machinery weights candidates by this edge's
	       execution count.  */
	    if (final_warning_records)
	      final_warning_records->dyn_count = e->count;

	    vec <cgraph_node *>targets
	       = possible_polymorphic_call_targets
		    (e, &final, &cache_token, &speculative_targets);
	    unsigned int i;

	    if (dump_file)
	      dump_possible_polymorphic_call_targets
		(dump_file, e);

	    npolymorphic++;

	    /* Everything below this point is the speculative
	       transformation itself; skip it when disabled.  */
	    if (!flag_devirtualize_speculatively)
	      continue;

	    if (!cgraph_maybe_hot_edge_p (e))
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is cold\n\n");
		ncold++;
		continue;
	      }
	    if (e->speculative)
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is aready speculated\n\n");
		nspeculated++;

		/* When dumping see if we agree with speculation.  */
		if (!dump_file)
		  continue;
	      }
	    if (bad_call_targets.contains (cache_token))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target list is known to be useless\n\n");
		nmultiple++;
		continue;
	      }
	    /* Find the unique likely target; give up when there is more
	       than one among the likely (non-speculative-tail) entries.  */
	    for (i = 0; i < targets.length (); i++)
	      if (likely_target_p (targets[i]))
		{
		  if (likely_target)
		    {
		      if (i < (unsigned) speculative_targets)
			{
			  likely_target = NULL;
			  if (dump_file)
			    fprintf (dump_file, "More than one likely target\n\n");
			  nmultiple++;
			}
		      break;
		    }
		  likely_target = targets[i];
		}
	    if (!likely_target)
	      {
		/* Remember the useless list so other edges sharing the
		   same cache token skip the scan.  */
		bad_call_targets.add (cache_token);
		continue;
	      }
	    /* This is reached only when dumping; check if we agree or disagree
	       with the speculation.  */
	    if (e->speculative)
	      {
		struct cgraph_edge *e2;
		struct ipa_ref *ref;
		cgraph_speculative_call_info (e, e2, e, ref);
		if (e2->callee->ultimate_alias_target ()
		    == likely_target->ultimate_alias_target ())
		  {
		    fprintf (dump_file, "We agree with speculation\n\n");
		    nok++;
		  }
		else
		  {
		    fprintf (dump_file, "We disagree with speculation\n\n");
		    nwrong++;
		  }
		continue;
	      }
	    if (!likely_target->definition)
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is not an definition\n\n");
		nnotdefined++;
		continue;
	      }
	    /* Do not introduce new references to external symbols.  While we
	       can handle these just well, it is common for programs to be
	       built incorrectly, with headers declaring methods they are not
	       actually linked with.  */
	    if (DECL_EXTERNAL (likely_target->decl))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is external\n\n");
		nexternal++;
		continue;
	      }
	    /* Don't use an implicitly-declared destructor (c++/58678).  */
	    struct cgraph_node *non_thunk_target
	      = likely_target->function_symbol ();
	    if (DECL_ARTIFICIAL (non_thunk_target->decl)
		&& DECL_COMDAT (non_thunk_target->decl))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is artificial\n\n");
		nartificial++;
		continue;
	      }
	    /* A target that could be replaced at link time and discarded
	       cannot be speculated to.  */
	    if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
		&& likely_target->can_be_discarded_p ())
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is overwritable\n\n");
		noverwritable++;
		continue;
	      }
	    else if (dbg_cnt (devirt))
	      {
		if (dump_enabled_p ())
		  {
		    location_t locus = gimple_location_safe (e->call_stmt);
		    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
				     "speculatively devirtualizing call in %s/%i to %s/%i\n",
				     n->name (), n->order,
				     likely_target->name (),
				     likely_target->order);
		  }
		/* Prefer a non-interposable alias so the new direct
		   reference cannot be discarded.  */
		if (!likely_target->can_be_discarded_p ())
		  {
		    cgraph_node *alias;
		    alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
		    if (alias)
		      likely_target = alias;
		  }
		nconverted++;
		update = true;
		/* 8/10 is the probability/frequency assigned to the
		   speculative direct edge.  */
		cgraph_turn_edge_to_speculative
		  (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
	      }
	  }
      if (update)
	inline_update_overall_summary (n);
    }
  /* Emit the accumulated -Wsuggest-final-* warnings, most profitable
     candidates first.  */
  if (warn_suggest_final_methods || warn_suggest_final_types)
    {
      if (warn_suggest_final_types)
	{
	  final_warning_records->type_warnings.qsort (type_warning_cmp);
	  for (unsigned int i = 0;
	       i < final_warning_records->type_warnings.length (); i++)
	    if (final_warning_records->type_warnings[i].count)
	      {
	        tree type = final_warning_records->type_warnings[i].type;
	        warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
			    OPT_Wsuggest_final_types,
			    "Declaring type %qD final "
			    "would enable devirtualization of %i calls",
			    type,
			    final_warning_records->type_warnings[i].count);
	      }
	}

      if (warn_suggest_final_methods)
	{
	  vec<const decl_warn_count*> decl_warnings_vec = vNULL;

	  /* Flatten the hash map into a vector so it can be sorted.  */
	  final_warning_records->decl_warnings.traverse
	    <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
	  decl_warnings_vec.qsort (decl_warning_cmp);
	  for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
	    {
	      tree decl = decl_warnings_vec[i]->decl;
	      int count = decl_warnings_vec[i]->count;

	      if (DECL_CXX_DESTRUCTOR_P (decl))
		warning_at (DECL_SOURCE_LOCATION (decl),
			    OPT_Wsuggest_final_methods,
			    "Declaring virtual destructor of %qD final "
			    "would enable devirtualization of %i calls",
			    DECL_CONTEXT (decl), count);
	      else
		warning_at (DECL_SOURCE_LOCATION (decl),
			    OPT_Wsuggest_final_methods,
			    "Declaring method %qD final "
			    "would enable devirtualization of %i calls",
			    decl, count);
	    }
	}

      delete (final_warning_records);
      final_warning_records = 0;
    }

  if (dump_file)
    fprintf (dump_file,
	     "%i polymorphic calls, %i devirtualized,"
	     " %i speculatively devirtualized, %i cold\n"
	     "%i have multiple targets, %i overwritable,"
	     " %i already speculated (%i agree, %i disagree),"
	     " %i external, %i not defined, %i artificial\n",
	     npolymorphic, ndevirtualized, nconverted, ncold,
	     nmultiple, noverwritable, nspeculated, nok, nwrong,
	     nexternal, nnotdefined, nartificial);
  return ndevirtualized ? TODO_remove_functions : 0;
}
4060
bbc9396b
JH
namespace {

/* Pass-manager metadata for the IPA devirtualization pass.  */
const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};

/* Pass wrapper: runs ipa_devirt once per compilation when
   devirtualization (or one of the -Wsuggest-final-* warnings) is
   requested.  No summaries are streamed for LTO.  */
class pass_ipa_devirt : public ipa_opt_pass_d
{
public:
  pass_ipa_devirt (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  /* Gate: the warnings alone are enough to justify running the pass,
     even without speculative devirtualization.  */
  virtual bool gate (function *)
    {
      return (flag_devirtualize
	      && (flag_devirtualize_speculatively
		  || (warn_suggest_final_methods
		      || warn_suggest_final_types))
	      && optimize);
    }

  virtual unsigned int execute (function *) { return ipa_devirt (); }

}; // class pass_ipa_devirt

} // anon namespace
4107
4108ipa_opt_pass_d *
4109make_pass_ipa_devirt (gcc::context *ctxt)
4110{
4111 return new pass_ipa_devirt (ctxt);
4112}
4113
eefe9a99 4114#include "gt-ipa-devirt.h"