eefe9a99
JH
1/* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
23a5b65a 3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
eefe9a99
JH
4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22/* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
39 OTR = OBJ_TYPE_REF
0e1474e5 40 This is the Gimple representation of type information of a polymorphic call.
eefe9a99
JH
41 It contains two parameters:
42 otr_type is the type of the class whose method is called.
0e1474e5 43 otr_token is the index into virtual table where address is taken.
eefe9a99
JH
44
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vector are not the BINFOs associated
56 with the base types. Rather, they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
0e1474e5 58 virtual table of the base type. Also BINFO_OFFSET specifies
eefe9a99
JH
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of the base binfos (which differs from the BINFO_VTABLE of
65 the binfo associated with the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
73 This is the index of a virtual method in the virtual table associated
74 with the type defining it. The token can be looked up from OBJ_TYPE_REF
0e1474e5 75 or from DECL_VINDEX of a given virtual method.
eefe9a99
JH
76
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
0e1474e5 89 inserted into the graph. Also types without virtual methods are not
eefe9a99
JH
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
97
0e1474e5 98 Edges are represented by odr_type->base and odr_type->derived_types.
eefe9a99
JH
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given the parameters found in
103 an indirect polymorphic edge, all possible polymorphic call targets of the call.
bbc9396b
JH
104
105 pass_ipa_devirt performs simple speculative devirtualization.
eefe9a99
JH
106*/
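/* Editorial illustration, not part of the original GCC source: a minimal
   C++ hierarchy showing what the vocabulary above refers to.  The names
   (A, B, call) are made up for this sketch; the block is guarded so it is
   never compiled.  */
#if 0
struct A { virtual int f () { return 1; } };	/* otr_type of the call below.  */
struct B : A { int f () { return 2; } };	/* derived type; edge A -> B in the
						   inheritance graph.  */

int
call (A *a)
{
  /* In GIMPLE this call becomes an OBJ_TYPE_REF with otr_type == A and
     otr_token == 0 (the DECL_VINDEX of f, i.e. its index in A's virtual
     table).  TYPE_BINFO (A) and the base BINFO of A inside B carry the
     inheritance and vtable information used to enumerate the possible
     targets A::f and B::f.  */
  return a->f ();
}
#endif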
107
108#include "config.h"
109#include "system.h"
110#include "coretypes.h"
111#include "tm.h"
4d648807 112#include "tree.h"
d8a2d370
DN
113#include "print-tree.h"
114#include "calls.h"
eefe9a99 115#include "cgraph.h"
d8a2d370 116#include "expr.h"
eefe9a99 117#include "tree-pass.h"
eefe9a99
JH
118#include "pointer-set.h"
119#include "target.h"
120#include "hash-table.h"
6d8eb96b 121#include "inchash.h"
eefe9a99
JH
122#include "tree-pretty-print.h"
123#include "ipa-utils.h"
2fb9a547
AM
124#include "tree-ssa-alias.h"
125#include "internal-fn.h"
126#include "gimple-fold.h"
127#include "gimple-expr.h"
eefe9a99 128#include "gimple.h"
bbc9396b 129#include "ipa-inline.h"
61a74079 130#include "diagnostic.h"
68377e53 131#include "tree-dfa.h"
ec77d61f 132#include "demangle.h"
2b5f0895 133#include "dbgcnt.h"
c59f7203
JH
134#include "stor-layout.h"
135#include "intl.h"
136
137static bool odr_types_equivalent_p (tree, tree, bool, bool *, pointer_set_t *);
ec77d61f
JH
138
139static bool odr_violation_reported = false;
68377e53
JH
140
141/* Dummy polymorphic call context. */
142
143const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
3339f0bc 144 = {0, 0, NULL, NULL, false, true, true};
eefe9a99 145
0e1474e5
JH
146/* Pointer set of all call targets appearing in the cache. */
147static pointer_set_t *cached_polymorphic_call_targets;
148
eefe9a99
JH
149/* The node of the type inheritance graph. For each type unique in the
150 One Definition Rule (ODR) sense, we produce one node linking all
151 main variants of types equivalent to it, bases and derived types. */
152
153struct GTY(()) odr_type_d
154{
eefe9a99
JH
155 /* leader type. */
156 tree type;
549bcbd1 157 /* All bases; built only for main variants of types */
eefe9a99 158 vec<odr_type> GTY((skip)) bases;
549bcbd1
JH
159 /* All derived types with virtual methods seen in the unit;
160 built only for main variants of types. */
eefe9a99 161 vec<odr_type> GTY((skip)) derived_types;
0e1474e5 162
61a74079
JH
163 /* All equivalent types, if more than one. */
164 vec<tree, va_gc> *types;
165 /* Set of all equivalent types, if NON-NULL. */
166 pointer_set_t * GTY((skip)) types_set;
167
0e1474e5
JH
168 /* Unique ID indexing the type in odr_types array. */
169 int id;
eefe9a99
JH
170 /* Is it in anonymous namespace? */
171 bool anonymous_namespace;
2d1644bf
JH
172 /* Do we know about all derivations of given type? */
173 bool all_derivations_known;
549bcbd1
JH
174 /* Did we report ODR violation here? */
175 bool odr_violated;
eefe9a99
JH
176};
177
3339f0bc
JH
178static bool contains_type_p (tree, HOST_WIDE_INT, tree);
179
eefe9a99 180
0e1474e5
JH
181/* Return true if BINFO corresponds to a type with virtual methods.
182
183 Every type has several BINFOs. One is the BINFO associated with the type
184 itself; the others represent its bases within derived types. The BINFOs representing
185 bases do not have BINFO_VTABLE pointer set when this is the single
186 inheritance (because vtables are shared). Look up the BINFO of type
187 and check presence of its vtable. */
eefe9a99
JH
188
189static inline bool
190polymorphic_type_binfo_p (tree binfo)
191{
192 /* See if BINFO's type has a virtual table associated with it. */
193 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
194}
195
2d1644bf
JH
196/* Return TRUE if all derived types of T are known and thus
197 we may consider the walk of derived types complete.
198
199 This is typically true only for final anonymous namespace types and types
200 defined within functions (that may be COMDAT and thus shared across units,
201 but with the same set of derived types). */
202
203static bool
204type_all_derivations_known_p (tree t)
205{
206 if (TYPE_FINAL_P (t))
207 return true;
208 if (flag_ltrans)
209 return false;
210 if (type_in_anonymous_namespace_p (t))
211 return true;
212 return (decl_function_context (TYPE_NAME (t)) != NULL);
213}
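/* Editorial illustration, not part of the original GCC source: the three
   situations in which type_all_derivations_known_p above returns true.
   Names are made up; the block is guarded so it is never compiled.  */
#if 0
struct F final { virtual void f () {} };   /* TYPE_FINAL_P: no further derivation
					      is possible anywhere.  */

namespace
{
  struct A { virtual void f () {} };	   /* anonymous namespace: every derived
					      type must live in this unit.  */
}

void
g (void)
{
  struct L { virtual void f () {} };	   /* function-local type: derivations are
					      confined to (copies of) this
					      function body.  */
}
#endif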
214
215/* Return TRUE if the type's constructors are all visible. */
216
217static bool
218type_all_ctors_visible_p (tree t)
219{
220 return !flag_ltrans
221 && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
222 /* We cannot always use type_all_derivations_known_p.
223 For function-local types we must assume the case where
224 the function is COMDAT and shared between units.
225
226 TODO: These cases are quite easy to get, but we need
227 to keep track of C++ privatizing via -Wno-weak
228 as well as the IPA privatizing. */
229 && type_in_anonymous_namespace_p (t);
230}
231
232/* Return TRUE if the type may have instances. */
233
234static bool
235type_possibly_instantiated_p (tree t)
236{
237 tree vtable;
238 varpool_node *vnode;
239
240 /* TODO: Add abstract types here. */
241 if (!type_all_ctors_visible_p (t))
242 return true;
243
244 vtable = BINFO_VTABLE (TYPE_BINFO (t));
245 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
246 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
9041d2e6 247 vnode = varpool_node::get (vtable);
2d1644bf
JH
248 return vnode && vnode->definition;
249}
250
eefe9a99
JH
251/* One Definition Rule hashtable helpers. */
252
253struct odr_hasher
254{
255 typedef odr_type_d value_type;
256 typedef union tree_node compare_type;
257 static inline hashval_t hash (const value_type *);
258 static inline bool equal (const value_type *, const compare_type *);
259 static inline void remove (value_type *);
260};
261
549bcbd1
JH
262/* Return the type that was declared with T's name so that T is a
263 qualified variant of it. */
264
265static inline tree
266main_odr_variant (const_tree t)
267{
268 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
269 return TREE_TYPE (TYPE_NAME (t));
270 /* Unnamed types and non-C++ produced types can be compared by variants. */
271 else
272 return TYPE_MAIN_VARIANT (t);
273}
274
eefe9a99
JH
275/* Produce hash based on type name. */
276
549bcbd1 277static hashval_t
eefe9a99
JH
278hash_type_name (tree t)
279{
549bcbd1 280 gcc_checking_assert (main_odr_variant (t) == t);
eefe9a99
JH
281
282 /* If not in LTO, all main variants are unique, so we can do
283 pointer hash. */
284 if (!in_lto_p)
285 return htab_hash_pointer (t);
286
287 /* Anonymous types are unique. */
288 if (type_in_anonymous_namespace_p (t))
289 return htab_hash_pointer (t);
290
61a74079 291 /* For polymorphic types, we can simply hash the virtual table. */
549bcbd1
JH
292 if (TREE_CODE (t) == RECORD_TYPE
293 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
61a74079
JH
294 {
295 tree v = BINFO_VTABLE (TYPE_BINFO (t));
296 hashval_t hash = 0;
297
298 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
299 {
300 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
301 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
302 }
303
304 v = DECL_ASSEMBLER_NAME (v);
61a74079
JH
305 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
306 return hash;
307 }
308
eefe9a99
JH
309 /* Rest is not implemented yet. */
310 gcc_unreachable ();
311}
312
313/* Return the computed hashcode for ODR_TYPE. */
314
315inline hashval_t
316odr_hasher::hash (const value_type *odr_type)
317{
318 return hash_type_name (odr_type->type);
319}
320
549bcbd1
JH
321/* For languages with One Definition Rule, work out if
322 types are the same based on their name.
323
324 This is non-trivial for LTO where minor differences in
325 the type representation may have prevented type merging
326 from merging two copies of an otherwise equivalent type.
327
328 Until we start streaming mangled type names, this function works
329 only for polymorphic types. */
330
331bool
332types_same_for_odr (const_tree type1, const_tree type2)
333{
334 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
335
336 type1 = main_odr_variant (type1);
337 type2 = main_odr_variant (type2);
338
339 if (type1 == type2)
340 return true;
341
342 if (!in_lto_p)
343 return false;
344
345 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
346 on the corresponding TYPE_STUB_DECL. */
347 if (type_in_anonymous_namespace_p (type1)
348 || type_in_anonymous_namespace_p (type2))
349 return false;
350
01a92e70
JH
351 /* See if the types are obviously different (i.e. different codes
352 or polymorphic wrt non-polymorphic). This is not strictly correct
353 for ODR violating programs, but we can't do better without streaming
354 ODR names. */
355 if (TREE_CODE (type1) != TREE_CODE (type2))
356 return false;
357 if (TREE_CODE (type1) == RECORD_TYPE
358 && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type2) == NULL_TREE))
359 return false;
360 if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
361 && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
362 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
363 return false;
364
549bcbd1
JH
365 /* At the moment we have no way to establish ODR equivalence at LTO
366 other than comparing virtual table pointers of polymorphic types.
367 Eventually we should start saving mangled names in TYPE_NAME.
368 Then this condition will become non-trivial. */
369
370 if (TREE_CODE (type1) == RECORD_TYPE
371 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
372 && BINFO_VTABLE (TYPE_BINFO (type1))
373 && BINFO_VTABLE (TYPE_BINFO (type2)))
374 {
375 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
376 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
377 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
378 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
379 return (operand_equal_p (TREE_OPERAND (v1, 1),
380 TREE_OPERAND (v2, 1), 0)
381 && DECL_ASSEMBLER_NAME
382 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
383 == DECL_ASSEMBLER_NAME
384 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
385 }
386 gcc_unreachable ();
387}
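/* Editorial illustration, not part of the original GCC source: why vtable
   symbols can stand in for mangled type names in the comparison above.
   The block is guarded so it is never compiled.  */
#if 0
struct S { virtual void f (); };
void S::f () {}
/* With LTO, each translation unit defining S may stream its own
   RECORD_TYPE tree, but BINFO_VTABLE of TYPE_BINFO in both copies leads to
   a vtable decl whose DECL_ASSEMBLER_NAME is the same mangled symbol
   (_ZTV1S under the Itanium ABI), so the two trees are recognized as the
   same ODR type.  */
#endif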
388
389
0e1474e5 390/* Compare types T1 and T2 and return true if they are
eefe9a99
JH
391 equivalent. */
392
393inline bool
394odr_hasher::equal (const value_type *t1, const compare_type *ct2)
395{
396 tree t2 = const_cast <tree> (ct2);
397
549bcbd1 398 gcc_checking_assert (main_odr_variant (t2) == t2);
eefe9a99
JH
399 if (t1->type == t2)
400 return true;
401 if (!in_lto_p)
402 return false;
403 return types_same_for_odr (t1->type, t2);
404}
405
0e1474e5 406/* Free ODR type V. */
eefe9a99
JH
407
408inline void
409odr_hasher::remove (value_type *v)
410{
411 v->bases.release ();
412 v->derived_types.release ();
61a74079
JH
413 if (v->types_set)
414 pointer_set_destroy (v->types_set);
eefe9a99
JH
415 ggc_free (v);
416}
417
418/* ODR type hash used to lookup ODR type based on tree type node. */
419
c203e8a7
TS
420typedef hash_table<odr_hasher> odr_hash_type;
421static odr_hash_type *odr_hash;
eefe9a99
JH
422
423/* ODR types are also stored into the ODR_TYPE vector to allow consistent
424 walking. Bases appear before derived types. The vector is garbage collected
425 so we won't end up visiting empty types. */
426
427static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
428#define odr_types (*odr_types_ptr)
429
c7e1befa
JH
430/* Set TYPE_BINFO of TYPE and its variants to BINFO. */
431void
432set_type_binfo (tree type, tree binfo)
433{
434 for (; type; type = TYPE_NEXT_VARIANT (type))
435 if (COMPLETE_TYPE_P (type))
436 TYPE_BINFO (type) = binfo;
437 else
438 gcc_assert (!TYPE_BINFO (type));
439}
440
c59f7203
JH
441/* Compare T1 and T2 based on name or structure. */
442
443static bool
444odr_subtypes_equivalent_p (tree t1, tree t2, pointer_set_t *visited)
445{
446 bool an1, an2;
447
448 /* This can happen in incomplete types that should be handled earlier. */
449 gcc_assert (t1 && t2);
450
451 t1 = main_odr_variant (t1);
452 t2 = main_odr_variant (t2);
453 if (t1 == t2)
454 return true;
455 if (TREE_CODE (t1) != TREE_CODE (t2))
456 return false;
457 if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
458 return false;
459 if (TYPE_NAME (t1) && DECL_NAME (TYPE_NAME (t1)) != DECL_NAME (TYPE_NAME (t2)))
460 return false;
461
462 /* Anonymous namespace types must match exactly. */
463 an1 = type_in_anonymous_namespace_p (t1);
464 an2 = type_in_anonymous_namespace_p (t2);
465 if (an1 != an2 || an1)
466 return false;
467
468 /* For types where we cannot establish ODR equivalence, recurse and deeply
469 compare. */
470 if (TREE_CODE (t1) != RECORD_TYPE
471 || !TYPE_BINFO (t1) || !TYPE_BINFO (t2)
472 || !polymorphic_type_binfo_p (TYPE_BINFO (t1))
473 || !polymorphic_type_binfo_p (TYPE_BINFO (t2)))
474 {
475 /* This should really be a pair hash, but for the moment we do not need
476 100% reliability and it would be better to compare all ODR types so
477 recursion here is needed only for component types. */
478 if (pointer_set_insert (visited, t1))
479 return true;
69dc8208 480 return odr_types_equivalent_p (t1, t2, false, NULL, visited);
c59f7203
JH
481 }
482 return types_same_for_odr (t1, t2);
483}
484
485/* Output ODR violation warning about T1 and T2 with REASON.
486 Display location of ST1 and ST2 if REASON speaks about field or
487 method of the type.
488 If WARN is false, do nothing. Set WARNED if warning was indeed
489 output. */
490
491void
492warn_odr (tree t1, tree t2, tree st1, tree st2,
493 bool warn, bool *warned, const char *reason)
494{
495 tree decl2 = TYPE_NAME (t2);
496
497 if (!warn)
498 return;
499 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
500 "type %qT violates one definition rule",
501 t1))
502 return;
503 if (!st1)
504 ;
505 else if (TREE_CODE (st1) == FIELD_DECL)
506 {
507 inform (DECL_SOURCE_LOCATION (decl2),
508 "a different type is defined in another translation unit");
509 inform (DECL_SOURCE_LOCATION (st1),
510 "the first difference of corresponding definitions is field %qD",
511 st1);
512 decl2 = st2;
513 }
514 else if (TREE_CODE (st1) == FUNCTION_DECL)
515 {
516 inform (DECL_SOURCE_LOCATION (decl2),
517 "a different type is defined in another translation unit");
518 inform (DECL_SOURCE_LOCATION (st1),
519 "the first difference of corresponding definitions is method %qD",
520 st1);
521 decl2 = st2;
522 }
523 else
524 return;
525 inform (DECL_SOURCE_LOCATION (decl2), reason);
526
527 if (warned)
528 *warned = true;
529}
530
531/* We already warned about an ODR mismatch. T1 and T2 ought to be equivalent
532 because they are used in the same place in ODR-matching types.
533 They are not; inform the user. */
534
535void
536warn_types_mismatch (tree t1, tree t2)
537{
538 if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
539 return;
540 /* In Firefox it is a common bug to have same types but in
541 different namespaces. Be a bit more informative on
542 this. */
543 if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
544 && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
545 != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
546 || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
547 && (DECL_NAME (TYPE_CONTEXT (t1)) !=
548 DECL_NAME (TYPE_CONTEXT (t2))))))
549 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
550 "type %qT should match type %qT but is defined "
551 "in different namespace ",
552 t1, t2);
553 else
554 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
555 "type %qT should match type %qT",
556 t1, t2);
557 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
558 "the incompatible type is defined here");
559}
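/* Editorial illustration, not part of the original GCC source: the kind of
   mismatch the "different namespace" hint above is aimed at.  The names
   (mozilla, Image, Frame) are made up; the block is guarded so it is never
   compiled.  */
#if 0
/* Translation unit 1.  */
namespace mozilla { struct Image { int width; }; }
using mozilla::Image;
struct Frame { Image *img; };	/* field type is mozilla::Image.  */

/* Translation unit 2.  */
struct Image { int width; };	/* global namespace.  */
struct Frame { Image *img; };	/* field type is ::Image; Frame now violates
				   the ODR and the hint above reports that the
				   two Image types differ only by namespace.  */
#endif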
560
561/* Compare T1 and T2, report ODR violations if WARN is true and set
562 WARNED to true if anything is reported. Return true if types match.
563 If true is returned, the types are also compatible in the sense of
564 gimple_canonical_types_compatible_p. */
565
566static bool
567odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned, pointer_set_t *visited)
568{
569 /* Check first for the obvious case of pointer identity. */
570 if (t1 == t2)
571 return true;
572 gcc_assert (!type_in_anonymous_namespace_p (t1));
573 gcc_assert (!type_in_anonymous_namespace_p (t2));
574
575 /* Can't be the same type if the types don't have the same code. */
576 if (TREE_CODE (t1) != TREE_CODE (t2))
577 {
578 warn_odr (t1, t2, NULL, NULL, warn, warned,
579 G_("a different type is defined in another translation unit"));
580 return false;
581 }
582
583 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
584 {
585 warn_odr (t1, t2, NULL, NULL, warn, warned,
586 G_("a type with different qualifiers is defined in another "
587 "translation unit"));
588 return false;
589 }
590
591 if (comp_type_attributes (t1, t2) != 1)
592 {
593 warn_odr (t1, t2, NULL, NULL, warn, warned,
594 G_("a type with attributes "
595 "is defined in another translation unit"));
596 return false;
597 }
598
599 if (TREE_CODE (t1) == ENUMERAL_TYPE)
600 {
601 tree v1, v2;
602 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
603 v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
604 {
605 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
606 {
607 warn_odr (t1, t2, NULL, NULL, warn, warned,
608 G_("an enum with different value name"
609 " is defined in another translation unit"));
610 return false;
611 }
612 if (TREE_VALUE (v1) != TREE_VALUE (v2)
613 && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
614 DECL_INITIAL (TREE_VALUE (v2)), 0))
615 {
616 warn_odr (t1, t2, NULL, NULL, warn, warned,
617 G_("an enum with different values is defined"
618 " in another translation unit"));
619 return false;
620 }
621 }
622 if (v1 || v2)
623 {
624 warn_odr (t1, t2, NULL, NULL, warn, warned,
625 G_("an enum with mismatching number of values "
626 "is defined in another translation unit"));
627 return false;
628 }
629 }
630
631 /* Non-aggregate types can be handled cheaply. */
632 if (INTEGRAL_TYPE_P (t1)
633 || SCALAR_FLOAT_TYPE_P (t1)
634 || FIXED_POINT_TYPE_P (t1)
635 || TREE_CODE (t1) == VECTOR_TYPE
636 || TREE_CODE (t1) == COMPLEX_TYPE
637 || TREE_CODE (t1) == OFFSET_TYPE
638 || POINTER_TYPE_P (t1))
639 {
640 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
641 {
642 warn_odr (t1, t2, NULL, NULL, warn, warned,
643 G_("a type with different precision is defined "
644 "in another translation unit"));
645 return false;
646 }
647 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
648 {
649 warn_odr (t1, t2, NULL, NULL, warn, warned,
650 G_("a type with different signedness is defined "
651 "in another translation unit"));
652 return false;
653 }
654
655 if (TREE_CODE (t1) == INTEGER_TYPE
656 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
657 {
658 /* char WRT uint_8? */
659 warn_odr (t1, t2, NULL, NULL, warn, warned,
660 G_("a different type is defined in another "
661 "translation unit"));
662 return false;
663 }
664
665 /* For canonical type comparisons we do not want to build SCCs
666 so we cannot compare pointed-to types. But we can, for now,
667 require the same pointed-to type kind and match what
668 useless_type_conversion_p would do. */
669 if (POINTER_TYPE_P (t1))
670 {
671 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
672 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
673 {
674 warn_odr (t1, t2, NULL, NULL, warn, warned,
675 G_("it is defined as a pointer in different address "
676 "space in another translation unit"));
677 return false;
678 }
679
680 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
681 {
682 warn_odr (t1, t2, NULL, NULL, warn, warned,
683 G_("it is defined as a pointer to different type "
684 "in another translation unit"));
685 if (warn && warned)
686 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
687 return false;
688 }
689 }
690
691 /* Tail-recurse to components. */
692 if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
693 && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
694 {
695 /* Probably specific enough. */
696 warn_odr (t1, t2, NULL, NULL, warn, warned,
697 G_("a different type is defined "
698 "in another translation unit"));
699 if (warn && warned)
700 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
701 return false;
702 }
703
704 gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
705 gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
706 TYPE_SIZE_UNIT (t2), 0));
707 gcc_assert (TYPE_MODE (t1) == TYPE_MODE (t2));
708
709 return true;
710 }
711
712 /* Do type-specific comparisons. */
713 switch (TREE_CODE (t1))
714 {
715 case ARRAY_TYPE:
716 {
717 /* Array types are the same if the element types are the same and
718 the number of elements are the same. */
719 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
720 {
721 warn_odr (t1, t2, NULL, NULL, warn, warned,
722 G_("a different type is defined in another "
723 "translation unit"));
724 if (warn && warned)
725 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
726 }
727 gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
728 gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
729 == TYPE_NONALIASED_COMPONENT (t2));
730
731 tree i1 = TYPE_DOMAIN (t1);
732 tree i2 = TYPE_DOMAIN (t2);
733
734 /* For an incomplete external array, the type domain can be
735 NULL_TREE. Check this condition also. */
736 if (i1 == NULL_TREE || i2 == NULL_TREE)
737 return true;
738
739 tree min1 = TYPE_MIN_VALUE (i1);
740 tree min2 = TYPE_MIN_VALUE (i2);
741 tree max1 = TYPE_MAX_VALUE (i1);
742 tree max2 = TYPE_MAX_VALUE (i2);
743
744 /* In C++, minimums should be always 0. */
745 gcc_assert (min1 == min2);
746 if (!operand_equal_p (max1, max2, 0))
747 {
748 warn_odr (t1, t2, NULL, NULL, warn, warned,
749 G_("an array of different size is defined "
750 "in another translation unit"));
751 return false;
752 }
753 gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
754 gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
755 TYPE_SIZE_UNIT (t2), 0));
756 }
757 return true;
758
759 case METHOD_TYPE:
760 case FUNCTION_TYPE:
761 /* Function types are the same if the return type and arguments types
762 are the same. */
763 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
764 {
765 warn_odr (t1, t2, NULL, NULL, warn, warned,
766 G_("has different return value "
767 "in another translation unit"));
768 if (warn && warned)
769 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
770 return false;
771 }
772
773 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
774 return true;
775 else
776 {
777 tree parms1, parms2;
778
779 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
780 parms1 && parms2;
781 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
782 {
783 if (!odr_subtypes_equivalent_p
784 (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
785 {
786 warn_odr (t1, t2, NULL, NULL, warn, warned,
787 G_("has different parameters in another "
788 "translation unit"));
789 if (warn && warned)
790 warn_types_mismatch (TREE_VALUE (parms1),
791 TREE_VALUE (parms2));
792 return false;
793 }
794 }
795
796 if (parms1 || parms2)
797 {
798 warn_odr (t1, t2, NULL, NULL, warn, warned,
799 G_("has different parameters "
800 "in another translation unit"));
801 return false;
802 }
803
804 return true;
805 }
806
807 case RECORD_TYPE:
808 case UNION_TYPE:
809 case QUAL_UNION_TYPE:
810 {
811 tree f1, f2;
812
813 /* For aggregate types, all the fields must be the same. */
814 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
815 {
816 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
817 f1 || f2;
818 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
819 {
820 /* Skip non-fields. */
821 while (f1 && TREE_CODE (f1) != FIELD_DECL)
822 f1 = TREE_CHAIN (f1);
823 while (f2 && TREE_CODE (f2) != FIELD_DECL)
824 f2 = TREE_CHAIN (f2);
825 if (!f1 || !f2)
826 break;
827 if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
828 break;
829 if (DECL_NAME (f1) != DECL_NAME (f2)
830 && !DECL_ARTIFICIAL (f1))
831 {
832 warn_odr (t1, t2, f1, f2, warn, warned,
833 G_("a field with different name is defined "
834 "in another translation unit"));
835 return false;
836 }
837 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
838 {
839 /* Do not warn about artificial fields and just go into generic
840 field mismatch warning. */
841 if (DECL_ARTIFICIAL (f1))
842 break;
843
844 warn_odr (t1, t2, f1, f2, warn, warned,
845 G_("a field of same name but different type "
846 "is defined in another translation unit"));
847 if (warn && warned)
848 warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
849 return false;
850 }
851 if (!gimple_compare_field_offset (f1, f2))
852 {
853 /* Do not warn about artificial fields and just go into generic
854 field mismatch warning. */
855 if (DECL_ARTIFICIAL (f1))
856 break;
857 warn_odr (t1, t2, t1, t2, warn, warned,
858 G_("fields has different layout "
859 "in another translation unit"));
860 return false;
861 }
862 gcc_assert (DECL_NONADDRESSABLE_P (f1)
863 == DECL_NONADDRESSABLE_P (f2));
864 }
865
866 /* If one aggregate has more fields than the other, they
867 are not the same. */
868 if (f1 || f2)
869 {
870 warn_odr (t1, t2, NULL, NULL, warn, warned,
871 G_("a type with different number of fields "
872 "is defined in another translation unit"));
873 return false;
874 }
875 if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
876 && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
877 != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
878 {
879 for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
880 f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
881 f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
882 {
883 if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
884 {
885 warn_odr (t1, t2, f1, f2, warn, warned,
886 G_("a different method of same type "
887 "is defined in another translation unit"));
888 return false;
889 }
890 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
891 {
892 warn_odr (t1, t2, f1, f2, warn, warned,
893 G_("s definition that differs by virtual "
894 "keyword in another translation unit"));
895 return false;
896 }
897 if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
898 {
899 warn_odr (t1, t2, f1, f2, warn, warned,
900 G_("virtual table layout differs in another "
901 "translation unit"));
902 return false;
903 }
904 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
905 {
906 warn_odr (t1, t2, f1, f2, warn, warned,
907 G_("method with incompatible type is defined "
908 "in another translation unit"));
909 return false;
910 }
911 }
912 if (f1 || f2)
913 {
914 warn_odr (t1, t2, NULL, NULL, warn, warned,
915 G_("a type with different number of methods "
916 "is defined in another translation unit"));
917 return false;
918 }
919 }
920 gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
921 gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
922 TYPE_SIZE_UNIT (t2), 0));
923 }
924
925 return true;
926 }
927
928 default:
929 gcc_unreachable ();
930 }
931}
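/* Editorial illustration, not part of the original GCC source: a mismatch
   of the kind the comparison above diagnoses under -Wodr.  The block is
   guarded so it is never compiled.  */
#if 0
/* Translation unit 1.  */
struct S { int count; virtual void f (); };

/* Translation unit 2.  */
struct S { long count; virtual void f (); };	/* reported as "a field of same
						   name but different type is
						   defined in another translation
						   unit".  */
#endif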
932
61a74079
JH
933/* TYPE is equivalent to VAL by ODR, but its tree representation differs
934 from VAL->type. This may happen in LTO where tree merging did not merge
935 all variants of the same type. It may or may not mean an ODR violation.
936 Add it to the list of duplicates and warn on some violations. */
937
549bcbd1 938static bool
61a74079
JH
939add_type_duplicate (odr_type val, tree type)
940{
549bcbd1 941 bool build_bases = false;
61a74079
JH
942 if (!val->types_set)
943 val->types_set = pointer_set_create ();
944
549bcbd1
JH
945 /* Always prefer complete type to be the leader. */
946 if (!COMPLETE_TYPE_P (val->type)
947 && COMPLETE_TYPE_P (type))
948 {
949 tree tmp = type;
950
951 build_bases = true;
952 type = val->type;
953 val->type = tmp;
954 }
955
61a74079
JH
956 /* See if this duplicate is new. */
957 if (!pointer_set_insert (val->types_set, type))
958 {
959 bool merge = true;
960 bool base_mismatch = false;
549bcbd1 961 unsigned int i,j;
c59f7203
JH
962 bool warned = false;
963 pointer_set_t *visited = pointer_set_create ();
549bcbd1 964
61a74079
JH
965 gcc_assert (in_lto_p);
966 vec_safe_push (val->types, type);
61a74079
JH
967
968 /* First we compare memory layout. */
c59f7203
JH
969 if (!odr_types_equivalent_p (val->type, type, !flag_ltrans && !val->odr_violated,
970 &warned, visited))
61a74079
JH
971 {
972 merge = false;
ec77d61f 973 odr_violation_reported = true;
549bcbd1 974 val->odr_violated = true;
61a74079
JH
975 if (cgraph_dump_file)
976 {
c59f7203 977 fprintf (cgraph_dump_file, "ODR violation\n");
61a74079
JH
978
979 print_node (cgraph_dump_file, "", val->type, 0);
980 putc ('\n',cgraph_dump_file);
981 print_node (cgraph_dump_file, "", type, 0);
982 putc ('\n',cgraph_dump_file);
983 }
984 }
c59f7203 985 pointer_set_destroy (visited);
61a74079
JH
986
987 /* Next sanity check that bases are the same. If not, we will end
988 up producing wrong answers. */
549bcbd1
JH
989 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
990 && TREE_CODE (val->type) == RECORD_TYPE
991 && TREE_CODE (type) == RECORD_TYPE
992 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
61a74079 993 {
549bcbd1
JH
994 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
995 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
996 {
997 odr_type base = get_odr_type
998 (BINFO_TYPE
999 (BINFO_BASE_BINFO (TYPE_BINFO (type),
1000 i)),
1001 true);
1002 if (val->bases.length () <= j || val->bases[j] != base)
1003 base_mismatch = true;
1004 j++;
1005 }
1006 if (base_mismatch)
61a74079 1007 {
549bcbd1
JH
1008 merge = false;
1009 odr_violation_reported = true;
1010
c59f7203
JH
1011 if (!warned && !val->odr_violated)
1012 warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1013 "a type with the same name but different bases is "
1014 "defined in another translation unit");
549bcbd1
JH
1015 val->odr_violated = true;
1016 if (cgraph_dump_file)
1017 {
1018 fprintf (cgraph_dump_file, "ODR bse violation or merging bug?\n");
1019
1020 print_node (cgraph_dump_file, "", val->type, 0);
1021 putc ('\n',cgraph_dump_file);
1022 print_node (cgraph_dump_file, "", type, 0);
1023 putc ('\n',cgraph_dump_file);
1024 }
61a74079
JH
1025 }
1026 }
1027
1028 /* Regularize things a little. During LTO same types may come with
1029 different BINFOs. Either because their virtual table was
1030 not merged by tree merging and only later at decl merging or
1031 because one type comes with an external vtable, while the other
1032 comes with an internal one. We want to merge equivalent binfos to conserve
1033 memory and streaming overhead.
1034
1035 The external vtables are more harmful: they contain references
1036 to external declarations of methods that may be defined in the
1037 merged LTO unit. For this reason we absolutely need to remove
1038 them and replace by internal variants. Not doing so will lead
1039 to incomplete answers from possible_polymorphic_call_targets. */
549bcbd1
JH
1040 if (!flag_ltrans && merge
1041 && TREE_CODE (val->type) == RECORD_TYPE
1042 && TREE_CODE (type) == RECORD_TYPE
1043 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1044 && TYPE_MAIN_VARIANT (type) == type
1045 && TYPE_MAIN_VARIANT (val->type) == val->type
1046 && BINFO_VTABLE (TYPE_BINFO (val->type))
1047 && BINFO_VTABLE (TYPE_BINFO (type)))
61a74079
JH
1048 {
1049 tree master_binfo = TYPE_BINFO (val->type);
1050 tree v1 = BINFO_VTABLE (master_binfo);
1051 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
1052
1053 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
1054 {
1055 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
1056 && operand_equal_p (TREE_OPERAND (v1, 1),
1057 TREE_OPERAND (v2, 1), 0));
1058 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
1059 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
1060 }
1061 gcc_assert (DECL_ASSEMBLER_NAME (v1)
1062 == DECL_ASSEMBLER_NAME (v2));
1063
1064 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
1065 {
1066 unsigned int i;
1067
c7e1befa 1068 set_type_binfo (val->type, TYPE_BINFO (type));
c3284718 1069 for (i = 0; i < val->types->length (); i++)
61a74079
JH
1070 {
1071 if (TYPE_BINFO ((*val->types)[i])
1072 == master_binfo)
c7e1befa 1073 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
61a74079 1074 }
c7e1befa 1075 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
61a74079
JH
1076 }
1077 else
c7e1befa 1078 set_type_binfo (type, master_binfo);
61a74079
JH
1079 }
1080 }
549bcbd1 1081 return build_bases;
61a74079
JH
1082}
1083
eefe9a99
JH
1084/* Get ODR type hash entry for TYPE. If INSERT is true, create
1085 possibly new entry. */
1086
1087odr_type
1088get_odr_type (tree type, bool insert)
1089{
1090 odr_type_d **slot;
1091 odr_type val;
1092 hashval_t hash;
549bcbd1
JH
1093 bool build_bases = false;
1094 bool insert_to_odr_array = false;
1095 int base_id = -1;
1096
1097 type = main_odr_variant (type);
eefe9a99 1098
eefe9a99 1099 hash = hash_type_name (type);
c203e8a7
TS
1100 slot
1101 = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
eefe9a99
JH
1102 if (!slot)
1103 return NULL;
1104
1105 /* See if we already have entry for type. */
1106 if (*slot)
1107 {
1108 val = *slot;
1109
61a74079
JH
1110 /* With LTO we need to support multiple tree representation of
1111 the same ODR type. */
1112 if (val->type != type)
549bcbd1 1113 build_bases = add_type_duplicate (val, type);
eefe9a99
JH
1114 }
1115 else
1116 {
766090c2 1117 val = ggc_cleared_alloc<odr_type_d> ();
eefe9a99
JH
1118 val->type = type;
1119 val->bases = vNULL;
1120 val->derived_types = vNULL;
0e1474e5 1121 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
549bcbd1
JH
1122 build_bases = COMPLETE_TYPE_P (val->type);
1123 insert_to_odr_array = true;
1124 }
1125
1126 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1127 && type == TYPE_MAIN_VARIANT (type))
1128 {
1129 tree binfo = TYPE_BINFO (type);
1130 unsigned int i;
1131
1132 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);
1133
2d1644bf 1134 val->all_derivations_known = type_all_derivations_known_p (type);
eefe9a99
JH
1135 *slot = val;
1136 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
1137 /* For now record only polymorphic types. other are
1138 pointless for devirtualization and we can not precisely
1139 determine ODR equivalency of these during LTO. */
1140 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
1141 {
1142 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
1143 i)),
1144 true);
549bcbd1 1145 gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
eefe9a99
JH
1146 base->derived_types.safe_push (val);
1147 val->bases.safe_push (base);
549bcbd1
JH
1148 if (base->id > base_id)
1149 base_id = base->id;
eefe9a99 1150 }
549bcbd1
JH
1151 }
1152 /* Ensure that type always appears after bases. */
1153 if (insert_to_odr_array)
1154 {
eefe9a99 1155 if (odr_types_ptr)
c3284718 1156 val->id = odr_types.length ();
eefe9a99
JH
1157 vec_safe_push (odr_types_ptr, val);
1158 }
549bcbd1
JH
1159 else if (base_id > val->id)
1160 {
1161 odr_types[val->id] = 0;
1162 /* Be sure we did not record any derived types; these may need
1163 renumbering too. */
1164 gcc_assert (val->derived_types.length() == 0);
1165 if (odr_types_ptr)
1166 val->id = odr_types.length ();
1167 vec_safe_push (odr_types_ptr, val);
1168 }
eefe9a99
JH
1169 return val;
1170}
1171
1172/* Dump ODR type T and all its derived types. INDENT specifies indentation for
1173 recursive printing. */
1174
1175static void
1176dump_odr_type (FILE *f, odr_type t, int indent=0)
1177{
1178 unsigned int i;
1179 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
1180 print_generic_expr (f, t->type, TDF_SLIM);
2d1644bf
JH
1181 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
1182 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
eefe9a99
JH
1183 if (TYPE_NAME (t->type))
1184 {
1185 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
1186 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
1187 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
1188 }
c3284718 1189 if (t->bases.length ())
eefe9a99
JH
1190 {
1191 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
c3284718 1192 for (i = 0; i < t->bases.length (); i++)
eefe9a99
JH
1193 fprintf (f, " %i", t->bases[i]->id);
1194 fprintf (f, "\n");
1195 }
c3284718 1196 if (t->derived_types.length ())
eefe9a99
JH
1197 {
1198 fprintf (f, "%*s derived types:\n", indent * 2, "");
c3284718 1199 for (i = 0; i < t->derived_types.length (); i++)
eefe9a99
JH
1200 dump_odr_type (f, t->derived_types[i], indent + 1);
1201 }
1202 fprintf (f, "\n");
1203}
1204
1205/* Dump the type inheritance graph. */
1206
1207static void
1208dump_type_inheritance_graph (FILE *f)
1209{
1210 unsigned int i;
0e1474e5
JH
1211 if (!odr_types_ptr)
1212 return;
eefe9a99 1213 fprintf (f, "\n\nType inheritance graph:\n");
c3284718 1214 for (i = 0; i < odr_types.length (); i++)
eefe9a99 1215 {
549bcbd1 1216 if (odr_types[i] && odr_types[i]->bases.length () == 0)
eefe9a99
JH
1217 dump_odr_type (f, odr_types[i]);
1218 }
c3284718 1219 for (i = 0; i < odr_types.length (); i++)
61a74079 1220 {
549bcbd1 1221 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
61a74079
JH
1222 {
1223 unsigned int j;
1224 fprintf (f, "Duplicate tree types for odr type %i\n", i);
1225 print_node (f, "", odr_types[i]->type, 0);
c3284718 1226 for (j = 0; j < odr_types[i]->types->length (); j++)
61a74079
JH
1227 {
1228 tree t;
1229 fprintf (f, "duplicate #%i\n", j);
1230 print_node (f, "", (*odr_types[i]->types)[j], 0);
1231 t = (*odr_types[i]->types)[j];
1232 while (TYPE_P (t) && TYPE_CONTEXT (t))
1233 {
1234 t = TYPE_CONTEXT (t);
1235 print_node (f, "", t, 0);
1236 }
1237 putc ('\n',f);
1238 }
1239 }
1240 }
eefe9a99
JH
1241}
1242
1243/* Given method type T, return the type of the class it belongs to.
1244 Look up the this pointer and get its type. */
1245
64cbf23d 1246tree
d570d364 1247method_class_type (const_tree t)
eefe9a99
JH
1248{
1249 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
68377e53 1250 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
eefe9a99
JH
1251
1252 return TREE_TYPE (first_parm_type);
1253}
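/* Editorial illustration, not part of the original GCC source; the block
   is guarded so it is never compiled.  */
#if 0
struct A { virtual int f (int); };
/* TREE_TYPE of A::f is a METHOD_TYPE whose TYPE_ARG_TYPES starts with the
   implicit this parameter of type A *, so method_class_type applied to it
   returns the RECORD_TYPE of A.  */
#endif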
1254
1255/* Initialize IPA devirt and build inheritance tree graph. */
1256
1257void
1258build_type_inheritance_graph (void)
1259{
b270b096 1260 struct symtab_node *n;
eefe9a99
JH
1261 FILE *inheritance_dump_file;
1262 int flags;
1263
c203e8a7 1264 if (odr_hash)
eefe9a99
JH
1265 return;
1266 timevar_push (TV_IPA_INHERITANCE);
1267 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
c203e8a7 1268 odr_hash = new odr_hash_type (23);
eefe9a99
JH
1269
1270 /* We reconstruct the graph starting from the types of all methods seen in
1271 the unit. */
b270b096 1272 FOR_EACH_SYMBOL (n)
7de90a6c 1273 if (is_a <cgraph_node *> (n)
b270b096 1274 && DECL_VIRTUAL_P (n->decl)
d52f5295 1275 && n->real_symbol_p ())
549bcbd1
JH
1276 get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
1277 true);
b270b096
JH
1278
1279 /* Look also for virtual tables of types that do not define any methods.
1280
1281 We need it in the case where class B has a virtual base of class A
1282 re-defining its virtual method and there is a class C with no virtual
1283 methods with B as a virtual base.
1284
1285 Here we output B's virtual method in two variants - for non-virtual
1286 and virtual inheritance. B's virtual table has the non-virtual version,
1287 while C's has the virtual one.
1288
1289 For this reason we need to know about C in order to include both
1290 variants of B. More correctly, record_target_from_binfo should
1291 add both variants of the method when walking B, but we have no
1292 link in between them.
1293
1294 We rely on the fact that either the method is exported and thus we
1295 assume it is called externally or C is in anonymous namespace and
1296 thus we will see the vtable. */
1297
7de90a6c 1298 else if (is_a <varpool_node *> (n)
b270b096
JH
1299 && DECL_VIRTUAL_P (n->decl)
1300 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
1301 && TYPE_BINFO (DECL_CONTEXT (n->decl))
1302 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
549bcbd1 1303 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
eefe9a99
JH
1304 if (inheritance_dump_file)
1305 {
1306 dump_type_inheritance_graph (inheritance_dump_file);
1307 dump_end (TDI_inheritance, inheritance_dump_file);
1308 }
1309 timevar_pop (TV_IPA_INHERITANCE);
1310}
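/* Editorial illustration, not part of the original GCC source: the
   virtual-inheritance situation described in the comment inside
   build_type_inheritance_graph above.  Names are made up; the block is
   guarded so it is never compiled.  */
#if 0
struct A { virtual void f () {} };
struct B : virtual A { void f () {} };	/* B::f exists in a non-virtual and a
					   virtual-inheritance variant.  */
struct C : virtual B { };		/* C defines no methods of its own, yet
					   its vtable is what makes the virtual
					   variant of B::f visible; hence vtables
					   of method-less types are walked too.  */
#endif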
1311
ccb05ef2
JH
1312/* Return true if N has a reference from a live virtual table
1313 (and thus can be a destination of a polymorphic call).
1314 Be conservatively correct when the callgraph is not built or
1315 if the method may be referred to externally. */
1316
1317static bool
1318referenced_from_vtable_p (struct cgraph_node *node)
1319{
1320 int i;
1321 struct ipa_ref *ref;
1322 bool found = false;
1323
1324 if (node->externally_visible
1325 || node->used_from_other_partition)
1326 return true;
1327
1328 /* Keep this test constant time.
1329 It is unlikely this can happen except for the case where speculative
1330 devirtualization introduced many speculative edges to this node.
1331 In this case the target is very likely alive anyway. */
1332 if (node->ref_list.referring.length () > 100)
1333 return true;
1334
1335 /* We need references built. */
1336 if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
1337 return true;
1338
d122681a 1339 for (i = 0; node->iterate_referring (i, ref); i++)
ccb05ef2
JH
1340
1341 if ((ref->use == IPA_REF_ALIAS
d52f5295 1342 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
ccb05ef2
JH
1343 || (ref->use == IPA_REF_ADDR
1344 && TREE_CODE (ref->referring->decl) == VAR_DECL
1345 && DECL_VIRTUAL_P (ref->referring->decl)))
1346 {
1347 found = true;
1348 break;
1349 }
1350 return found;
1351}
1352
68377e53 1353/* If TARGET has an associated node, record it in the NODES array.
ec77d61f
JH
1354 CAN_REFER specifies if the program can refer to the target directly.
1355 If TARGET is unknown (NULL) or it can not be inserted (for example because
1356 its body was already removed and there is no way to refer to it), clear
1357 COMPLETEP. */
eefe9a99
JH
1358
1359static void
1360maybe_record_node (vec <cgraph_node *> &nodes,
68377e53 1361 tree target, pointer_set_t *inserted,
ec77d61f 1362 bool can_refer,
68377e53 1363 bool *completep)
eefe9a99 1364{
958c1d61
JH
1365 struct cgraph_node *target_node, *alias_target;
1366 enum availability avail;
88f592e3
JH
1367
1368 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
1369 list of targets; the runtime effect of calling them is undefined.
1370 Only "real" virtual methods should be accounted. */
1371 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
1372 return;
eefe9a99 1373
ec77d61f
JH
1374 if (!can_refer)
1375 {
1376 /* The only case when method of anonymous namespace becomes unreferable
1377 is when we completely optimized it out. */
1378 if (flag_ltrans
1379 || !target
88f592e3 1380 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
ec77d61f
JH
1381 *completep = false;
1382 return;
1383 }
1384
88f592e3 1385 if (!target)
68377e53
JH
1386 return;
1387
d52f5295 1388 target_node = cgraph_node::get (target);
68377e53 1389
958c1d61
JH
1390 /* Prefer the alias target over aliases, so we do not get confused by
1391 fake duplicates. */
1392 if (target_node)
1393 {
d52f5295 1394 alias_target = target_node->ultimate_alias_target (&avail);
958c1d61
JH
1395 if (target_node != alias_target
1396 && avail >= AVAIL_AVAILABLE
d52f5295 1397 && target_node->get_availability ())
958c1d61
JH
1398 target_node = alias_target;
1399 }
1400
ccb05ef2
JH
1401 /* A method can only be called by a polymorphic call if any
1402 of the vtables referring to it are alive.
1403
1404 While this holds for non-anonymous functions, too, there are
1405 cases where we want to keep them in the list; for example
1406 inline functions with -fno-weak are static, but we still
1407 may devirtualize them when instance comes from other unit.
1408 The same holds for LTO.
1409
1410 Currently we ignore these functions in speculative devirtualization.
1411 ??? Maybe it would make sense to be more aggressive for LTO even
1412 elsewhere. */
1413 if (!flag_ltrans
1414 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
1415 && (!target_node
1416 || !referenced_from_vtable_p (target_node)))
1417 ;
1418 /* See if TARGET is a useful function we can deal with. */
1419 else if (target_node != NULL
1420 && (TREE_PUBLIC (target)
1421 || DECL_EXTERNAL (target)
1422 || target_node->definition)
d52f5295 1423 && target_node->real_symbol_p ())
0e1474e5 1424 {
68377e53 1425 gcc_assert (!target_node->global.inlined_to);
d52f5295 1426 gcc_assert (target_node->real_symbol_p ());
958c1d61 1427 if (!pointer_set_insert (inserted, target_node->decl))
68377e53
JH
1428 {
1429 pointer_set_insert (cached_polymorphic_call_targets,
1430 target_node);
1431 nodes.safe_push (target_node);
1432 }
0e1474e5 1433 }
68377e53 1434 else if (completep
2d1644bf
JH
1435 && (!type_in_anonymous_namespace_p
1436 (DECL_CONTEXT (target))
1437 || flag_ltrans))
0439a947 1438 *completep = false;
eefe9a99
JH
1439}
1440
68377e53
JH
1441/* See if BINFO's type matches OUTER_TYPE. If so, look up the
1442 BINFO of the subtype of OTR_TYPE at OFFSET and in that BINFO find the
2d1644bf
JH
1443 method in the vtable and insert the method into the NODES array
1444 or into BASES_TO_CONSIDER if this array is non-NULL.
eefe9a99
JH
1445 Otherwise recurse to base BINFOs.
1446 This matches what get_binfo_at_offset does, but with the offset
1447 being unknown.
1448
a3788dde
JH
1449 TYPE_BINFOS is a stack of BINFOs of types with a defined
1450 virtual table seen on the way from the class type to BINFO.
eefe9a99
JH
1451
1452 MATCHED_VTABLES tracks virtual tables in which we already looked up
68377e53
JH
1453 the virtual function. INSERTED tracks nodes we already
1454 inserted.
3462aa02
JH
1455
1456 ANONYMOUS is true if BINFO is part of anonymous namespace.
ec77d61f
JH
1457
1458 Clear COMPLETEP when we hit unreferable target.
eefe9a99
JH
1459 */
1460
1461static void
68377e53 1462record_target_from_binfo (vec <cgraph_node *> &nodes,
2d1644bf 1463 vec <tree> *bases_to_consider,
68377e53
JH
1464 tree binfo,
1465 tree otr_type,
a3788dde 1466 vec <tree> &type_binfos,
68377e53
JH
1467 HOST_WIDE_INT otr_token,
1468 tree outer_type,
1469 HOST_WIDE_INT offset,
1470 pointer_set_t *inserted,
1471 pointer_set_t *matched_vtables,
ec77d61f
JH
1472 bool anonymous,
1473 bool *completep)
eefe9a99
JH
1474{
1475 tree type = BINFO_TYPE (binfo);
1476 int i;
1477 tree base_binfo;
1478
eefe9a99 1479
a3788dde
JH
1480 if (BINFO_VTABLE (binfo))
1481 type_binfos.safe_push (binfo);
68377e53 1482 if (types_same_for_odr (type, outer_type))
eefe9a99 1483 {
a3788dde
JH
1484 int i;
1485 tree type_binfo = NULL;
1486
1487 /* Lookup BINFO with virtual table. For normal types it is always last
1488 binfo on stack. */
1489 for (i = type_binfos.length () - 1; i >= 0; i--)
1490 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
1491 {
1492 type_binfo = type_binfos[i];
1493 break;
1494 }
1495 if (BINFO_VTABLE (binfo))
1496 type_binfos.pop ();
1497 /* If this is duplicated BINFO for base shared by virtual inheritance,
1498 we may not have its associated vtable. This is not a problem, since
1499 we will walk it on the other path. */
1500 if (!type_binfo)
6d6af792 1501 return;
68377e53
JH
1502 tree inner_binfo = get_binfo_at_offset (type_binfo,
1503 offset, otr_type);
ec77d61f
JH
1504 if (!inner_binfo)
1505 {
1506 gcc_assert (odr_violation_reported);
1507 return;
1508 }
3462aa02
JH
1509 /* For types in anonymous namespace first check if the respective vtable
1510 is alive. If not, we know the type can't be called. */
1511 if (!flag_ltrans && anonymous)
1512 {
68377e53 1513 tree vtable = BINFO_VTABLE (inner_binfo);
2c8326a5 1514 varpool_node *vnode;
3462aa02
JH
1515
1516 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
1517 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
9041d2e6 1518 vnode = varpool_node::get (vtable);
67348ccc 1519 if (!vnode || !vnode->definition)
3462aa02
JH
1520 return;
1521 }
68377e53 1522 gcc_assert (inner_binfo);
2d1644bf
JH
1523 if (bases_to_consider
1524 ? !pointer_set_contains (matched_vtables, BINFO_VTABLE (inner_binfo))
1525 : !pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
68377e53 1526 {
ec77d61f
JH
1527 bool can_refer;
1528 tree target = gimple_get_virt_method_for_binfo (otr_token,
1529 inner_binfo,
1530 &can_refer);
2d1644bf
JH
1531 if (!bases_to_consider)
1532 maybe_record_node (nodes, target, inserted, can_refer, completep);
1533 /* Destructors are never called via construction vtables. */
1534 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
1535 bases_to_consider->safe_push (target);
68377e53 1536 }
eefe9a99
JH
1537 return;
1538 }
1539
1540 /* Walk bases. */
1541 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1542 /* Walking bases that have no virtual method is a pointless exercise. */
1543 if (polymorphic_type_binfo_p (base_binfo))
2d1644bf 1544 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
a3788dde 1545 type_binfos,
68377e53 1546 otr_token, outer_type, offset, inserted,
ec77d61f 1547 matched_vtables, anonymous, completep);
a3788dde
JH
1548 if (BINFO_VTABLE (binfo))
1549 type_binfos.pop ();
eefe9a99
JH
1550}
1551
1552/* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
1553 of TYPE, insert them to NODES, recurse into derived nodes.
1554 INSERTED is used to avoid duplicate insertions of methods into NODES.
ec77d61f 1555 MATCHED_VTABLES are used to avoid duplicate walking vtables.
2d1644bf
JH
1556 Clear COMPLETEP if unreferable target is found.
1557
1558 If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
1559 all cases where BASE_SKIPPED is true (because the base is an abstract
1560 class). */
eefe9a99
JH
1561
1562static void
1563possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
1564 pointer_set_t *inserted,
1565 pointer_set_t *matched_vtables,
1566 tree otr_type,
1567 odr_type type,
68377e53
JH
1568 HOST_WIDE_INT otr_token,
1569 tree outer_type,
ec77d61f 1570 HOST_WIDE_INT offset,
2d1644bf
JH
1571 bool *completep,
1572 vec <tree> &bases_to_consider,
1573 bool consider_construction)
eefe9a99
JH
1574{
1575 tree binfo = TYPE_BINFO (type->type);
1576 unsigned int i;
a3788dde 1577 vec <tree> type_binfos = vNULL;
2d1644bf
JH
1578 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
1579
1580 /* We may need to consider types w/o instances because of possible derived
1581 types using their methods either directly or via construction vtables.
1582 We are safe to skip them when all derivations are known, since we will
1583 handle them later.
1584 This is done by recording them to BASES_TO_CONSIDER array. */
1585 if (possibly_instantiated || consider_construction)
1586 {
1587 record_target_from_binfo (nodes,
1588 (!possibly_instantiated
1589 && type_all_derivations_known_p (type->type))
1590 ? &bases_to_consider : NULL,
1591 binfo, otr_type, type_binfos, otr_token,
1592 outer_type, offset,
1593 inserted, matched_vtables,
1594 type->anonymous_namespace, completep);
1595 }
a3788dde 1596 type_binfos.release ();
c3284718 1597 for (i = 0; i < type->derived_types.length (); i++)
eefe9a99
JH
1598 possible_polymorphic_call_targets_1 (nodes, inserted,
1599 matched_vtables,
1600 otr_type,
1601 type->derived_types[i],
2d1644bf
JH
1602 otr_token, outer_type, offset, completep,
1603 bases_to_consider, consider_construction);
eefe9a99
JH
1604}
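/* Editorial illustration, not part of the original GCC source: what the
   recursive walk above collects for a simple call.  Names are made up; the
   block is guarded so it is never compiled.  */
#if 0
struct A { virtual int f () { return 1; } };
struct B : A { int f () { return 2; } };
struct C : B { };			/* inherits B::f.  */

int
g (A *a)
{
  /* For otr_type == A, otr_token == 0 the walk records A::f and B::f
     (C reuses B's vtable slot).  The result is flagged as complete only
     when all derivations of A are known, e.g. when A is final or lives
     in an anonymous namespace.  */
  return a->f ();
}
#endif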
1605
1606/* Cache of queries for polymorphic call targets.
1607
1608 Enumerating all call targets may get expensive when there are many
1609 polymorphic calls in the program, so we memoize all the previous
1610 queries and avoid duplicated work. */
1611
1612struct polymorphic_call_target_d
1613{
eefe9a99 1614 HOST_WIDE_INT otr_token;
68377e53
JH
1615 ipa_polymorphic_call_context context;
1616 odr_type type;
eefe9a99 1617 vec <cgraph_node *> targets;
a0fd3373 1618 int speculative_targets;
ec77d61f 1619 bool complete;
eefe9a99
JH
1620};
1621
1622/* Polymorphic call target cache helpers. */
1623
1624struct polymorphic_call_target_hasher
1625{
1626 typedef polymorphic_call_target_d value_type;
1627 typedef polymorphic_call_target_d compare_type;
1628 static inline hashval_t hash (const value_type *);
1629 static inline bool equal (const value_type *, const compare_type *);
1630 static inline void remove (value_type *);
1631};
1632
1633/* Return the computed hashcode for ODR_QUERY. */
1634
1635inline hashval_t
1636polymorphic_call_target_hasher::hash (const value_type *odr_query)
1637{
d313d45f
AK
1638 inchash::hash hstate (odr_query->otr_token);
1639
1640 hstate.add_wide_int (odr_query->type->id);
1641 hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
1642 hstate.add_wide_int (odr_query->context.offset);
68377e53 1643
3339f0bc
JH
1644 if (odr_query->context.speculative_outer_type)
1645 {
d313d45f
AK
1646 hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
1647 hstate.add_wide_int (odr_query->context.speculative_offset);
3339f0bc 1648 }
d313d45f
AK
1649 hstate.add_flag (odr_query->context.maybe_in_construction);
1650 hstate.add_flag (odr_query->context.maybe_derived_type);
1651 hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
1652 hstate.commit_flag ();
1653 return hstate.end ();
eefe9a99
JH
1654}
1655
1656/* Compare cache entries T1 and T2. */
1657
1658inline bool
1659polymorphic_call_target_hasher::equal (const value_type *t1,
1660 const compare_type *t2)
1661{
68377e53
JH
1662 return (t1->type == t2->type && t1->otr_token == t2->otr_token
1663 && t1->context.offset == t2->context.offset
3339f0bc 1664 && t1->context.speculative_offset == t2->context.speculative_offset
68377e53 1665 && t1->context.outer_type == t2->context.outer_type
3339f0bc 1666 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
68377e53
JH
1667 && t1->context.maybe_in_construction
1668 == t2->context.maybe_in_construction
3339f0bc
JH
1669 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
1670 && (t1->context.speculative_maybe_derived_type
1671 == t2->context.speculative_maybe_derived_type));
eefe9a99
JH
1672}
1673
1674/* Remove entry in polymorphic call target cache hash. */
1675
1676inline void
1677polymorphic_call_target_hasher::remove (value_type *v)
1678{
1679 v->targets.release ();
1680 free (v);
1681}
1682
1683/* Polymorphic call target query cache. */
1684
c203e8a7 1685typedef hash_table<polymorphic_call_target_hasher>
eefe9a99 1686 polymorphic_call_target_hash_type;
c203e8a7 1687static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
eefe9a99
JH
1688
1689/* Destroy polymorphic call target query cache. */
1690
1691static void
1692free_polymorphic_call_targets_hash ()
1693{
0e1474e5
JH
1694 if (cached_polymorphic_call_targets)
1695 {
c203e8a7
TS
1696 delete polymorphic_call_target_hash;
1697 polymorphic_call_target_hash = NULL;
0e1474e5
JH
1698 pointer_set_destroy (cached_polymorphic_call_targets);
1699 cached_polymorphic_call_targets = NULL;
1700 }
eefe9a99
JH
1701}
1702
1703/* When virtual function is removed, we may need to flush the cache. */
1704
1705static void
1706devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
1707{
0e1474e5
JH
1708 if (cached_polymorphic_call_targets
1709 && pointer_set_contains (cached_polymorphic_call_targets, n))
eefe9a99
JH
1710 free_polymorphic_call_targets_hash ();
1711}
1712
d570d364
JH
1713/* Return true when TYPE contains a polymorphic type and thus is interesting
1714 for devirtualization machinery. */
1715
1716bool
1717contains_polymorphic_type_p (const_tree type)
1718{
1719 type = TYPE_MAIN_VARIANT (type);
1720
1721 if (RECORD_OR_UNION_TYPE_P (type))
1722 {
1723 if (TYPE_BINFO (type)
1724 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1725 return true;
1726 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1727 if (TREE_CODE (fld) == FIELD_DECL
1728 && !DECL_ARTIFICIAL (fld)
1729 && contains_polymorphic_type_p (TREE_TYPE (fld)))
1730 return true;
1731 return false;
1732 }
1733 if (TREE_CODE (type) == ARRAY_TYPE)
1734 return contains_polymorphic_type_p (TREE_TYPE (type));
1735 return false;
1736}
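/* Illustrative example (editorial addition, not part of the original source):
   contains_polymorphic_type_p answers questions of the following kind.

     struct P { virtual ~P (); };        // polymorphic: has a vtable
     struct Q { int i; P p; };           // contains a polymorphic field
     struct R { int i; double d; };      // nothing polymorphic inside
     struct S { Q q[4]; };               // polymorphic via the array elements

   Given the tree types for these classes, the predicate above would be
   expected to return true for P, Q and S and false for R.  */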
1737
68377e53
JH
1738/* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
1739 is contained at CONTEXT->OFFSET. Walk the memory representation of
1740 CONTEXT->OUTER_TYPE and find the outermost class type that matches
1741 EXPECTED_TYPE or contains EXPECTED_TYPE as a base. Update CONTEXT
1742 to represent it.
1743
1744 For example when CONTEXT represents type
1745 class A
1746 {
1747 int a;
1748 class B b;
1749 }
1750 and we look for type at offset sizeof(int), we end up with B and offset 0.
1751 If the same is produced by multiple inheritance, we end up with A and offset
1752 sizeof(int).
1753
1754 If we cannot find the corresponding class, give up by setting
1755 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to 0.
1756 Return true when the lookup was successful. */
1757
1758static bool
1759get_class_context (ipa_polymorphic_call_context *context,
1760 tree expected_type)
1761{
1762 tree type = context->outer_type;
1763 HOST_WIDE_INT offset = context->offset;
3339f0bc
JH
1764 bool speculative = false;
1765 bool speculation_valid = false;
1766 bool valid = false;
1767
1768 if (!context->outer_type)
1769 {
a0fd3373
JH
1770 type = context->outer_type = expected_type;
1771 context->offset = offset = 0;
3339f0bc
JH
1772 }
1773 /* See if the speculative type seems to be derived from outer_type.
1774 Then speculation is valid only if it really is a derived type and derived types
1775 are allowed.
1776
1777 The test does not look only for derivation; it also accepts the case where
1778 outer_type is a field of speculative_outer_type. In this case either
1779 MAYBE_DERIVED_TYPE is false and we have full non-speculative information, or
1780 the loop below will correctly update SPECULATIVE_OUTER_TYPE
1781 and SPECULATIVE_MAYBE_DERIVED_TYPE. */
1782 if (context->speculative_outer_type
1783 && context->speculative_offset >= context->offset
1784 && contains_type_p (context->speculative_outer_type,
1785 context->offset - context->speculative_offset,
1786 context->outer_type))
1787 speculation_valid = context->maybe_derived_type;
1788 else
1789 {
1790 context->speculative_outer_type = NULL;
1791 context->speculative_offset = 0;
1792 context->speculative_maybe_derived_type = false;
1793 }
1794
68377e53 1795 /* Find the sub-object the constant actually refers to and mark whether it is
1796 an artificial one (as opposed to a user-defined one).
1797
1798 This loop is performed twice; the first time for outer_type and the second time
1799 for speculative_outer_type. The second iteration has SPECULATIVE set. */
68377e53
JH
1800 while (true)
1801 {
1802 HOST_WIDE_INT pos, size;
1803 tree fld;
1804
1805 /* On a match, just return what we found. */
1806 if (TREE_CODE (type) == TREE_CODE (expected_type)
a0fd3373
JH
1807 && (!in_lto_p
1808 || (TREE_CODE (type) == RECORD_TYPE
1809 && TYPE_BINFO (type)
1810 && polymorphic_type_binfo_p (TYPE_BINFO (type))))
68377e53
JH
1811 && types_same_for_odr (type, expected_type))
1812 {
3339f0bc
JH
1813 if (speculative)
1814 {
1815 gcc_assert (speculation_valid);
1816 gcc_assert (valid);
1817
1818 /* If we did not match the offset, just give up on speculation. */
1819 if (offset != 0
1820 || (types_same_for_odr (context->speculative_outer_type,
1821 context->outer_type)
1822 && (context->maybe_derived_type
1823 == context->speculative_maybe_derived_type)))
1824 {
1825 context->speculative_outer_type = NULL;
1826 context->speculative_offset = 0;
1827 }
1828 return true;
1829 }
1830 else
1831 {
1832 /* A type cannot contain itself at a non-zero offset. In that case
1833 just give up. */
1834 if (offset != 0)
1835 {
1836 valid = false;
1837 goto give_up;
1838 }
1839 valid = true;
1840 /* If speculation is not valid or we determined type precisely,
1841 we are done. */
1842 if (!speculation_valid
1843 || !context->maybe_derived_type)
1844 {
1845 context->speculative_outer_type = NULL;
1846 context->speculative_offset = 0;
1847 return true;
1848 }
1849 /* Otherwise look into speculation now. */
1850 else
1851 {
1852 speculative = true;
1853 type = context->speculative_outer_type;
1854 offset = context->speculative_offset;
1855 continue;
1856 }
1857 }
68377e53
JH
1858 }
1859
1860 /* Walk the fields and find the corresponding one at OFFSET. */
1861 if (TREE_CODE (type) == RECORD_TYPE)
1862 {
1863 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1864 {
1865 if (TREE_CODE (fld) != FIELD_DECL)
1866 continue;
1867
1868 pos = int_bit_position (fld);
1869 size = tree_to_uhwi (DECL_SIZE (fld));
1870 if (pos <= offset && (pos + size) > offset)
1871 break;
1872 }
1873
1874 if (!fld)
1875 goto give_up;
1876
c7e1befa 1877 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
68377e53
JH
1878 offset -= pos;
1879 /* DECL_ARTIFICIAL represents a basetype. */
1880 if (!DECL_ARTIFICIAL (fld))
1881 {
3339f0bc
JH
1882 if (!speculative)
1883 {
1884 context->outer_type = type;
1885 context->offset = offset;
1886 /* As soon as we see a field containing the type,
1887 we know we are not looking for derivations. */
1888 context->maybe_derived_type = false;
1889 }
1890 else
1891 {
1892 context->speculative_outer_type = type;
1893 context->speculative_offset = offset;
1894 context->speculative_maybe_derived_type = false;
1895 }
68377e53
JH
1896 }
1897 }
1898 else if (TREE_CODE (type) == ARRAY_TYPE)
1899 {
c7e1befa 1900 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
68377e53
JH
1901
1902 /* Give up if we don't know array size. */
1903 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
1904 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
1905 goto give_up;
1906 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
1907 type = subtype;
3339f0bc
JH
1908 if (!speculative)
1909 {
1910 context->outer_type = type;
1911 context->offset = offset;
1912 context->maybe_derived_type = false;
1913 }
1914 else
1915 {
1916 context->speculative_outer_type = type;
1917 context->speculative_offset = offset;
1918 context->speculative_maybe_derived_type = false;
1919 }
68377e53
JH
1920 }
1921 /* Give up on anything else. */
1922 else
1923 goto give_up;
1924 }
1925
1926 /* If we failed to find the subtype we were looking for, give up and fall back to the
1927 most generic query. */
1928give_up:
3339f0bc
JH
1929 context->speculative_outer_type = NULL;
1930 context->speculative_offset = 0;
1931 context->speculative_maybe_derived_type = false;
1932 if (valid)
1933 return true;
68377e53
JH
1934 context->outer_type = expected_type;
1935 context->offset = 0;
1936 context->maybe_derived_type = true;
e400f081
JH
1937 context->maybe_in_construction = true;
1938 /* POD can be changed to an instance of a polymorphic type by
1939 placement new. Here we play it safe and assume that any
1940 non-polymorphic type is POD. */
1941 if ((TREE_CODE (type) != RECORD_TYPE
1942 || !TYPE_BINFO (type)
1943 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
3339f0bc
JH
1944 && (!TYPE_SIZE (type)
1945 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
e400f081
JH
1946 || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
1947 tree_to_uhwi (TYPE_SIZE (type)))))
1948 return true;
68377e53
JH
1949 return false;
1950}
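/* Illustrative example (editorial addition): the placement-new situation the
   final check above guards against.  Storage whose declared type is not
   polymorphic may still end up holding a polymorphic object:

     #include <new>
     struct A { virtual void f (); };
     alignas (A) unsigned char buf[sizeof (A)];
     A *p = new (buf) A;   // buf now holds an object of dynamic type A
     // p->f ();           // polymorphic call into storage declared as char[]

   so a sufficiently large non-polymorphic outer type cannot be used to rule
   the call out, and the fallback context above conservatively keeps
   maybe_derived_type and maybe_in_construction set.  */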
1951
1952/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
1953
1954static bool
1955contains_type_p (tree outer_type, HOST_WIDE_INT offset,
1956 tree otr_type)
1957{
3339f0bc 1958 ipa_polymorphic_call_context context = {offset, 0,
c7e1befa 1959 TYPE_MAIN_VARIANT (outer_type),
3339f0bc 1960 NULL, false, true, false};
68377e53
JH
1961 return get_class_context (&context, otr_type);
1962}
1963
390675c8
JH
1964/* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
1965
1966static tree
85942f45
JH
1967subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1968 tree vtable)
390675c8
JH
1969{
1970 tree v = BINFO_VTABLE (binfo);
1971 int i;
1972 tree base_binfo;
85942f45 1973 unsigned HOST_WIDE_INT this_offset;
390675c8 1974
85942f45
JH
1975 if (v)
1976 {
1977 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1978 gcc_unreachable ();
1979
1980 if (offset == this_offset
1981 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1982 return binfo;
1983 }
390675c8 1984
390675c8
JH
1985 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1986 if (polymorphic_type_binfo_p (base_binfo))
1987 {
1988 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1989 if (base_binfo)
1990 return base_binfo;
1991 }
1992 return NULL;
1993}
1994
85942f45
JH
1995/* T is known constant value of virtual table pointer.
1996 Store virtual table to V and its offset to OFFSET.
1997 Return false if T does not look like virtual table reference. */
390675c8 1998
85942f45 1999bool
d570d364
JH
2000vtable_pointer_value_to_vtable (const_tree t, tree *v,
2001 unsigned HOST_WIDE_INT *offset)
390675c8
JH
2002{
2003 /* We expect &MEM[(void *)&virtual_table + 16B].
2004 We obtain the object's BINFO from the context of the virtual table.
2005 This one contains a pointer to the virtual table represented via
2006 POINTER_PLUS_EXPR. Verify that this pointer matches what
2007 we propagated through.
2008
2009 In the case of virtual inheritance, the virtual tables may
2010 be nested, i.e. the offset may be different from 16 and we may
2011 need to dive into the type representation. */
85942f45 2012 if (TREE_CODE (t) == ADDR_EXPR
390675c8
JH
2013 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2014 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2015 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2016 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2017 == VAR_DECL)
2018 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2019 (TREE_OPERAND (t, 0), 0), 0)))
2020 {
85942f45
JH
2021 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2022 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2023 return true;
390675c8 2024 }
85942f45
JH
2025
2026 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
2027 We need to handle it when T comes from static variable initializer or
2028 BINFO. */
2029 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2030 {
2031 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2032 t = TREE_OPERAND (t, 0);
2033 }
2034 else
2035 *offset = 0;
2036
2037 if (TREE_CODE (t) != ADDR_EXPR)
2038 return false;
2039 *v = TREE_OPERAND (t, 0);
2040 return true;
2041}
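/* Illustrative example (editorial addition): what the constants handled above
   typically look like.  For

     struct A { virtual void f (); };

   the vtable pointer store emitted by A's constructor shows up in the gimple
   dumps roughly as

     this->_vptr.A = &MEM[(void *)&_ZTV1A + 16B];

   i.e. the address of the vtable symbol _ZTV1A plus the offset past the
   offset-to-top and RTTI slots (16 bytes on LP64 targets).  For such a value
   vtable_pointer_value_to_vtable would set *V to _ZTV1A and *OFFSET to 16.  */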
2042
2043/* T is known constant value of virtual table pointer. Return BINFO of the
2044 instance type. */
2045
2046tree
d570d364 2047vtable_pointer_value_to_binfo (const_tree t)
85942f45
JH
2048{
2049 tree vtable;
2050 unsigned HOST_WIDE_INT offset;
2051
2052 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2053 return NULL_TREE;
2054
2055 /* FIXME: for stores of construction vtables we return NULL,
2056 because we do not have BINFO for those. Eventually we should fix
2057 our representation to allow this case to be handled, too.
2058 In the case we see a store of BINFO, however, we may assume
2059 that standard folding will be able to cope with it. */
2060 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2061 offset, vtable);
390675c8
JH
2062}
2063
058d0a90
JH
2064/* We know that the instance is stored in variable or parameter
2065 (not dynamically allocated) and we want to disprove the fact
2066 that it may be in construction at invocation of CALL.
2067
2068 For the variable to be in construction we actually need to
2069 be in constructor of corresponding global variable or
2070 the inline stack of CALL must contain the constructor.
2071 Check this condition. This check works safely only before
2072 IPA passes, because inline stacks may become out of date
2073 later. */
2074
2075bool
2076decl_maybe_in_construction_p (tree base, tree outer_type,
2077 gimple call, tree function)
2078{
2079 outer_type = TYPE_MAIN_VARIANT (outer_type);
2080 gcc_assert (DECL_P (base));
2081
2082 /* After inlining the code unification optimizations may invalidate
2083 inline stacks. Also we need to give up on global variables after
2084 IPA, because addresses of these may have been propagated to their
2085 constructors. */
2086 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
2087 return true;
2088
2089 /* Pure functions cannot make any changes to the dynamic type;
2090 that requires writing to memory. */
2091 if (!auto_var_in_fn_p (base, function)
2092 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
2093 return false;
2094
2095 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
2096 block = BLOCK_SUPERCONTEXT (block))
2097 if (BLOCK_ABSTRACT_ORIGIN (block)
2098 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
2099 {
2100 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
2101
2102 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2103 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2104 && !DECL_CXX_DESTRUCTOR_P (fn)))
2105 {
2106 /* Watch for clones where we constant propagated the first
2107 argument (pointer to the instance). */
2108 fn = DECL_ABSTRACT_ORIGIN (fn);
2109 if (!fn
2110 || !is_global_var (base)
2111 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2112 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2113 && !DECL_CXX_DESTRUCTOR_P (fn)))
2114 continue;
2115 }
2116 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
2117 continue;
2118
2119 /* FIXME: this can go away once we have ODR types equivalency on
2120 LTO level. */
2121 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2122 return true;
2123 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
2124 if (types_same_for_odr (type, outer_type))
2125 return true;
2126 }
2127
2128 if (TREE_CODE (base) == VAR_DECL
2129 && is_global_var (base))
2130 {
2131 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2132 || (!DECL_CXX_CONSTRUCTOR_P (function)
2133 && !DECL_CXX_DESTRUCTOR_P (function)))
2134 {
2135 if (!DECL_ABSTRACT_ORIGIN (function))
2136 return false;
2137 /* Watch for clones where we constant propagated the first
2138 argument (pointer to the instance). */
2139 function = DECL_ABSTRACT_ORIGIN (function);
2140 if (!function
2141 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2142 || (!DECL_CXX_CONSTRUCTOR_P (function)
2143 && !DECL_CXX_DESTRUCTOR_P (function)))
2144 return false;
2145 }
2146 /* FIXME: this can go away once we have ODR types equivalency on
2147 LTO level. */
2148 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2149 return true;
2150 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
2151 if (types_same_for_odr (type, outer_type))
2152 return true;
2153 }
2154 return false;
2155}
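/* Illustrative example (editorial addition): why the in-construction check
   matters.  With

     struct B { virtual int f () { return 1; } int call () { return f (); } };
     struct D : B { virtual int f () { return 2; } };

     int test () { B b; return b.call (); }

   the automatic variable b can never have dynamic type D, so once B::call is
   inlined the call to f () can be devirtualized to B::f, unless the call
   happens on the inline stack of B's constructor or destructor, where virtual
   dispatch temporarily resolves according to the type being constructed.
   decl_maybe_in_construction_p tries to disprove that possibility for
   declared variables and parameters.  */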
2156
5bccb77a
JH
2157/* Produce a polymorphic call context for a call to a method of an instance
2158 that is located within BASE (that is assumed to be a decl) at OFFSET. */
2159
2160static void
2161get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
2162 tree base, HOST_WIDE_INT offset)
2163{
2164 gcc_assert (DECL_P (base));
2165
c7e1befa 2166 context->outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
5bccb77a 2167 context->offset = offset;
3339f0bc
JH
2168 context->speculative_outer_type = NULL;
2169 context->speculative_offset = 0;
2170 context->speculative_maybe_derived_type = true;
5bccb77a
JH
2171 /* Make very conservative assumption that all objects
2172 may be in construction.
2173 TODO: ipa-prop already contains code to tell better.
2174 merge it later. */
2175 context->maybe_in_construction = true;
2176 context->maybe_derived_type = false;
2177}
2178
2179/* CST is an invariant (address of decl), try to get meaningful
2180 polymorphic call context for a polymorphic call of a method
2181 of an instance of OTR_TYPE that is located at OFFSET of this invariant.
2182 Return FALSE if nothing meaningful can be found. */
2183
2184bool
2185get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
2186 tree cst,
2187 tree otr_type,
2188 HOST_WIDE_INT offset)
2189{
2190 HOST_WIDE_INT offset2, size, max_size;
2191 tree base;
2192
2193 if (TREE_CODE (cst) != ADDR_EXPR)
79c7de84 2194 return false;
5bccb77a
JH
2195
2196 cst = TREE_OPERAND (cst, 0);
2197 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
79c7de84
EB
2198 if (!DECL_P (base) || max_size == -1 || max_size != size)
2199 return false;
5bccb77a
JH
2200
2201 /* Only type inconsistent programs can have otr_type that is
2202 not part of outer type. */
79c7de84
EB
2203 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
2204 return false;
5bccb77a 2205
79c7de84 2206 get_polymorphic_call_info_for_decl (context, base, offset);
5bccb77a
JH
2207 return true;
2208}
2209
3339f0bc
JH
2210/* See if OP is an SSA name initialized as a copy or by a single assignment.
2211 If so, walk the SSA graph up. */
2212
2213static tree
2214walk_ssa_copies (tree op)
2215{
2216 STRIP_NOPS (op);
2217 while (TREE_CODE (op) == SSA_NAME
2218 && !SSA_NAME_IS_DEFAULT_DEF (op)
2219 && SSA_NAME_DEF_STMT (op)
2220 && gimple_assign_single_p (SSA_NAME_DEF_STMT (op)))
2221 {
2222 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
2223 return op;
2224 op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
2225 STRIP_NOPS (op);
2226 }
2227 return op;
2228}
2229
68377e53
JH
2230/* Given the call REF in FNDECL, determine the class of the polymorphic
2231 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
058d0a90
JH
2232 CALL is an optional argument giving the actual statement (usually a call) where
2233 the context is used.
68377e53
JH
2234 Return a pointer to the object described by the context. */
2235
2236tree
2237get_polymorphic_call_info (tree fndecl,
2238 tree ref,
2239 tree *otr_type,
2240 HOST_WIDE_INT *otr_token,
058d0a90
JH
2241 ipa_polymorphic_call_context *context,
2242 gimple call)
68377e53
JH
2243{
2244 tree base_pointer;
2245 *otr_type = obj_type_ref_class (ref);
2246 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
2247
2248 /* Set up basic info in case we find nothing interesting in the analysis. */
3339f0bc
JH
2249 context->speculative_outer_type = NULL;
2250 context->speculative_offset = 0;
2251 context->speculative_maybe_derived_type = true;
c7e1befa 2252 context->outer_type = TYPE_MAIN_VARIANT (*otr_type);
68377e53
JH
2253 context->offset = 0;
2254 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
2255 context->maybe_derived_type = true;
2d1644bf 2256 context->maybe_in_construction = true;
68377e53
JH
2257
2258 /* Walk SSA for outer object. */
2259 do
2260 {
3339f0bc
JH
2261 base_pointer = walk_ssa_copies (base_pointer);
2262 if (TREE_CODE (base_pointer) == ADDR_EXPR)
68377e53
JH
2263 {
2264 HOST_WIDE_INT size, max_size;
2265 HOST_WIDE_INT offset2;
2266 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
2267 &offset2, &size, &max_size);
2268
2269 /* If this is a varying address, punt. */
2270 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
2271 && max_size != -1
2272 && max_size == size)
2273 {
2274 /* We found a dereference of a pointer. The type of the pointer
2275 and MEM_REF is meaningless, but we can look further. */
2276 if (TREE_CODE (base) == MEM_REF)
2277 {
2278 base_pointer = TREE_OPERAND (base, 0);
2279 context->offset
807e902e 2280 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
68377e53
JH
2281 context->outer_type = NULL;
2282 }
2283 /* We found base object. In this case the outer_type
2284 is known. */
2285 else if (DECL_P (base))
2286 {
7656ee72 2287 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
68377e53
JH
2288
2289 /* Only type inconsistent programs can have otr_type that is
2290 not part of outer type. */
7656ee72
JH
2291 if (!contains_type_p (TREE_TYPE (base),
2292 context->offset + offset2, *otr_type))
3e86c6a8
JH
2293 {
2294 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
2295 code sequences; we arrange the calls to be builtin_unreachable
2296 later. */
2297 *otr_token = INT_MAX;
2298 return base_pointer;
2299 }
5bccb77a
JH
2300 get_polymorphic_call_info_for_decl (context, base,
2301 context->offset + offset2);
058d0a90
JH
2302 if (context->maybe_in_construction && call)
2303 context->maybe_in_construction
2304 = decl_maybe_in_construction_p (base,
2305 context->outer_type,
2306 call,
2307 current_function_decl);
3339f0bc 2308 return base_pointer;
68377e53
JH
2309 }
2310 else
2311 break;
2312 }
2313 else
2314 break;
2315 }
2316 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
2317 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
2318 {
2319 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
2320 * BITS_PER_UNIT;
2321 base_pointer = TREE_OPERAND (base_pointer, 0);
2322 }
2323 else
2324 break;
2325 }
2326 while (true);
2327
2328 /* Try to determine type of the outer object. */
2329 if (TREE_CODE (base_pointer) == SSA_NAME
2330 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2331 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
2332 {
2333 /* See if parameter is THIS pointer of a method. */
2334 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
2335 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
2336 {
c7e1befa
JH
2337 context->outer_type
2338 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
68377e53
JH
2339 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
2340
2341 /* Dynamic casting has possibly upcasted the type
2342 in the hierarchy. In this case the outer type is less
2343 informative than the inner type and we should forget
2344 about it. */
2345 if (!contains_type_p (context->outer_type, context->offset,
2346 *otr_type))
2347 {
2348 context->outer_type = NULL;
2349 return base_pointer;
2350 }
2351
2352 /* If the function is a constructor or destructor, then
d74db8ff 2353 the type is possibly in construction, but we know
2354 it is not a derived type. */
2355 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
2356 || DECL_CXX_DESTRUCTOR_P (fndecl))
2357 {
2358 context->maybe_in_construction = true;
2359 context->maybe_derived_type = false;
2360 }
2361 else
2362 {
2363 context->maybe_derived_type = true;
2364 context->maybe_in_construction = false;
2365 }
2366 return base_pointer;
2367 }
2368 /* Non-PODs passed by value are really passed by invisible
2369 reference. In this case we also know the type of the
2370 object. */
2371 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
2372 {
c7e1befa
JH
2373 context->outer_type
2374 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
68377e53
JH
2375 gcc_assert (!POINTER_TYPE_P (context->outer_type));
2376 /* Only type inconsistent programs can have otr_type that is
2377 not part of outer type. */
2378 if (!contains_type_p (context->outer_type, context->offset,
2379 *otr_type))
2380 {
3e86c6a8
JH
2381 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
2382 code sequences; we arrange the calls to be builtin_unreachable
2383 later. */
2384 *otr_token = INT_MAX;
68377e53
JH
2385 return base_pointer;
2386 }
2387 context->maybe_derived_type = false;
2388 context->maybe_in_construction = false;
2389 return base_pointer;
2390 }
2391 }
3339f0bc
JH
2392
2393 tree base_type = TREE_TYPE (base_pointer);
2394
2395 if (TREE_CODE (base_pointer) == SSA_NAME
2396 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2397 && TREE_CODE (SSA_NAME_VAR (base_pointer)) != PARM_DECL)
2398 {
2399 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
2400 code sequences; we arrange the calls to be builtin_unreachable
2401 later. */
2402 *otr_token = INT_MAX;
2403 return base_pointer;
2404 }
2405 if (TREE_CODE (base_pointer) == SSA_NAME
2406 && SSA_NAME_DEF_STMT (base_pointer)
2407 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
2408 base_type = TREE_TYPE (gimple_assign_rhs1
2409 (SSA_NAME_DEF_STMT (base_pointer)));
2410
2411 if (POINTER_TYPE_P (base_type)
2412 && contains_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
2413 context->offset,
2414 *otr_type))
2415 {
2416 context->speculative_outer_type = TYPE_MAIN_VARIANT
2417 (TREE_TYPE (base_type));
2418 context->speculative_offset = context->offset;
2419 context->speculative_maybe_derived_type = true;
2420 }
68377e53
JH
2421 /* TODO: There are multiple ways to derive a type. For instance
2422 if BASE_POINTER is passed to a constructor call prior to our reference.
2423 We do not do this kind of flow-sensitive analysis yet. */
2424 return base_pointer;
2425}
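/* Illustrative example (editorial addition): the THIS-pointer case handled
   above.  In

     struct A { virtual void f (); void g () { f (); } };

   the call to f () inside A::g uses the incoming this parameter, so the
   context gets OUTER_TYPE A with maybe_derived_type set (any class derived
   from A may be passed in), while the same call written inside A's
   constructor or destructor instead gets maybe_in_construction set and
   maybe_derived_type cleared, because during construction and destruction
   the dynamic type is exactly A.  */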
2426
2427/* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2428 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2429 and insert them to NODES.
2430
2431 MATCHED_VTABLES and INSERTED are used to avoid duplicated work. */
2432
2433static void
2434record_targets_from_bases (tree otr_type,
2435 HOST_WIDE_INT otr_token,
2436 tree outer_type,
2437 HOST_WIDE_INT offset,
ec77d61f 2438 vec <cgraph_node *> &nodes,
68377e53
JH
2439 pointer_set_t *inserted,
2440 pointer_set_t *matched_vtables,
2441 bool *completep)
2442{
2443 while (true)
2444 {
2445 HOST_WIDE_INT pos, size;
2446 tree base_binfo;
2447 tree fld;
2448
2449 if (types_same_for_odr (outer_type, otr_type))
2450 return;
2451
2452 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2453 {
2454 if (TREE_CODE (fld) != FIELD_DECL)
2455 continue;
2456
2457 pos = int_bit_position (fld);
2458 size = tree_to_shwi (DECL_SIZE (fld));
ec77d61f
JH
2459 if (pos <= offset && (pos + size) > offset
2460 /* Do not get confused by zero sized bases. */
2461 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
68377e53
JH
2462 break;
2463 }
2464 /* Within a class type we should always find corresponding fields. */
2465 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2466
2467 /* Nonbasetypes should have been stripped by outer_class_type. */
2468 gcc_assert (DECL_ARTIFICIAL (fld));
2469
2470 outer_type = TREE_TYPE (fld);
2471 offset -= pos;
2472
2473 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2474 offset, otr_type);
ec77d61f
JH
2475 if (!base_binfo)
2476 {
2477 gcc_assert (odr_violation_reported);
2478 return;
2479 }
68377e53
JH
2480 gcc_assert (base_binfo);
2481 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
2482 {
ec77d61f
JH
2483 bool can_refer;
2484 tree target = gimple_get_virt_method_for_binfo (otr_token,
2485 base_binfo,
2486 &can_refer);
2d1644bf
JH
2487 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2488 maybe_record_node (nodes, target, inserted, can_refer, completep);
68377e53
JH
2489 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
2490 }
2491 }
2492}
2493
3462aa02
JH
2494/* When virtual table is removed, we may need to flush the cache. */
2495
2496static void
2c8326a5 2497devirt_variable_node_removal_hook (varpool_node *n,
3462aa02
JH
2498 void *d ATTRIBUTE_UNUSED)
2499{
2500 if (cached_polymorphic_call_targets
67348ccc
DM
2501 && DECL_VIRTUAL_P (n->decl)
2502 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
3462aa02
JH
2503 free_polymorphic_call_targets_hash ();
2504}
2505
eefe9a99 2506/* Return vector containing possible targets of polymorphic call of type
68377e53
JH
2507 OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
2508 If INCLUDE_BASES is true, walk also the base types of OUTER_TYPE containing
2509 OTR_TYPE and include their virtual methods. This is useful for types
2510 possibly in construction or destruction where the virtual table may
2511 temporarily change to one of the base types. INCLUDE_DERIVED_TYPES makes
2512 us walk the inheritance graph for all derivations.
2513
3e86c6a8
JH
2514 OTR_TOKEN == INT_MAX is used to mark calls that are provably
2515 undefined and should be redirected to unreachable.
2516
add5c763 2517 If COMPLETEP is non-NULL, store true if the list is complete.
eefe9a99
JH
2518 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
2519 in the target cache. If user needs to visit every target list
2520 just once, it can memoize them.
2521
a0fd3373
JH
2522 SPECULATION_TARGETS specifies the number of targets that are speculatively
2523 likely. These include targets specified by the speculative part
2524 of the polymorphic call context and also exclude all targets for classes
2525 in construction.
ec77d61f 2526
eefe9a99
JH
2527 Returned vector is placed into cache. It is NOT caller's responsibility
2528 to free it. The vector can be freed on cgraph_remove_node call if
2529 the particular node is a virtual function present in the cache. */
2530
2531vec <cgraph_node *>
2532possible_polymorphic_call_targets (tree otr_type,
2533 HOST_WIDE_INT otr_token,
68377e53
JH
2534 ipa_polymorphic_call_context context,
2535 bool *completep,
ec77d61f 2536 void **cache_token,
a0fd3373 2537 int *speculative_targetsp)
eefe9a99
JH
2538{
2539 static struct cgraph_node_hook_list *node_removal_hook_holder;
2540 pointer_set_t *inserted;
2541 pointer_set_t *matched_vtables;
add5c763 2542 vec <cgraph_node *> nodes = vNULL;
2d1644bf 2543 vec <tree> bases_to_consider = vNULL;
68377e53 2544 odr_type type, outer_type;
eefe9a99
JH
2545 polymorphic_call_target_d key;
2546 polymorphic_call_target_d **slot;
2547 unsigned int i;
2548 tree binfo, target;
ec77d61f
JH
2549 bool complete;
2550 bool can_refer;
2d1644bf 2551 bool skipped = false;
eefe9a99 2552
c7e1befa
JH
2553 otr_type = TYPE_MAIN_VARIANT (otr_type);
2554
3e86c6a8 2555 /* If ODR is not initialized, return empty incomplete list. */
c203e8a7 2556 if (!odr_hash)
79c7de84
EB
2557 {
2558 if (completep)
2559 *completep = false;
beb683ab
MJ
2560 if (cache_token)
2561 *cache_token = NULL;
a0fd3373
JH
2562 if (speculative_targetsp)
2563 *speculative_targetsp = 0;
79c7de84
EB
2564 return nodes;
2565 }
add5c763 2566
3e86c6a8
JH
2567 /* If we hit type inconsistency, just return empty list of targets. */
2568 if (otr_token == INT_MAX)
2569 {
2570 if (completep)
2571 *completep = true;
beb683ab
MJ
2572 if (cache_token)
2573 *cache_token = NULL;
a0fd3373
JH
2574 if (speculative_targetsp)
2575 *speculative_targetsp = 0;
3e86c6a8
JH
2576 return nodes;
2577 }
2578
68377e53 2579 type = get_odr_type (otr_type, true);
eefe9a99 2580
c7e1befa
JH
2581 /* Recording type variants would waste the results cache. */
2582 gcc_assert (!context.outer_type
2583 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
2584
68377e53 2585 /* Lookup the outer class type we want to walk. */
a0fd3373 2586 if ((context.outer_type || context.speculative_outer_type)
3e86c6a8
JH
2587 && !get_class_context (&context, otr_type))
2588 {
2589 if (completep)
2590 *completep = false;
beb683ab
MJ
2591 if (cache_token)
2592 *cache_token = NULL;
a0fd3373
JH
2593 if (speculative_targetsp)
2594 *speculative_targetsp = 0;
3e86c6a8
JH
2595 return nodes;
2596 }
eefe9a99 2597
c7e1befa
JH
2598 /* Check that get_class_context kept the main variant. */
2599 gcc_assert (!context.outer_type
2600 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
2601
79c7de84 2602 /* We canonicalize our query, so we do not need extra hashtable entries. */
68377e53
JH
2603
2604 /* Without an outer type, we have no use for the offset. Just do the
2605 basic search from the inner type. */
2606 if (!context.outer_type)
2607 {
2608 context.outer_type = otr_type;
2609 context.offset = 0;
2610 }
2611 /* We need to update our hierarchy if the type does not exist. */
2612 outer_type = get_odr_type (context.outer_type, true);
ec77d61f 2613 /* If the type is complete, there are no derivations. */
68377e53
JH
2614 if (TYPE_FINAL_P (outer_type->type))
2615 context.maybe_derived_type = false;
eefe9a99
JH
2616
2617 /* Initialize query cache. */
2618 if (!cached_polymorphic_call_targets)
2619 {
2620 cached_polymorphic_call_targets = pointer_set_create ();
c203e8a7
TS
2621 polymorphic_call_target_hash
2622 = new polymorphic_call_target_hash_type (23);
eefe9a99 2623 if (!node_removal_hook_holder)
3462aa02
JH
2624 {
2625 node_removal_hook_holder =
2626 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
2627 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
2628 NULL);
2629 }
eefe9a99
JH
2630 }
2631
2632 /* Lookup cached answer. */
2633 key.type = type;
2634 key.otr_token = otr_token;
68377e53 2635 key.context = context;
c203e8a7 2636 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
eefe9a99
JH
2637 if (cache_token)
2638 *cache_token = (void *)*slot;
2639 if (*slot)
68377e53
JH
2640 {
2641 if (completep)
ec77d61f 2642 *completep = (*slot)->complete;
a0fd3373
JH
2643 if (speculative_targetsp)
2644 *speculative_targetsp = (*slot)->speculative_targets;
68377e53
JH
2645 return (*slot)->targets;
2646 }
2647
ec77d61f 2648 complete = true;
eefe9a99
JH
2649
2650 /* Do actual search. */
2651 timevar_push (TV_IPA_VIRTUAL_CALL);
2652 *slot = XCNEW (polymorphic_call_target_d);
2653 if (cache_token)
68377e53 2654 *cache_token = (void *)*slot;
eefe9a99
JH
2655 (*slot)->type = type;
2656 (*slot)->otr_token = otr_token;
68377e53 2657 (*slot)->context = context;
a0fd3373 2658 (*slot)->speculative_targets = 0;
eefe9a99
JH
2659
2660 inserted = pointer_set_create ();
2661 matched_vtables = pointer_set_create ();
2662
a0fd3373
JH
2663 if (context.speculative_outer_type)
2664 {
2665 odr_type speculative_outer_type;
2666 speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
2667 if (TYPE_FINAL_P (speculative_outer_type->type))
2668 context.speculative_maybe_derived_type = false;
2669 binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
2670 context.speculative_offset, otr_type);
2671 if (binfo)
2672 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
2673 &can_refer);
2674 else
2675 target = NULL;
2676
2677 if (target)
2678 {
2679 /* In the case we get complete method, we don't need
2680 to walk derivations. */
2681 if (DECL_FINAL_P (target))
2682 context.speculative_maybe_derived_type = false;
2683 }
2684 if (type_possibly_instantiated_p (speculative_outer_type->type))
2685 maybe_record_node (nodes, target, inserted, can_refer, &complete);
2686 if (binfo)
2687 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
2688 /* Next walk recursively all derived types. */
2689 if (context.speculative_maybe_derived_type)
2690 {
2691 /* For anonymous namespace types we can attempt to build full type.
2692 All derivations must be in this unit (unless we see partial unit). */
2693 if (!type->all_derivations_known)
2694 complete = false;
2695 for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
2696 possible_polymorphic_call_targets_1 (nodes, inserted,
2697 matched_vtables,
2698 otr_type,
2699 speculative_outer_type->derived_types[i],
2700 otr_token, speculative_outer_type->type,
2701 context.speculative_offset, &complete,
2702 bases_to_consider,
2703 false);
2704 }
2705 /* Finally walk bases, if asked to. */
2706 (*slot)->speculative_targets = nodes.length();
2707 }
2708
eefe9a99 2709 /* First see virtual method of type itself. */
68377e53
JH
2710 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
2711 context.offset, otr_type);
ec77d61f
JH
2712 if (binfo)
2713 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
2714 &can_refer);
2715 else
68377e53 2716 {
ec77d61f
JH
2717 gcc_assert (odr_violation_reported);
2718 target = NULL;
2719 }
68377e53 2720
2d1644bf
JH
2721 /* Destructors are never called through construction virtual tables,
2722 because the type is always known. */
2723 if (target && DECL_CXX_DESTRUCTOR_P (target))
2724 context.maybe_in_construction = false;
ec77d61f
JH
2725
2726 if (target)
2727 {
2728 /* In the case we get complete method, we don't need
68377e53
JH
2729 to walk derivations. */
2730 if (DECL_FINAL_P (target))
2731 context.maybe_derived_type = false;
2732 }
2d1644bf
JH
2733
2734 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
2735 if (type_possibly_instantiated_p (outer_type->type))
2736 maybe_record_node (nodes, target, inserted, can_refer, &complete);
ec77d61f 2737 else
2d1644bf
JH
2738 {
2739 skipped = true;
2740 gcc_assert (in_lto_p || context.maybe_derived_type);
2741 }
79c7de84 2742
549bcbd1
JH
2743 if (binfo)
2744 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
eefe9a99 2745
ec77d61f 2746 /* Next walk recursively all derived types. */
68377e53
JH
2747 if (context.maybe_derived_type)
2748 {
2749 /* For anonymous namespace types we can attempt to build full type.
2750 All derivations must be in this unit (unless we see partial unit). */
2d1644bf 2751 if (!type->all_derivations_known)
ec77d61f 2752 complete = false;
68377e53
JH
2753 for (i = 0; i < outer_type->derived_types.length(); i++)
2754 possible_polymorphic_call_targets_1 (nodes, inserted,
2755 matched_vtables,
79c7de84
EB
2756 otr_type,
2757 outer_type->derived_types[i],
68377e53 2758 otr_token, outer_type->type,
2d1644bf
JH
2759 context.offset, &complete,
2760 bases_to_consider,
2761 context.maybe_in_construction);
68377e53 2762 }
79c7de84 2763
ec77d61f 2764 /* Finally walk bases, if asked to. */
a0fd3373
JH
2765 if (!(*slot)->speculative_targets)
2766 (*slot)->speculative_targets = nodes.length();
2d1644bf
JH
2767
2768 /* Destructors are never called through construction virtual tables,
2769 because the type is always known. One of the entries may be cxa_pure_virtual,
2770 so look at at least two of them. */
2771 if (context.maybe_in_construction)
2772 for (i = 0; i < MIN (nodes.length (), 2); i++)
2773 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
2774 context.maybe_in_construction = false;
ec77d61f 2775 if (context.maybe_in_construction)
2d1644bf
JH
2776 {
2777 if (type != outer_type
2778 && (!skipped
2779 || (context.maybe_derived_type
2780 && !type_all_derivations_known_p (outer_type->type))))
2781 record_targets_from_bases (otr_type, otr_token, outer_type->type,
2782 context.offset, nodes, inserted,
2783 matched_vtables, &complete);
2784 if (skipped)
2785 maybe_record_node (nodes, target, inserted, can_refer, &complete);
2786 for (i = 0; i < bases_to_consider.length(); i++)
2787 maybe_record_node (nodes, bases_to_consider[i], inserted, can_refer, &complete);
2788 }
2789 bases_to_consider.release();
ec77d61f 2790
eefe9a99 2791 (*slot)->targets = nodes;
ec77d61f 2792 (*slot)->complete = complete;
68377e53 2793 if (completep)
ec77d61f 2794 *completep = complete;
a0fd3373
JH
2795 if (speculative_targetsp)
2796 *speculative_targetsp = (*slot)->speculative_targets;
eefe9a99
JH
2797
2798 pointer_set_destroy (inserted);
2799 pointer_set_destroy (matched_vtables);
2800 timevar_pop (TV_IPA_VIRTUAL_CALL);
2801 return nodes;
2802}
2803
2804/* Dump all possible targets of a polymorphic call. */
2805
2806void
2807dump_possible_polymorphic_call_targets (FILE *f,
68377e53
JH
2808 tree otr_type,
2809 HOST_WIDE_INT otr_token,
2810 const ipa_polymorphic_call_context &ctx)
eefe9a99
JH
2811{
2812 vec <cgraph_node *> targets;
2813 bool final;
549bcbd1 2814 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
eefe9a99 2815 unsigned int i;
a0fd3373 2816 int speculative;
eefe9a99
JH
2817
2818 if (!type)
2819 return;
2820 targets = possible_polymorphic_call_targets (otr_type, otr_token,
68377e53 2821 ctx,
a0fd3373 2822 &final, NULL, &speculative);
68377e53 2823 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
eefe9a99 2824 print_generic_expr (f, type->type, TDF_SLIM);
ec77d61f
JH
2825 fprintf (f, " token %i\n", (int)otr_token);
2826 if (ctx.outer_type || ctx.offset)
2827 {
2828 fprintf (f, " Contained in type:");
2829 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
2830 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
2831 ctx.offset);
2832 }
a0fd3373
JH
2833 if (ctx.speculative_outer_type)
2834 {
2835 fprintf (f, " Speculatively contained in type:");
2836 print_generic_expr (f, ctx.speculative_outer_type, TDF_SLIM);
2837 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
2838 ctx.speculative_offset);
2839 }
ec77d61f 2840
a0fd3373 2841 fprintf (f, " %s%s%s%s\n ",
ec77d61f 2842 final ? "This is a complete list." :
68377e53
JH
2843 "This is partial list; extra targets may be defined in other units.",
2844 ctx.maybe_in_construction ? " (base types included)" : "",
a0fd3373
JH
2845 ctx.maybe_derived_type ? " (derived types included)" : "",
2846 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
eefe9a99 2847 for (i = 0; i < targets.length (); i++)
ec77d61f
JH
2848 {
2849 char *name = NULL;
a0fd3373
JH
2850 if (i == (unsigned)speculative)
2851 fprintf (f, "\n Targets that are not likely:\n"
ec77d61f
JH
2852 " ");
2853 if (in_lto_p)
2854 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
2855 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
2856 if (in_lto_p)
2857 free (name);
2858 if (!targets[i]->definition)
2859 fprintf (f, " (no definition%s)",
2860 DECL_DECLARED_INLINE_P (targets[i]->decl)
2861 ? " inline" : "");
2862 }
68377e53 2863 fprintf (f, "\n\n");
eefe9a99
JH
2864}
2865
0e1474e5
JH
2866
2867 /* Return true if N can possibly be a target of a polymorphic call of
2868 OTR_TYPE/OTR_TOKEN. */
2869
2870bool
2871possible_polymorphic_call_target_p (tree otr_type,
2872 HOST_WIDE_INT otr_token,
68377e53 2873 const ipa_polymorphic_call_context &ctx,
0e1474e5
JH
2874 struct cgraph_node *n)
2875{
2876 vec <cgraph_node *> targets;
2877 unsigned int i;
68377e53 2878 enum built_in_function fcode;
450ad0cd 2879 bool final;
0e1474e5 2880
68377e53
JH
2881 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
2882 && ((fcode = DECL_FUNCTION_CODE (n->decl))
2883 == BUILT_IN_UNREACHABLE
2884 || fcode == BUILT_IN_TRAP))
2885 return true;
2886
c203e8a7 2887 if (!odr_hash)
0e1474e5 2888 return true;
68377e53 2889 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
0e1474e5 2890 for (i = 0; i < targets.length (); i++)
d52f5295 2891 if (n->semantically_equivalent_p (targets[i]))
0e1474e5 2892 return true;
450ad0cd
JH
2893
2894 /* At a moment we allow middle end to dig out new external declarations
2895 as a targets of polymorphic calls. */
67348ccc 2896 if (!final && !n->definition)
450ad0cd 2897 return true;
0e1474e5
JH
2898 return false;
2899}
2900
2901
2902/* After callgraph construction new external nodes may appear.
2903 Add them into the graph. */
2904
2905void
2906update_type_inheritance_graph (void)
2907{
2908 struct cgraph_node *n;
2909
c203e8a7 2910 if (!odr_hash)
0e1474e5
JH
2911 return;
2912 free_polymorphic_call_targets_hash ();
2913 timevar_push (TV_IPA_INHERITANCE);
68377e53 2914 /* We reconstruct the graph starting from the types of all methods seen in
2915 the unit. */
2916 FOR_EACH_FUNCTION (n)
67348ccc
DM
2917 if (DECL_VIRTUAL_P (n->decl)
2918 && !n->definition
d52f5295 2919 && n->real_symbol_p ())
549bcbd1
JH
2920 get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
2921 true);
0e1474e5
JH
2922 timevar_pop (TV_IPA_INHERITANCE);
2923}
bbc9396b
JH
2924
2925
2926/* Return true if N looks like a likely target of a polymorphic call.
2927 Rule out cxa_pure_virtual, noreturns, functions declared cold and
2928 other obvious cases. */
2929
2930bool
2931likely_target_p (struct cgraph_node *n)
2932{
2933 int flags;
2934 /* cxa_pure_virtual and similar things are not likely. */
67348ccc 2935 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
bbc9396b 2936 return false;
67348ccc 2937 flags = flags_from_decl_or_type (n->decl);
bbc9396b
JH
2938 if (flags & ECF_NORETURN)
2939 return false;
2940 if (lookup_attribute ("cold",
67348ccc 2941 DECL_ATTRIBUTES (n->decl)))
bbc9396b
JH
2942 return false;
2943 if (n->frequency < NODE_FREQUENCY_NORMAL)
2944 return false;
ccb05ef2
JH
2945 /* If there are no live virtual tables referring to the target,
2946 the only way the target can be called is via an instance coming from another
2947 compilation unit; speculative devirtualization is built around the
2948 assumption that this won't happen. */
2949 if (!referenced_from_vtable_p (n))
2950 return false;
bbc9396b
JH
2951 return true;
2952}
2953
2954/* The ipa-devirt pass.
3462aa02
JH
2955 When a polymorphic call has only one likely target in the unit,
2956 turn it into a speculative call. */
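/* Illustrative sketch (editorial addition) of the transformation performed
   below.  A hot polymorphic call such as

     obj->f ();

   with a single likely target B::f is turned by
   cgraph_turn_edge_to_speculative into approximately

     if (obj_f_ptr == &B::f)    // compare the function pointer loaded from the vtable
       B::f (obj);              // direct call, visible to the inliner
     else
       obj->f ();               // original indirect call as a fallback

   where most of the execution count (8/10 of count and frequency in the code
   below) is attributed to the direct branch.  obj_f_ptr is only a notational
   shorthand here for the loaded vtable entry.  */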
bbc9396b
JH
2957
2958static unsigned int
2959ipa_devirt (void)
2960{
2961 struct cgraph_node *n;
2962 struct pointer_set_t *bad_call_targets = pointer_set_create ();
2963 struct cgraph_edge *e;
2964
2965 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
2966 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
570215f9 2967 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
bbc9396b
JH
2968
2969 FOR_EACH_DEFINED_FUNCTION (n)
2970 {
2971 bool update = false;
2972 if (dump_file && n->indirect_calls)
2973 fprintf (dump_file, "\n\nProcesing function %s/%i\n",
fec39fa6 2974 n->name (), n->order);
bbc9396b
JH
2975 for (e = n->indirect_calls; e; e = e->next_callee)
2976 if (e->indirect_info->polymorphic)
2977 {
2978 struct cgraph_node *likely_target = NULL;
2979 void *cache_token;
2980 bool final;
a0fd3373 2981 int speculative_targets;
bbc9396b
JH
2982 vec <cgraph_node *>targets
2983 = possible_polymorphic_call_targets
a0fd3373 2984 (e, &final, &cache_token, &speculative_targets);
bbc9396b
JH
2985 unsigned int i;
2986
2987 if (dump_file)
2988 dump_possible_polymorphic_call_targets
2989 (dump_file, e);
3462aa02 2990
bbc9396b
JH
2991 npolymorphic++;
2992
bbc9396b
JH
2993 if (!cgraph_maybe_hot_edge_p (e))
2994 {
2995 if (dump_file)
ec77d61f 2996 fprintf (dump_file, "Call is cold\n\n");
bbc9396b
JH
2997 ncold++;
2998 continue;
2999 }
3000 if (e->speculative)
3001 {
3002 if (dump_file)
ec77d61f 3003 fprintf (dump_file, "Call is aready speculated\n\n");
bbc9396b
JH
3004 nspeculated++;
3005
3006 /* When dumping see if we agree with speculation. */
3007 if (!dump_file)
3008 continue;
3009 }
3010 if (pointer_set_contains (bad_call_targets,
3011 cache_token))
3012 {
3013 if (dump_file)
ec77d61f 3014 fprintf (dump_file, "Target list is known to be useless\n\n");
bbc9396b
JH
3015 nmultiple++;
3016 continue;
3017 }
c3284718 3018 for (i = 0; i < targets.length (); i++)
bbc9396b
JH
3019 if (likely_target_p (targets[i]))
3020 {
3021 if (likely_target)
3022 {
a0fd3373 3023 if (i < (unsigned) speculative_targets)
ec77d61f
JH
3024 {
3025 likely_target = NULL;
3026 if (dump_file)
3027 fprintf (dump_file, "More than one likely target\n\n");
3028 nmultiple++;
3029 }
bbc9396b
JH
3030 break;
3031 }
3032 likely_target = targets[i];
3033 }
3034 if (!likely_target)
3035 {
3036 pointer_set_insert (bad_call_targets, cache_token);
3037 continue;
3038 }
3039 /* This is reached only when dumping; check if we agree or disagree
3040 with the speculation. */
3041 if (e->speculative)
3042 {
3043 struct cgraph_edge *e2;
3044 struct ipa_ref *ref;
3045 cgraph_speculative_call_info (e, e2, e, ref);
d52f5295
ML
3046 if (e2->callee->ultimate_alias_target ()
3047 == likely_target->ultimate_alias_target ())
bbc9396b 3048 {
ec77d61f 3049 fprintf (dump_file, "We agree with speculation\n\n");
bbc9396b
JH
3050 nok++;
3051 }
3052 else
3053 {
ec77d61f 3054 fprintf (dump_file, "We disagree with speculation\n\n");
bbc9396b
JH
3055 nwrong++;
3056 }
3057 continue;
3058 }
67348ccc 3059 if (!likely_target->definition)
bbc9396b
JH
3060 {
3061 if (dump_file)
ec77d61f 3062 fprintf (dump_file, "Target is not an definition\n\n");
bbc9396b
JH
3063 nnotdefined++;
3064 continue;
3065 }
3066 /* Do not introduce new references to external symbols. While we
3067 can handle these just fine, it is common for programs to be built
3068 incorrectly, with headers defining methods they are never
3069 actually linked with. */
67348ccc 3070 if (DECL_EXTERNAL (likely_target->decl))
bbc9396b
JH
3071 {
3072 if (dump_file)
ec77d61f 3073 fprintf (dump_file, "Target is external\n\n");
bbc9396b
JH
3074 nexternal++;
3075 continue;
3076 }
570215f9
JM
3077 /* Don't use an implicitly-declared destructor (c++/58678). */
3078 struct cgraph_node *non_thunk_target
d52f5295 3079 = likely_target->function_symbol ();
570215f9
JM
3080 if (DECL_ARTIFICIAL (non_thunk_target->decl)
3081 && DECL_COMDAT (non_thunk_target->decl))
3082 {
3083 if (dump_file)
3084 fprintf (dump_file, "Target is artificial\n\n");
3085 nartificial++;
3086 continue;
3087 }
d52f5295
ML
3088 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3089 && likely_target->can_be_discarded_p ())
bbc9396b
JH
3090 {
3091 if (dump_file)
ec77d61f 3092 fprintf (dump_file, "Target is overwritable\n\n");
bbc9396b
JH
3093 noverwritable++;
3094 continue;
3095 }
2b5f0895 3096 else if (dbg_cnt (devirt))
bbc9396b 3097 {
2b5f0895
XDL
3098 if (dump_enabled_p ())
3099 {
807b7d62 3100 location_t locus = gimple_location_safe (e->call_stmt);
2b5f0895
XDL
3101 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
3102 "speculatively devirtualizing call in %s/%i to %s/%i\n",
3103 n->name (), n->order,
3104 likely_target->name (),
3105 likely_target->order);
3106 }
d52f5295 3107 if (!likely_target->can_be_discarded_p ())
5b79657a
JH
3108 {
3109 cgraph_node *alias;
d52f5295 3110 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
5b79657a
JH
3111 if (alias)
3112 likely_target = alias;
3113 }
bbc9396b
JH
3114 nconverted++;
3115 update = true;
3116 cgraph_turn_edge_to_speculative
3117 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
3118 }
3119 }
3120 if (update)
3121 inline_update_overall_summary (n);
3122 }
3123 pointer_set_destroy (bad_call_targets);
3124
3125 if (dump_file)
3126 fprintf (dump_file,
3127 "%i polymorphic calls, %i devirtualized,"
3128 " %i speculatively devirtualized, %i cold\n"
3129 "%i have multiple targets, %i overwritable,"
3130 " %i already speculated (%i agree, %i disagree),"
570215f9 3131 " %i external, %i not defined, %i artificial\n",
bbc9396b
JH
3132 npolymorphic, ndevirtualized, nconverted, ncold,
3133 nmultiple, noverwritable, nspeculated, nok, nwrong,
570215f9 3134 nexternal, nnotdefined, nartificial);
bbc9396b
JH
3135 return ndevirtualized ? TODO_remove_functions : 0;
3136}
3137
bbc9396b
JH
3138namespace {
3139
3140const pass_data pass_data_ipa_devirt =
3141{
3142 IPA_PASS, /* type */
3143 "devirt", /* name */
3144 OPTGROUP_NONE, /* optinfo_flags */
bbc9396b
JH
3145 TV_IPA_DEVIRT, /* tv_id */
3146 0, /* properties_required */
3147 0, /* properties_provided */
3148 0, /* properties_destroyed */
3149 0, /* todo_flags_start */
3150 ( TODO_dump_symtab ), /* todo_flags_finish */
3151};
3152
3153class pass_ipa_devirt : public ipa_opt_pass_d
3154{
3155public:
c3284718
RS
3156 pass_ipa_devirt (gcc::context *ctxt)
3157 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3158 NULL, /* generate_summary */
3159 NULL, /* write_summary */
3160 NULL, /* read_summary */
3161 NULL, /* write_optimization_summary */
3162 NULL, /* read_optimization_summary */
3163 NULL, /* stmt_fixup */
3164 0, /* function_transform_todo_flags_start */
3165 NULL, /* function_transform */
3166 NULL) /* variable_transform */
bbc9396b
JH
3167 {}
3168
3169 /* opt_pass methods: */
1a3d085c
TS
3170 virtual bool gate (function *)
3171 {
3172 return (flag_devirtualize
3173 && flag_devirtualize_speculatively
3174 && optimize);
3175 }
3176
be55bfe6 3177 virtual unsigned int execute (function *) { return ipa_devirt (); }
bbc9396b
JH
3178
3179}; // class pass_ipa_devirt
3180
3181} // anon namespace
3182
3183ipa_opt_pass_d *
3184make_pass_ipa_devirt (gcc::context *ctxt)
3185{
3186 return new pass_ipa_devirt (ctxt);
3187}
3188
eefe9a99 3189#include "gt-ipa-devirt.h"